From bad0bda9fe01f80bea4f65fcbff3fafbb5f75fce Mon Sep 17 00:00:00 2001
From: "Chrome Release Bot (LUCI)"
Date: Wed, 16 Aug 2023 14:10:37 +0000
Subject: [PATCH 1/3] Publish DEPS for 114.0.5735.331

git-subtree-dir: build
git-subtree-split: fed499399d3f44d3a7957549d493bf30a5d8c867
---
 .gitignore | 29 + .style.yapf | 6 + BUILD.gn | 80 + DIR_METADATA | 5 + OWNERS | 34 + OWNERS.setnoparent | 85 + OWNERS.status | 12 + PRESUBMIT.py | 57 + PRESUBMIT_test.py | 43 + README.md | 36 + action_helpers.py | 126 + action_helpers_unittest.py | 87 + add_rts_filters.py | 56 + android/AndroidManifest.xml | 12 + android/BUILD.gn | 241 + android/COMMON_METADATA | 1 + android/CheckInstallApk-debug.apk | Bin 0 -> 37106 bytes android/DIR_METADATA | 1 + android/OWNERS | 6 + android/PRESUBMIT.py | 137 + android/adb_chrome_public_command_line | 16 + android/adb_command_line.py | 97 + android/adb_gdb | 942 +++ android/adb_install_apk.py | 134 + android/adb_logcat_monitor.py | 157 + android/adb_logcat_printer.py | 222 + android/adb_profile_chrome | 9 + android/adb_profile_chrome_startup | 9 + android/adb_reverse_forwarder.py | 87 + android/adb_system_webengine_command_line | 16 + android/adb_system_webview_command_line | 16 + android/android_only_explicit_jni_exports.lst | 13 + android/android_only_jni_exports.lst | 13 + android/apk_operations.py | 2147 +++++++ android/apk_operations.pydeps | 112 + android/apply_shared_preference_file.py | 50 + android/asan_symbolize.py | 153 + android/bytecode/BUILD.gn | 86 + .../chromium/bytecode/ByteCodeProcessor.java | 167 + .../chromium/bytecode/ByteCodeRewriter.java | 116 + .../chromium/bytecode/ClassPathValidator.java | 238 + .../EmptyOverrideGeneratorClassAdapter.java | 104 + .../bytecode/FragmentActivityReplacer.java | 303 + .../bytecode/MethodCheckerClassAdapter.java | 144 + .../chromium/bytecode/MethodDescription.java | 20 + .../ParentMethodCheckerClassAdapter.java | 109 + .../chromium/bytecode/TraceEventAdder.java | 109 + .../bytecode/TraceEventAdderClassAdapter.java | 47 + .../TraceEventAdderMethodAdapter.java | 83 + .../java/org/chromium/bytecode/TypeUtils.java | 87 + android/chromium-debug.keystore | Bin 0 -> 2223 bytes android/chromium_annotations.flags | 79 + android/convert_dex_profile.py | 569 ++ android/convert_dex_profile_tests.py | 277 + android/dcheck_is_off.flags | 12 + android/devil_chromium.json | 84 + android/devil_chromium.py | 200 + android/devil_chromium.pydeps | 39 + android/diff_resource_sizes.py | 200 + android/docs/README.md | 16 + android/docs/build_config.md | 168 + android/docs/class_verification_failures.md | 294 + android/docs/coverage.md | 85 + android/docs/java_asserts.md | 80 + android/docs/java_optimization.md | 149 + android/docs/java_toolchain.md | 289 + android/docs/life_of_a_resource.md | 289 + android/docs/lint.md | 132 + android/download_doclava.py | 32 + android/dump_apk_resource_strings.py | 662 ++ android/envsetup.sh | 35 + android/fast_local_dev_server.py | 336 + android/generate_jacoco_report.py | 273 + android/gradle/AndroidManifest.xml | 14 + android/gradle/OWNERS | 2 + android/gradle/android.jinja | 114 + android/gradle/cmake.jinja | 25 + android/gradle/dependencies.jinja | 28 + android/gradle/generate_gradle.py | 937 +++ android/gradle/java.jinja | 41 + android/gradle/manifest.jinja | 7 + android/gradle/root.jinja | 24 + android/gtest_apk/BUILD.gn | 15 + .../NativeTestInstrumentationTestRunner.java | 281 + .../build/gtest_apk/NativeTestIntent.java | 22 + .../build/gtest_apk/TestStatusIntent.java | 21 +
.../build/gtest_apk/TestStatusReceiver.java | 89 + android/gyp/OWNERS | 6 + android/gyp/aar.py | 216 + android/gyp/aar.pydeps | 7 + android/gyp/aidl.py | 66 + android/gyp/aidl.pydeps | 8 + android/gyp/allot_native_libraries.py | 186 + android/gyp/allot_native_libraries.pydeps | 7 + android/gyp/apkbuilder.py | 533 ++ android/gyp/apkbuilder.pydeps | 10 + android/gyp/assert_static_initializers.py | 169 + android/gyp/assert_static_initializers.pydeps | 6 + android/gyp/binary_baseline_profile.py | 57 + android/gyp/binary_baseline_profile.pydeps | 7 + android/gyp/bundletool.py | 43 + android/gyp/bytecode_processor.py | 95 + android/gyp/bytecode_processor.pydeps | 28 + android/gyp/bytecode_rewriter.py | 38 + android/gyp/bytecode_rewriter.pydeps | 7 + android/gyp/check_flag_expectations.py | 132 + android/gyp/check_flag_expectations.pydeps | 8 + android/gyp/compile_java.py | 865 +++ android/gyp/compile_java.pydeps | 32 + android/gyp/compile_kt.py | 182 + android/gyp/compile_kt.pydeps | 33 + android/gyp/compile_resources.py | 1014 +++ android/gyp/compile_resources.pydeps | 38 + android/gyp/copy_ex.py | 131 + android/gyp/copy_ex.pydeps | 7 + android/gyp/create_apk_operations_script.py | 88 + .../gyp/create_apk_operations_script.pydeps | 6 + android/gyp/create_app_bundle.py | 605 ++ android/gyp/create_app_bundle.pydeps | 49 + android/gyp/create_app_bundle_apks.py | 53 + android/gyp/create_app_bundle_apks.pydeps | 36 + android/gyp/create_bundle_wrapper_script.py | 123 + .../gyp/create_bundle_wrapper_script.pydeps | 7 + android/gyp/create_java_binary_script.py | 137 + android/gyp/create_java_binary_script.pydeps | 7 + android/gyp/create_r_java.py | 65 + android/gyp/create_r_java.pydeps | 31 + android/gyp/create_r_txt.py | 31 + android/gyp/create_r_txt.pydeps | 31 + android/gyp/create_size_info_files.py | 195 + android/gyp/create_size_info_files.pydeps | 8 + android/gyp/create_stub_manifest.py | 41 + android/gyp/create_test_apk_wrapper_script.py | 85 + .../gyp/create_test_apk_wrapper_script.pydeps | 6 + android/gyp/create_ui_locale_resources.py | 92 + android/gyp/create_ui_locale_resources.pydeps | 31 + android/gyp/create_unwind_table.py | 1095 ++++ android/gyp/create_unwind_table_tests.py | 1182 ++++ android/gyp/dex.py | 538 ++ android/gyp/dex.pydeps | 10 + android/gyp/dex_test.py | 50 + android/gyp/dist_aar.py | 169 + android/gyp/dist_aar.pydeps | 9 + android/gyp/extract_unwind_tables.py | 283 + android/gyp/extract_unwind_tables_tests.py | 121 + android/gyp/filter_zip.py | 68 + android/gyp/filter_zip.pydeps | 8 + android/gyp/finalize_apk.py | 78 + android/gyp/find.py | 33 + android/gyp/flatc_java.py | 42 + android/gyp/flatc_java.pydeps | 8 + android/gyp/gcc_preprocess.py | 65 + android/gyp/gcc_preprocess.pydeps | 8 + android/gyp/generate_android_wrapper.py | 43 + android/gyp/generate_linker_version_script.py | 108 + .../gyp/generate_linker_version_script.pydeps | 7 + android/gyp/ijar.py | 37 + android/gyp/ijar.pydeps | 7 + android/gyp/jacoco_instr.py | 257 + android/gyp/jacoco_instr.pydeps | 8 + android/gyp/java_cpp_enum.py | 440 ++ android/gyp/java_cpp_enum.pydeps | 9 + android/gyp/java_cpp_enum_tests.py | 783 +++ android/gyp/java_cpp_features.py | 112 + android/gyp/java_cpp_features.pydeps | 9 + android/gyp/java_cpp_features_tests.py | 183 + android/gyp/java_cpp_strings.py | 105 + android/gyp/java_cpp_strings.pydeps | 9 + android/gyp/java_cpp_strings_tests.py | 151 + android/gyp/java_google_api_keys.py | 118 + android/gyp/java_google_api_keys.pydeps | 8 + android/gyp/java_google_api_keys_tests.py | 
42 + android/gyp/javac_output_processor.py | 216 + android/gyp/jinja_template.py | 163 + android/gyp/jinja_template.pydeps | 43 + android/gyp/lint.py | 530 ++ android/gyp/lint.pydeps | 9 + android/gyp/merge_manifest.py | 153 + android/gyp/merge_manifest.pydeps | 8 + android/gyp/nocompile_test.py | 212 + android/gyp/optimize_resources.py | 152 + android/gyp/optimize_resources.pydeps | 7 + android/gyp/prepare_resources.py | 209 + android/gyp/prepare_resources.pydeps | 35 + android/gyp/process_native_prebuilt.py | 39 + android/gyp/process_native_prebuilt.pydeps | 7 + android/gyp/proguard.py | 691 ++ android/gyp/proguard.pydeps | 12 + android/gyp/proto/Configuration_pb2.py | 697 ++ android/gyp/proto/README.md | 13 + android/gyp/proto/Resources_pb2.py | 2779 ++++++++ android/gyp/proto/__init__.py | 0 android/gyp/system_image_apks.py | 62 + android/gyp/system_image_apks.pydeps | 6 + android/gyp/test/BUILD.gn | 11 + .../chromium/helloworld/HelloWorldMain.java | 15 + .../helloworld/HelloWorldPrinter.java | 12 + android/gyp/trace_event_bytecode_rewriter.py | 50 + .../gyp/trace_event_bytecode_rewriter.pydeps | 7 + android/gyp/turbine.py | 168 + android/gyp/turbine.pydeps | 33 + android/gyp/unused_resources.py | 115 + android/gyp/unused_resources.pydeps | 30 + android/gyp/util/__init__.py | 3 + android/gyp/util/build_utils.py | 488 ++ android/gyp/util/build_utils_test.py | 48 + android/gyp/util/diff_utils.py | 136 + android/gyp/util/jar_info_utils.py | 59 + android/gyp/util/java_cpp_utils.py | 192 + android/gyp/util/manifest_utils.py | 322 + android/gyp/util/manifest_utils_test.py | 132 + android/gyp/util/md5_check.py | 469 ++ android/gyp/util/md5_check_test.py | 178 + android/gyp/util/parallel.py | 217 + android/gyp/util/protoresources.py | 308 + android/gyp/util/resource_utils.py | 1055 +++ android/gyp/util/resource_utils_test.py | 275 + android/gyp/util/resources_parser.py | 155 + android/gyp/util/server_utils.py | 47 + android/gyp/validate_inputs.py | 34 + .../validate_static_library_dex_references.py | 93 + ...idate_static_library_dex_references.pydeps | 10 + android/gyp/write_build_config.py | 2215 +++++++ android/gyp/write_build_config.pydeps | 30 + android/gyp/write_native_libraries_java.py | 124 + .../gyp/write_native_libraries_java.pydeps | 8 + android/gyp/zip.py | 83 + android/gyp/zip.pydeps | 8 + android/host_heartbeat.py | 36 + android/incremental_install/BUILD.gn | 24 + android/incremental_install/README.md | 83 + android/incremental_install/__init__.py | 3 + .../generate_android_manifest.py | 106 + .../generate_android_manifest.pydeps | 8 + android/incremental_install/installer.py | 374 ++ .../BootstrapApplication.java | 297 + .../BootstrapInstrumentation.java | 25 + .../ClassLoaderPatcher.java | 303 + .../chromium/incrementalinstall/LockFile.java | 129 + .../chromium/incrementalinstall/Reflect.java | 157 + .../SecondInstrumentation.java | 12 + .../AndroidHiddenApiBypass/BUILD.gn | 29 + .../AndroidHiddenApiBypass/LICENSE | 201 + .../AndroidHiddenApiBypass/README.chromium | 16 + .../AndroidHiddenApiBypass/README.md | 84 + .../org/lsposed/hiddenapibypass/Helper.java | 108 + .../hiddenapibypass/HiddenApiBypass.java | 415 ++ .../hiddenapibypass/library/BuildConfig.java | 9 + .../main/java/dalvik/system/VMRuntime.java | 9 + .../write_installer_json.py | 69 + .../write_installer_json.pydeps | 7 + .../build/annotations/AlwaysInline.java | 17 + .../build/annotations/CheckDiscard.java | 24 + .../build/annotations/DoNotClassMerge.java | 20 + .../build/annotations/DoNotInline.java | 20 + 
.../build/annotations/DoNotStripLogs.java | 17 + .../annotations/IdentifierNameString.java | 35 + .../chromium/build/annotations/MainDex.java | 23 + .../build/annotations/MockedInTests.java | 17 + .../build/annotations/UsedByReflection.java | 22 + android/java/templates/BuildConfig.template | 95 + android/java/templates/ProductConfig.template | 31 + android/java/test/DefaultLocaleLintTest.java | 17 + android/java/test/NewApiLintTest.java | 17 + ...eChangeIncrementalJavacTestHelper.template | 18 + ...tureChangeIncrementalJavacTestHelper2.java | 11 + android/java/test/missing_symbol/B.java | 9 + android/java/test/missing_symbol/D.template | 9 + .../test/missing_symbol/Importer.template | 13 + .../java/test/missing_symbol/ImportsSubB.java | 13 + android/java/test/missing_symbol/c.jar | Bin 0 -> 393 bytes .../sub/BInMethodSignature.java | 13 + .../java/test/missing_symbol/sub/SubB.java | 9 + android/junit/AndroidManifest_mergetest.xml | 12 + android/junit/res/values/strings.xml | 8 + .../org/chromium/build/AndroidAssetsTest.java | 58 + .../chromium/build/IncrementalJavacTest.java | 33 + android/lighttpd_server.py | 263 + android/list_class_verification_failures.py | 279 + .../list_class_verification_failures_test.py | 232 + android/list_java_targets.py | 297 + android/main_dex_classes.flags | 47 + android/method_count.py | 117 + android/native_flags/BUILD.gn | 37 + android/native_flags/argcapture.py | 17 + android/native_flags/empty.cc | 5 + android/print_cipd_version.py | 46 + android/provision_devices.py | 562 ++ android/pylib/__init__.py | 45 + android/pylib/android/__init__.py | 3 + android/pylib/android/logcat_symbolizer.py | 99 + android/pylib/base/__init__.py | 3 + android/pylib/base/base_test_result.py | 300 + .../pylib/base/base_test_result_unittest.py | 83 + android/pylib/base/environment.py | 52 + android/pylib/base/environment_factory.py | 35 + android/pylib/base/mock_environment.py | 11 + android/pylib/base/mock_test_instance.py | 11 + android/pylib/base/output_manager.py | 179 + android/pylib/base/output_manager_factory.py | 17 + .../pylib/base/output_manager_test_case.py | 15 + android/pylib/base/test_collection.py | 82 + android/pylib/base/test_exception.py | 7 + android/pylib/base/test_instance.py | 40 + android/pylib/base/test_instance_factory.py | 27 + android/pylib/base/test_run.py | 55 + android/pylib/base/test_run_factory.py | 37 + android/pylib/base/test_server.py | 19 + android/pylib/constants/__init__.py | 287 + android/pylib/constants/host_paths.py | 97 + .../pylib/constants/host_paths_unittest.py | 51 + android/pylib/content_settings.py | 80 + android/pylib/device/__init__.py | 0 android/pylib/device/commands/BUILD.gn | 20 + .../android/commands/unzip/Unzip.java | 93 + android/pylib/device_settings.py | 201 + android/pylib/dex/__init__.py | 3 + android/pylib/dex/dex_parser.py | 532 ++ android/pylib/gtest/__init__.py | 3 + android/pylib/gtest/filter/OWNERS | 1 + .../gtest/filter/base_unittests_disabled | 25 + ...ase_unittests_emulator_additional_disabled | 10 + .../gtest/filter/breakpad_unittests_disabled | 9 + .../filter/content_browsertests_disabled | 45 + .../pylib/gtest/filter/unit_tests_disabled | 64 + android/pylib/gtest/gtest_config.py | 55 + android/pylib/gtest/gtest_test_instance.py | 629 ++ .../pylib/gtest/gtest_test_instance_test.py | 348 + android/pylib/instrumentation/__init__.py | 3 + .../instrumentation/instrumentation_parser.py | 112 + .../instrumentation_parser_test.py | 135 + .../instrumentation_test_instance.py | 1235 ++++ 
.../instrumentation_test_instance_test.py | 1397 ++++ .../pylib/instrumentation/json_perf_parser.py | 162 + .../instrumentation/render_test.html.jinja | 40 + android/pylib/instrumentation/test_result.py | 33 + android/pylib/junit/__init__.py | 3 + android/pylib/junit/junit_test_instance.py | 87 + android/pylib/local/__init__.py | 3 + android/pylib/local/device/__init__.py | 3 + .../local/device/local_device_environment.py | 354 ++ .../local/device/local_device_gtest_run.py | 970 +++ .../device/local_device_gtest_run_test.py | 118 + .../local_device_instrumentation_test_run.py | 1718 +++++ ...al_device_instrumentation_test_run_test.py | 197 + .../device/local_device_monkey_test_run.py | 140 + .../local/device/local_device_test_run.py | 420 ++ .../device/local_device_test_run_test.py | 174 + android/pylib/local/emulator/OWNERS | 3 + android/pylib/local/emulator/__init__.py | 3 + android/pylib/local/emulator/avd.py | 1161 ++++ android/pylib/local/emulator/ini.py | 104 + android/pylib/local/emulator/ini_test.py | 155 + .../emulator/local_emulator_environment.py | 106 + .../pylib/local/emulator/proto/__init__.py | 3 + android/pylib/local/emulator/proto/avd.proto | 94 + android/pylib/local/emulator/proto/avd_pb2.py | 455 ++ .../pylib/local/local_test_server_spawner.py | 101 + android/pylib/local/machine/__init__.py | 3 + .../machine/local_machine_environment.py | 19 + .../machine/local_machine_junit_test_run.py | 443 ++ .../local_machine_junit_test_run_test.py | 89 + android/pylib/monkey/__init__.py | 0 android/pylib/monkey/monkey_test_instance.py | 73 + android/pylib/output/__init__.py | 3 + android/pylib/output/local_output_manager.py | 48 + .../pylib/output/local_output_manager_test.py | 34 + android/pylib/output/noop_output_manager.py | 38 + .../pylib/output/noop_output_manager_test.py | 27 + android/pylib/output/remote_output_manager.py | 88 + .../output/remote_output_manager_test.py | 32 + android/pylib/pexpect.py | 21 + android/pylib/restart_adbd.sh | 20 + android/pylib/results/__init__.py | 3 + .../results/flakiness_dashboard/__init__.py | 3 + .../json_results_generator.py | 702 ++ .../json_results_generator_unittest.py | 210 + .../flakiness_dashboard/results_uploader.py | 174 + android/pylib/results/json_results.py | 239 + android/pylib/results/json_results_test.py | 311 + .../pylib/results/presentation/__init__.py | 3 + .../presentation/javascript/main_html.js | 193 + .../presentation/standard_gtest_merge.py | 175 + .../results/presentation/template/main.html | 93 + .../results/presentation/template/table.html | 60 + .../presentation/test_results_presentation.py | 544 ++ .../test_results_presentation.pydeps | 46 + android/pylib/results/report_results.py | 135 + android/pylib/symbols/__init__.py | 0 android/pylib/symbols/deobfuscator.py | 50 + .../symbols/expensive_line_transformer.py | 233 + .../pylib/symbols/mock_addr2line/__init__.py | 0 .../symbols/mock_addr2line/mock_addr2line | 80 + android/pylib/symbols/stack_symbolizer.py | 137 + android/pylib/utils/__init__.py | 0 android/pylib/utils/app_bundle_utils.py | 195 + android/pylib/utils/argparse_utils.py | 52 + android/pylib/utils/chrome_proxy_utils.py | 171 + .../pylib/utils/chrome_proxy_utils_test.py | 235 + android/pylib/utils/decorators.py | 37 + android/pylib/utils/decorators_test.py | 104 + android/pylib/utils/device_dependencies.py | 145 + .../pylib/utils/device_dependencies_test.py | 52 + android/pylib/utils/dexdump.py | 313 + android/pylib/utils/dexdump_test.py | 207 + android/pylib/utils/gold_utils.py | 78 + 
android/pylib/utils/gold_utils_test.py | 123 + android/pylib/utils/google_storage_helper.py | 128 + .../pylib/utils/instrumentation_tracing.py | 206 + android/pylib/utils/local_utils.py | 19 + android/pylib/utils/logdog_helper.py | 96 + android/pylib/utils/logging_utils.py | 140 + android/pylib/utils/maven_downloader.py | 140 + android/pylib/utils/repo_utils.py | 28 + .../pylib/utils/shared_preference_utils.py | 116 + android/pylib/utils/simpleperf.py | 293 + android/pylib/utils/test_filter.py | 145 + android/pylib/utils/test_filter_test.py | 256 + android/pylib/utils/time_profile.py | 45 + android/pylib/utils/xvfb.py | 58 + android/pylib/valgrind_tools.py | 116 + android/pylintrc | 15 + android/resource_sizes.gni | 94 + android/resource_sizes.py | 929 +++ android/resource_sizes.pydeps | 61 + android/screenshot.py | 13 + android/stacktrace/BUILD.gn | 24 + android/stacktrace/README.md | 28 + android/stacktrace/crashpad_stackwalker.py | 175 + .../org/chromium/build/FlushingReTrace.java | 146 + android/stacktrace/java_deobfuscate.py | 36 + android/stacktrace/java_deobfuscate_java.jar | Bin 0 -> 7643 bytes android/stacktrace/java_deobfuscate_test.py | 174 + android/stacktrace/stackwalker.py | 136 + android/test/BUILD.gn | 83 + android/test/incremental_javac_gn/BUILD.gn | 98 + .../incremental_javac_test_android_library.py | 154 + android/test/missing_symbol_test.gni | 57 + android/test/nocompile_gn/BUILD.gn | 101 + .../test/nocompile_gn/nocompile_sources.gni | 14 + android/test_runner.py | 1388 ++++ android/test_runner.pydeps | 231 + android/test_wrapper/logdog_wrapper.py | 170 + android/test_wrapper/logdog_wrapper.pydeps | 12 + android/tests/symbolize/Makefile | 11 + android/tests/symbolize/a.cc | 14 + android/tests/symbolize/b.cc | 14 + android/tests/symbolize/liba.so | Bin 0 -> 6908 bytes android/tests/symbolize/libb.so | Bin 0 -> 6896 bytes android/tombstones.py | 280 + android/unused_resources/BUILD.gn | 19 + android/unused_resources/UnusedResources.java | 619 ++ .../update_deps/update_third_party_deps.py | 142 + android/update_verification.py | 116 + android/video_recorder.py | 13 + apple/OWNERS | 4 + apple/README.md | 12 + apple/apple_info_plist.gni | 60 + apple/compile_entitlements.gni | 51 + apple/compile_plist.gni | 76 + apple/convert_plist.gni | 41 + apple/plist_util.py | 236 + apple/tweak_info_plist.gni | 86 + apple/tweak_info_plist.py | 450 ++ apple/write_pkg_info.py | 52 + apple/xcrun.py | 52 + args/OWNERS | 1 + args/README.txt | 27 + args/chromeos/README.md | 57 + args/headless.gn | 56 + build-ctags.sh | 49 + build_config.h | 385 ++ buildflag.h | 47 + buildflag_header.gni | 137 + check_gn_headers.py | 311 + check_gn_headers_unittest.py | 101 + check_gn_headers_whitelist.txt | 232 + check_return_value.py | 18 + chromeos/.style.yapf | 2 + chromeos/OWNERS | 1 + chromeos/PRESUBMIT.py | 38 + chromeos/generate_skylab_deps.py | 206 + chromeos/generate_skylab_deps_test.py | 178 + chromeos/pylintrc | 15 + chromeos/test_runner.py | 990 +++ chromeos/test_runner_test.py | 381 ++ ciopfs.sha1 | 1 + cipd/cipd.gni | 140 + cipd/cipd_from_file.py | 65 + clobber.py | 143 + clobber_unittest.py | 148 + compiled_action.gni | 167 + compute_build_timestamp.py | 137 + config/BUILD.gn | 421 ++ config/BUILDCONFIG.gn | 753 +++ config/OWNERS | 4 + config/aix/BUILD.gn | 61 + config/android/BUILD.gn | 173 + config/android/DIR_METADATA | 1 + config/android/OWNERS | 1 + config/android/abi.gni | 107 + config/android/android_nocompile.gni | 114 + config/android/build_vars.gni | 31 + config/android/channel.gni | 14 
+ config/android/config.gni | 366 ++ config/android/copy_ex.gni | 72 + config/android/create_unwind_table.gni | 50 + config/android/extract_unwind_tables.gni | 47 + config/android/internal_rules.gni | 4463 +++++++++++++ config/android/linker_version_script.gni | 48 + config/android/rules.gni | 5651 +++++++++++++++++ config/android/sdk.gni | 13 + config/android/system_image.gni | 174 + config/android/test/proto/BUILD.gn | 103 + .../proto/absolute_dep/absolute_dep.proto | 10 + .../proto/relative_dep/relative_dep.proto | 10 + .../test/proto/root/absolute_child.proto | 10 + .../test/proto/root/absolute_root.proto | 18 + .../test/proto/root/relative_child.proto | 10 + .../test/proto/root/relative_root.proto | 18 + config/android/test/resource_overlay/BUILD.gn | 60 + .../java/res_template/values/values.xml | 10 + .../resource_overlay/ResourceOverlayTest.java | 49 + config/apple/BUILD.gn | 17 + config/apple/OWNERS | 1 + config/apple/sdk_info.py | 160 + config/apple/symbols.gni | 30 + config/arm.gni | 150 + config/buildflags_paint_preview.gni | 16 + config/c++/BUILD.gn | 100 + config/c++/c++.gni | 89 + config/c++/libc++.natvis | 435 ++ config/chrome_build.gni | 68 + config/chromebox_for_meetings/BUILD.gn | 11 + config/chromebox_for_meetings/OWNERS | 1 + config/chromebox_for_meetings/README.md | 31 + config/chromebox_for_meetings/buildflags.gni | 8 + config/chromecast/BUILD.gn | 90 + config/chromecast/OWNERS | 3 + config/chromecast_build.gni | 124 + config/chromeos/BUILD.gn | 62 + config/chromeos/args.gni | 41 + config/chromeos/rules.gni | 653 ++ config/chromeos/ui_mode.gni | 60 + config/clang/BUILD.gn | 81 + config/clang/clang.gni | 22 + config/compiler/BUILD.gn | 2729 ++++++++ config/compiler/compiler.gni | 343 + config/compiler/pgo/BUILD.gn | 135 + config/compiler/pgo/pgo.gni | 34 + config/compute_inputs_for_analyze.gni | 14 + config/coverage/BUILD.gn | 43 + config/coverage/OWNERS | 1 + config/coverage/coverage.gni | 40 + config/cronet/OWNERS | 1 + config/cronet/config.gni | 10 + config/dcheck_always_on.gni | 39 + config/devtools.gni | 37 + config/features.gni | 47 + config/freetype/BUILD.gn | 14 + config/freetype/OWNERS | 2 + config/freetype/freetype.gni | 14 + config/fuchsia/BUILD.gn | 100 + config/fuchsia/DIR_METADATA | 1 + config/fuchsia/OWNERS | 5 + config/fuchsia/build_symbol_archive.py | 62 + config/fuchsia/config.gni | 8 + config/fuchsia/extend_fvm.py | 26 + config/fuchsia/fuchsia_package_metadata.gni | 38 + config/fuchsia/generate_runner_scripts.gni | 250 + ...ackaged_content_embedder_excluded_dirs.gni | 16 + .../size_optimized_cast_receiver_args.gn | 43 + ...e_optimized_cast_receiver_args_internal.gn | 18 + config/fuchsia/sizes.gni | 51 + config/fuchsia/symbol_archive.gni | 47 + config/fuchsia/test/OWNERS | 7 + config/fuchsia/test/README.md | 112 + config/fuchsia/test/archivist.shard.test-cml | 28 + .../fuchsia/test/audio_output.shard.test-cml | 16 + .../chromium_system_test_facet.shard.test-cml | 8 + .../test/chromium_test_facet.shard.test-cml | 8 + .../test/context_provider.shard.test-cml | 30 + ...lf_test_ambient_exec_runner.shard.test-cml | 17 + .../test/elf_test_runner.shard.test-cml | 17 + config/fuchsia/test/fonts.shard.test-cml | 38 + .../test/gfx_test_ui_stack.shard.test-cml | 49 + config/fuchsia/test/logger.shard.test-cml | 8 + .../test/mark_vmo_executable.shard.test-cml | 12 + config/fuchsia/test/minimum.shard.test-cml | 78 + config/fuchsia/test/network.shard.test-cml | 20 + .../test/platform_video_codecs.shard.test-cml | 48 + .../fuchsia/test/present_view.shard.test-cml | 
42 + config/fuchsia/test/sysmem.shard.test-cml | 10 + .../test/system_test_minimum.shard.test-cml | 46 + config/fuchsia/test/test_fonts.shard.test-cml | 37 + .../fuchsia/test/test_ui_stack.shard.test-cml | 48 + .../fuchsia/test/web_instance.shard.test-cml | 21 + config/gcc/BUILD.gn | 117 + config/get_host_byteorder.py | 11 + config/host_byteorder.gni | 27 + config/ios/BUILD.gn | 292 + config/ios/BuildInfo.plist | 35 + config/ios/Host-Info.plist | 126 + config/ios/Module-Info.plist | 26 + config/ios/OWNERS | 1 + config/ios/asset_catalog.gni | 150 + config/ios/bundle_data_from_filelist.gni | 24 + config/ios/codesign.py | 722 +++ config/ios/compile_ib_files.py | 56 + config/ios/compile_xcassets_unittests.py | 141 + config/ios/config.gni | 23 + config/ios/dummy.py | 15 + config/ios/entitlements.plist | 12 + config/ios/find_signing_identity.py | 90 + config/ios/generate_umbrella_header.py | 75 + config/ios/hardlink.py | 71 + config/ios/ios_sdk.gni | 147 + config/ios/ios_sdk_overrides.gni | 17 + config/ios/ios_test_runner_wrapper.gni | 152 + config/ios/ios_test_runner_xcuitest.gni | 72 + .../resources/XCTRunnerAddition+Info.plist | 12 + config/ios/rules.gni | 1870 ++++++ config/ios/strip_arm64e.py | 70 + config/ios/swift_source_set.gni | 25 + config/ios/write_framework_hmap.py | 102 + config/ios/write_framework_modulemap.py | 28 + config/ios/xctest_shell.mm | 19 + config/linux/BUILD.gn | 70 + config/linux/OWNERS | 1 + config/linux/atk/BUILD.gn | 35 + config/linux/atspi2/BUILD.gn | 29 + config/linux/dbus/BUILD.gn | 14 + config/linux/dri/BUILD.gn | 18 + config/linux/gtk/BUILD.gn | 45 + config/linux/gtk/gtk.gni | 14 + config/linux/libdrm/BUILD.gn | 31 + config/linux/libffi/BUILD.gn | 24 + config/linux/libva/BUILD.gn | 17 + config/linux/nss/BUILD.gn | 18 + config/linux/pangocairo/BUILD.gn | 19 + config/linux/pangocairo/pangocairo.gni | 10 + config/linux/pkg-config.py | 247 + config/linux/pkg_config.gni | 129 + config/locales.gni | 261 + config/logging.gni | 32 + config/mac/BUILD.gn | 129 + config/mac/BuildInfo.plist | 16 + config/mac/OWNERS | 1 + config/mac/mac_sdk.gni | 136 + config/mac/mac_sdk_overrides.gni | 16 + config/mac/package_framework.py | 60 + config/mac/prepare_framework_version.py | 42 + config/mac/rules.gni | 627 ++ config/mips.gni | 67 + config/nacl/BUILD.gn | 132 + config/nacl/config.gni | 53 + config/nacl/host_toolchain.gni | 18 + config/nacl/rules.gni | 130 + config/ozone.gni | 132 + config/ozone_extra.gni | 33 + config/pch.gni | 15 + config/posix/BUILD.gn | 59 + config/profiling/OWNERS | 1 + config/profiling/profiling.gni | 14 + config/python.gni | 161 + config/riscv.gni | 19 + config/rts.gni | 5 + config/rust.gni | 310 + config/sanitizers/BUILD.gn | 609 ++ config/sanitizers/OWNERS | 1 + config/sanitizers/sanitizers.gni | 330 + config/siso/.gitignore | 1 + config/siso/OWNERS | 6 + config/siso/README.md | 8 + config/siso/clang_linux.star | 109 + config/siso/configure_siso.py | 36 + config/siso/linux.star | 43 + config/siso/mac.star | 23 + config/siso/main.star | 47 + config/siso/mojo.star | 129 + config/siso/nacl_linux.star | 179 + config/siso/remote_exec_wrapper.star | 58 + config/siso/simple.star | 46 + config/siso/windows.star | 23 + config/sysroot.gni | 79 + config/ui.gni | 55 + config/v8_target_cpu.gni | 64 + config/win/BUILD.gn | 607 ++ config/win/console_app.gni | 18 + config/win/control_flow_guard.gni | 23 + config/win/manifest.gni | 118 + config/win/visual_studio_version.gni | 44 + config/zip.gni | 59 + config/zos/BUILD.gn | 57 + copy_test_data_ios.py | 106 + cp.py | 23 + 
del_ninja_deps_cache.py | 40 + detect_host_arch.py | 54 + dir_exists.py | 23 + docs/debugging_slow_builds.md | 48 + docs/mac_hermetic_toolchain.md | 34 + docs/writing_gn_templates.md | 351 + dotfile_settings.gni | 43 + download_nacl_toolchains.py | 55 + env_dump.py | 56 + extract_from_cab.py | 64 + extract_partition.py | 176 + find_depot_tools.py | 74 + fix_gn_headers.py | 219 + fuchsia/COMMON_METADATA | 5 + fuchsia/DIR_METADATA | 1 + fuchsia/OWNERS | 18 + fuchsia/PRESUBMIT.py | 47 + fuchsia/SECURITY_OWNERS | 16 + fuchsia/__init__.py | 0 fuchsia/binary_size_differ.py | 153 + fuchsia/binary_size_differ_test.py | 171 + fuchsia/binary_sizes.py | 618 ++ fuchsia/binary_sizes_test.py | 132 + fuchsia/cipd/BUILD.gn | 436 ++ fuchsia/cipd/DIR_METADATA | 1 + fuchsia/cipd/README.md | 11 + fuchsia/cipd/version.template | 1 + fuchsia/gcs_download.py | 51 + fuchsia/gcs_download_test.py | 88 + fuchsia/linux_internal.sdk.sha1 | 1 + fuchsia/sdk-bucket.txt | 0 fuchsia/sdk-hash-files.list | 1 + fuchsia/test/.coveragerc | 8 + fuchsia/test/.style.yapf | 2 + fuchsia/test/OWNERS | 3 + fuchsia/test/PRESUBMIT.py | 51 + fuchsia/test/base_ermine_ctl.py | 201 + fuchsia/test/base_ermine_ctl_unittests.py | 236 + fuchsia/test/common.py | 617 ++ fuchsia/test/common_unittests.py | 54 + fuchsia/test/compatible_utils.py | 207 + fuchsia/test/compatible_utils_unittests.py | 238 + fuchsia/test/coveragetest.py | 59 + fuchsia/test/deploy_to_fuchsia.py | 44 + fuchsia/test/deploy_to_fuchsia_unittests.py | 38 + fuchsia/test/ermine_ctl.py | 25 + fuchsia/test/ffx_emulator.py | 162 + fuchsia/test/ffx_emulator_unittests.py | 49 + fuchsia/test/ffx_integration.py | 236 + fuchsia/test/flash_device.py | 243 + fuchsia/test/flash_device_unittests.py | 349 + fuchsia/test/lockfile.py | 79 + fuchsia/test/log_manager.py | 160 + fuchsia/test/log_manager_unittests.py | 115 + fuchsia/test/publish_package.py | 68 + fuchsia/test/publish_package_unittests.py | 103 + fuchsia/test/pylintrc | 26 + fuchsia/test/run_blink_test.py | 36 + fuchsia/test/run_executable_test.py | 263 + fuchsia/test/run_pytype.py | 42 + fuchsia/test/run_telemetry_test.py | 61 + fuchsia/test/run_test.py | 127 + fuchsia/test/run_webpage_test.py | 60 + fuchsia/test/serve_repo.py | 98 + fuchsia/test/serve_repo_unittests.py | 89 + fuchsia/test/start_emulator.py | 83 + fuchsia/test/test_runner.py | 74 + fuchsia/test/test_server.py | 130 + fuchsia/test/test_server_unittests.py | 84 + fuchsia/update_images.py | 266 + fuchsia/update_images_test.py | 97 + fuchsia/update_product_bundles.py | 359 ++ fuchsia/update_product_bundles_test.py | 288 + fuchsia/update_sdk.py | 109 + fuchsia/update_sdk_test.py | 69 + gdb-add-index | 184 + get_landmines.py | 91 + get_symlink_targets.py | 27 + gn_helpers.py | 542 ++ gn_helpers_unittest.py | 316 + gn_logs.gni | 8 + gn_run_binary.py | 35 + install-build-deps.sh | 812 +++ install-chroot.sh | 888 +++ internal/README.chromium | 24 + ios/OWNERS | 1 + ios/PRESUBMIT.py | 20 + ios/extension_bundle_data.gni | 23 + ios/intent_definition.gni | 121 + ios/presubmit_support.py | 39 + ios/presubmit_support_test.py | 165 + ios/test_data/bar.html | 0 ios/test_data/basic.filelist | 7 + ios/test_data/basic.globlist | 5 + ios/test_data/comment.filelist | 2 + ios/test_data/comment.globlist | 7 + ios/test_data/different_local_path.filelist | 9 + ios/test_data/different_local_path.globlist | 6 + ios/test_data/duplicates.filelist | 7 + ios/test_data/duplicates.globlist | 7 + ios/test_data/exclusions.filelist | 9 + ios/test_data/exclusions.globlist | 6 + 
ios/test_data/extra.filelist | 8 + ios/test_data/extra.globlist | 5 + ios/test_data/foo.css | 0 .../ignore_outside_globlist_dir.filelist | 8 + .../ignore_outside_globlist_dir.globlist | 8 + ios/test_data/missing.filelist | 9 + ios/test_data/missing.globlist | 8 + ios/test_data/outside_globlist_dir.filelist | 8 + ios/test_data/outside_globlist_dir.globlist | 6 + ios/test_data/reorder.filelist | 9 + ios/test_data/reorder.globlist | 6 + ios/test_data/repository_relative.filelist | 9 + ios/test_data/repository_relative.globlist | 6 + ios/test_data/subdirectory/baz.txt | 0 ios/update_bundle_filelist.py | 318 + lacros/BUILD.gn | 28 + lacros/OWNERS | 2 + lacros/PRESUBMIT.py | 28 + lacros/README.md | 11 + lacros/lacros_resource_sizes.gni | 21 + lacros/lacros_resource_sizes.py | 398 ++ lacros/lacros_resource_sizes.pydeps | 15 + lacros/mojo_connection_lacros_launcher.py | 210 + lacros/test_runner.py | 856 +++ lacros/test_runner_test.py | 300 + landmine_utils.py | 33 + landmines.py | 151 + linux/BUILD.gn | 32 + linux/OWNERS | 3 + linux/chrome.map | 97 + linux/dump_app_syms.py | 30 + linux/extract_symbols.gni | 41 + linux/install-chromeos-fonts.py | 119 + linux/libncursesw/DIR_METADATA | 5 + linux/libncursesw/OWNERS | 1 + linux/libpci/BUILD.gn | 22 + linux/libudev/BUILD.gn | 67 + linux/rewrite_dirs.py | 72 + linux/strip_binary.gni | 56 + linux/strip_binary.py | 32 + linux/sysroot_scripts/build_and_upload.py | 101 + linux/sysroot_scripts/generate_keyring.sh | 41 + .../generated_package_lists/bullseye.amd64 | 411 ++ .../generated_package_lists/bullseye.arm | 411 ++ .../generated_package_lists/bullseye.arm64 | 414 ++ .../generated_package_lists/bullseye.armel | 409 ++ .../generated_package_lists/bullseye.i386 | 409 ++ .../generated_package_lists/bullseye.mips64el | 404 ++ .../generated_package_lists/bullseye.mipsel | 403 ++ linux/sysroot_scripts/install-sysroot.py | 170 + linux/sysroot_scripts/keyring.gpg | Bin 0 -> 94381 bytes linux/sysroot_scripts/libxcomposite1-symbols | 15 + linux/sysroot_scripts/merge-package-lists.py | 34 + linux/sysroot_scripts/reversion_glibc.py | 124 + .../sysroot-creator-bullseye.sh | 505 ++ linux/sysroot_scripts/sysroot-creator.sh | 963 +++ linux/sysroot_scripts/sysroots.json | 44 + .../update-archive-timestamp.sh | 18 + linux/unbundle/README | 57 + linux/unbundle/absl_algorithm.gn | 22 + linux/unbundle/absl_base.gn | 67 + linux/unbundle/absl_cleanup.gn | 20 + linux/unbundle/absl_container.gn | 119 + linux/unbundle/absl_debugging.gn | 47 + linux/unbundle/absl_flags.gn | 50 + linux/unbundle/absl_functional.gn | 49 + linux/unbundle/absl_hash.gn | 22 + linux/unbundle/absl_log.gn | 13 + linux/unbundle/absl_log_internal.gn | 1 + linux/unbundle/absl_memory.gn | 20 + linux/unbundle/absl_meta.gn | 20 + linux/unbundle/absl_numeric.gn | 32 + linux/unbundle/absl_random.gn | 17 + linux/unbundle/absl_status.gn | 38 + linux/unbundle/absl_strings.gn | 93 + linux/unbundle/absl_synchronization.gn | 22 + linux/unbundle/absl_time.gn | 21 + linux/unbundle/absl_types.gn | 97 + linux/unbundle/absl_utility.gn | 17 + linux/unbundle/brotli.gn | 35 + linux/unbundle/crc32c.gn | 11 + linux/unbundle/dav1d.gn | 23 + linux/unbundle/double-conversion.gn | 23 + linux/unbundle/ffmpeg.gn | 38 + linux/unbundle/flac.gn | 30 + linux/unbundle/fontconfig.gn | 13 + linux/unbundle/freetype.gn | 14 + linux/unbundle/harfbuzz-ng.gn | 13 + linux/unbundle/icu.gn | 254 + linux/unbundle/jsoncpp.gn | 32 + linux/unbundle/libXNVCtrl.gn | 19 + linux/unbundle/libaom.gn | 34 + linux/unbundle/libavif.gn | 16 + 
linux/unbundle/libdrm.gn | 20 + linux/unbundle/libevent.gn | 15 + linux/unbundle/libjpeg.gn | 12 + linux/unbundle/libpng.gn | 23 + linux/unbundle/libvpx.gn | 32 + linux/unbundle/libwebp.gn | 35 + linux/unbundle/libxml.gn | 58 + linux/unbundle/libxslt.gn | 13 + linux/unbundle/libyuv.gn | 37 + linux/unbundle/openh264.gn | 36 + linux/unbundle/opus.gn | 43 + linux/unbundle/re2.gn | 27 + linux/unbundle/remove_bundled_libraries.py | 105 + linux/unbundle/replace_gn_files.py | 121 + linux/unbundle/snappy.gn | 20 + linux/unbundle/swiftshader-SPIRV-Headers.gn | 17 + linux/unbundle/swiftshader-SPIRV-Tools.gn | 32 + linux/unbundle/vulkan-SPIRV-Headers.gn | 19 + linux/unbundle/vulkan-SPIRV-Tools.gn | 69 + linux/unbundle/woff2.gn | 20 + linux/unbundle/zlib.gn | 64 + locale_tool.py | 1511 +++++ mac/OWNERS | 1 + mac/find_sdk.py | 112 + mac/should_use_hermetic_xcode.py | 62 + mac_toolchain.py | 210 + metadata.json.in | 6 + nocompile.gni | 151 + noop.py | 4 + partitioned_shared_library.gni | 142 + precompile.cc | 7 + precompile.h | 53 + print_python_deps.py | 186 + private_code_test/BUILD.gn | 47 + private_code_test/README.md | 36 + private_code_test/list_gclient_deps.py | 43 + private_code_test/private_code_test.gni | 63 + private_code_test/private_code_test.py | 135 + protoc_java.py | 109 + protoc_java.pydeps | 8 + redirect_stdout.py | 28 + rm.py | 39 + rust/BUILD.gn | 84 + rust/OWNERS | 7 + rust/analyze.gni | 79 + rust/cargo_crate.gni | 340 + rust/collect_rust_sources.py | 64 + rust/filter_clang_args.py | 31 + rust/rs_bindings_from_cc.gni | 297 + rust/run_bindgen.py | 98 + rust/run_build_script.py | 164 + rust/run_rs_bindings_from_cc.py | 127 + rust/rust_bindgen.gni | 193 + rust/rust_executable.gni | 70 + rust/rust_macro.gni | 19 + rust/rust_shared_library.gni | 26 + rust/rust_static_library.gni | 169 + rust/rust_target.gni | 448 ++ rust/rust_unit_test.gni | 138 + rust/rust_unit_tests_group.gni | 93 + rust/rustc_wrapper.py | 157 + rust/std/BUILD.gn | 346 + rust/std/fake_root/.cargo/config.toml | 5 + rust/std/fake_root/.gitignore | 2 + rust/std/fake_root/Cargo.toml | 16 + rust/std/fake_root/README.md | 2 + rust/std/fake_root/src/main.rs | 3 + rust/std/find_std_rlibs.py | 164 + rust/std/gnrt_config.toml | 60 + rust/std/immediate_crash.h | 170 + rust/std/remap_alloc.cc | 152 + rust/std/rules/BUILD.gn | 878 +++ rust/tests/BUILD.gn | 97 + rust/tests/bindgen_test/BUILD.gn | 48 + rust/tests/bindgen_test/lib.c | 11 + rust/tests/bindgen_test/lib.h | 45 + rust/tests/bindgen_test/lib2.h | 10 + rust/tests/bindgen_test/main.rs | 9 + rust/tests/bindgen_test/src/lib.rs | 25 + rust/tests/test_aliased_deps/BUILD.gn | 30 + rust/tests/test_aliased_deps/lib.rs | 11 + rust/tests/test_aliased_deps/main.rs | 7 + rust/tests/test_aliased_deps/real_name.rs | 11 + rust/tests/test_bin_crate/BUILD.gn | 15 + rust/tests/test_bin_crate/crate/build.rs | 62 + rust/tests/test_bin_crate/crate/src/main.rs | 15 + rust/tests/test_control_flow_guard/BUILD.gn | 14 + .../test_control_flow_guard.rs | 43 + rust/tests/test_cpp_including_rust/BUILD.gn | 23 + rust/tests/test_cpp_including_rust/main.cc | 11 + .../test_cpp_including_rust/unittests.cc | 31 + rust/tests/test_local_std/BUILD.gn | 23 + rust/tests/test_local_std/lib.rs | 8 + rust/tests/test_local_std/main.rs | 7 + rust/tests/test_proc_macro_crate/BUILD.gn | 12 + .../test_proc_macro_crate/crate/src/lib.rs | 10 + rust/tests/test_rlib_crate/BUILD.gn | 55 + rust/tests/test_rlib_crate/crate/build.rs | 90 + rust/tests/test_rlib_crate/crate/src/lib.rs | 56 + 
rust/tests/test_rlib_crate/crate/src/main.rs | 7 + rust/tests/test_rs_bindings_from_cc/BUILD.gn | 58 + rust/tests/test_rs_bindings_from_cc/main.rs | 32 + .../self_contained_target_header1.h | 12 + .../self_contained_target_header2.cc | 9 + .../self_contained_target_header2.h | 14 + .../target_depending_on_another.h | 14 + rust/tests/test_rust_exe/BUILD.gn | 17 + rust/tests/test_rust_exe/main.rs | 32 + .../BUILD.gn | 25 + .../main.rs | 8 + .../transitive_lib.rs | 7 + .../v1/BUILD.gn | 12 + .../v1/src/lib.rs | 7 + .../v2/BUILD.gn | 12 + .../v2/src/lib.rs | 7 + rust/tests/test_rust_shared_library/BUILD.gn | 12 + .../tests/test_rust_shared_library/src/lib.rs | 41 + rust/tests/test_rust_static_library/BUILD.gn | 12 + .../tests/test_rust_static_library/src/lib.rs | 48 + .../BUILD.gn | 15 + .../foo.rs | 12 + rust/tests/test_rust_unittests/BUILD.gn | 11 + rust/tests/test_rust_unittests/main.rs | 20 + rust/tests/test_serde_json_lenient/BUILD.gn | 27 + rust/tests/test_serde_json_lenient/lib.rs | 29 + .../test_serde_json_lenient/unittests.cc | 10 + rust/tests/test_simple_rust_exe/BUILD.gn | 12 + rust/tests/test_simple_rust_exe/main.rs | 7 + sample_arg_file.gn | 6 + sanitize-mac-build-log.sed | 33 + sanitize-mac-build-log.sh | 5 + sanitize-win-build-log.sed | 15 + sanitize-win-build-log.sh | 5 + sanitizers/OWNERS | 6 + sanitizers/asan_suppressions.cc | 23 + sanitizers/lsan_suppressions.cc | 110 + sanitizers/sanitizer_options.cc | 189 + sanitizers/tsan_suppressions.cc | 96 + shim_headers.gni | 41 + skia_gold_common/.style.yapf | 6 + skia_gold_common/OWNERS | 1 + skia_gold_common/PRESUBMIT.py | 39 + skia_gold_common/README.md | 6 + skia_gold_common/__init__.py | 3 + .../output_managerless_skia_gold_session.py | 62 + ..._managerless_skia_gold_session_unittest.py | 137 + skia_gold_common/run_pytype.py | 44 + skia_gold_common/skia_gold_properties.py | 192 + .../skia_gold_properties_unittest.py | 202 + skia_gold_common/skia_gold_session.py | 576 ++ skia_gold_common/skia_gold_session_manager.py | 130 + .../skia_gold_session_manager_unittest.py | 184 + .../skia_gold_session_unittest.py | 815 +++ skia_gold_common/unittest_utils.py | 44 + symlink.gni | 82 + symlink.py | 92 + timestamp.gni | 34 + toolchain/BUILD.gn | 30 + toolchain/OWNERS | 2 + toolchain/aix/BUILD.gn | 24 + toolchain/android/BUILD.gn | 168 + toolchain/android/DIR_METADATA | 1 + toolchain/android/OWNERS | 1 + toolchain/apple/.style.yapf | 2 + toolchain/apple/BUILD.gn | 22 + toolchain/apple/OWNERS | 1 + toolchain/apple/filter_libtool.py | 51 + toolchain/apple/get_tool_mtime.py | 18 + toolchain/apple/linker_driver.py | 368 ++ toolchain/apple/toolchain.gni | 831 +++ toolchain/cc_wrapper.gni | 43 + toolchain/clang_code_coverage_wrapper.py | 240 + toolchain/concurrent_links.gni | 117 + toolchain/cros/BUILD.gn | 305 + toolchain/cros_toolchain.gni | 92 + toolchain/fuchsia/BUILD.gn | 38 + toolchain/fuchsia/DIR_METADATA | 1 + toolchain/fuchsia/OWNERS | 1 + toolchain/gcc_link_wrapper.py | 91 + toolchain/gcc_solink_wrapper.py | 211 + toolchain/gcc_toolchain.gni | 897 +++ toolchain/get_concurrent_links.py | 157 + toolchain/get_cpu_count.py | 22 + toolchain/get_goma_dir.py | 42 + toolchain/goma.gni | 30 + toolchain/ios/BUILD.gn | 66 + toolchain/ios/OWNERS | 1 + toolchain/ios/compile_xcassets.py | 293 + toolchain/ios/swiftc.py | 327 + toolchain/kythe.gni | 11 + toolchain/linux/BUILD.gn | 440 ++ toolchain/linux/unbundle/BUILD.gn | 41 + toolchain/linux/unbundle/README.md | 41 + toolchain/mac/BUILD.gn | 116 + toolchain/mac/OWNERS | 1 + toolchain/nacl/BUILD.gn | 
290 + toolchain/nacl_toolchain.gni | 73 + toolchain/rbe.gni | 62 + toolchain/toolchain.gni | 105 + toolchain/whole_archive.py | 51 + toolchain/win/BUILD.gn | 94 + toolchain/win/midl.gni | 200 + toolchain/win/midl.py | 486 ++ toolchain/win/ml.py | 286 + toolchain/win/rc/.gitignore | 3 + toolchain/win/rc/README.md | 30 + toolchain/win/rc/linux64/rc.sha1 | 1 + toolchain/win/rc/mac/rc.sha1 | 1 + toolchain/win/rc/rc.py | 275 + toolchain/win/rc/upload_rc_binaries.sh | 46 + toolchain/win/rc/win/rc.exe.sha1 | 1 + toolchain/win/setup_toolchain.py | 316 + toolchain/win/tool_wrapper.py | 191 + toolchain/win/toolchain.gni | 691 ++ toolchain/win/win_toolchain_data.gni | 43 + toolchain/wrapper_utils.py | 99 + toolchain/zos/BUILD.gn | 174 + tree_truth.sh | 102 + update-linux-sandbox.sh | 82 + util/BUILD.gn | 37 + util/LASTCHANGE.dummy | 1 + util/PRESUBMIT.py | 64 + util/action_remote.py | 146 + util/android_chrome_version.py | 354 ++ util/android_chrome_version_test.py | 958 +++ util/branding.gni | 45 + util/chromium_git_revision.h.in | 8 + util/generate_wrapper.gni | 98 + util/generate_wrapper.py | 251 + util/java_action.gni | 99 + util/java_action.py | 82 + util/lastchange.gni | 16 + util/lastchange.py | 344 + util/lib/__init__.py | 0 util/lib/common/PRESUBMIT.py | 24 + util/lib/common/__init__.py | 0 util/lib/common/chrome_test_server_spawner.py | 485 ++ util/lib/common/perf_result_data_type.py | 20 + util/lib/common/perf_tests_results_helper.py | 201 + util/lib/common/unittest_util.py | 155 + util/lib/common/unittest_util_test.py | 65 + util/lib/common/util.py | 151 + util/lib/results/DIR_METADATA | 11 + util/lib/results/OWNERS | 1 + util/lib/results/__init__.py | 0 util/lib/results/result_sink.py | 207 + util/lib/results/result_sink_test.py | 138 + util/lib/results/result_types.py | 25 + util/process_version.gni | 128 + util/version.py | 266 + util/version_test.py | 164 + vs_toolchain.py | 589 ++ whitespace_file.txt | 211 + win/BUILD.gn | 81 + win/as_invoker.manifest | 9 + win/common_controls.manifest | 8 + win/compatibility.manifest | 20 + win/copy_cdb_to_output.py | 115 + win/gn_meta_sln.py | 213 + win/message_compiler.gni | 87 + win/message_compiler.py | 147 + win/reorder-imports.py | 98 + win/require_administrator.manifest | 9 + win/segment_heap.manifest | 8 + win/set_appcontainer_acls.py | 35 + win/use_ansi_codes.py | 11 + write_buildflag_header.py | 97 + xcode_binaries.yaml | 59 + zip_helpers.py | 238 + zip_helpers_unittest.py | 58 + 1161 files changed, 154403 insertions(+) create mode 100644 .gitignore create mode 100644 .style.yapf create mode 100644 BUILD.gn create mode 100644 DIR_METADATA create mode 100644 OWNERS create mode 100644 OWNERS.setnoparent create mode 100644 OWNERS.status create mode 100644 PRESUBMIT.py create mode 100755 PRESUBMIT_test.py create mode 100644 README.md create mode 100644 action_helpers.py create mode 100755 action_helpers_unittest.py create mode 100755 add_rts_filters.py create mode 100644 android/AndroidManifest.xml create mode 100644 android/BUILD.gn create mode 100644 android/COMMON_METADATA create mode 100644 android/CheckInstallApk-debug.apk create mode 100644 android/DIR_METADATA create mode 100644 android/OWNERS create mode 100644 android/PRESUBMIT.py create mode 100755 android/adb_chrome_public_command_line create mode 100755 android/adb_command_line.py create mode 100755 android/adb_gdb create mode 100755 android/adb_install_apk.py create mode 100755 android/adb_logcat_monitor.py create mode 100755 android/adb_logcat_printer.py create mode 100755 
android/adb_profile_chrome create mode 100755 android/adb_profile_chrome_startup create mode 100755 android/adb_reverse_forwarder.py create mode 100755 android/adb_system_webengine_command_line create mode 100755 android/adb_system_webview_command_line create mode 100644 android/android_only_explicit_jni_exports.lst create mode 100644 android/android_only_jni_exports.lst create mode 100755 android/apk_operations.py create mode 100644 android/apk_operations.pydeps create mode 100755 android/apply_shared_preference_file.py create mode 100755 android/asan_symbolize.py create mode 100644 android/bytecode/BUILD.gn create mode 100644 android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java create mode 100644 android/bytecode/java/org/chromium/bytecode/ByteCodeRewriter.java create mode 100644 android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java create mode 100644 android/bytecode/java/org/chromium/bytecode/EmptyOverrideGeneratorClassAdapter.java create mode 100644 android/bytecode/java/org/chromium/bytecode/FragmentActivityReplacer.java create mode 100644 android/bytecode/java/org/chromium/bytecode/MethodCheckerClassAdapter.java create mode 100644 android/bytecode/java/org/chromium/bytecode/MethodDescription.java create mode 100644 android/bytecode/java/org/chromium/bytecode/ParentMethodCheckerClassAdapter.java create mode 100644 android/bytecode/java/org/chromium/bytecode/TraceEventAdder.java create mode 100644 android/bytecode/java/org/chromium/bytecode/TraceEventAdderClassAdapter.java create mode 100644 android/bytecode/java/org/chromium/bytecode/TraceEventAdderMethodAdapter.java create mode 100644 android/bytecode/java/org/chromium/bytecode/TypeUtils.java create mode 100644 android/chromium-debug.keystore create mode 100644 android/chromium_annotations.flags create mode 100755 android/convert_dex_profile.py create mode 100755 android/convert_dex_profile_tests.py create mode 100644 android/dcheck_is_off.flags create mode 100644 android/devil_chromium.json create mode 100644 android/devil_chromium.py create mode 100644 android/devil_chromium.pydeps create mode 100755 android/diff_resource_sizes.py create mode 100644 android/docs/README.md create mode 100644 android/docs/build_config.md create mode 100644 android/docs/class_verification_failures.md create mode 100644 android/docs/coverage.md create mode 100644 android/docs/java_asserts.md create mode 100644 android/docs/java_optimization.md create mode 100644 android/docs/java_toolchain.md create mode 100644 android/docs/life_of_a_resource.md create mode 100644 android/docs/lint.md create mode 100755 android/download_doclava.py create mode 100755 android/dump_apk_resource_strings.py create mode 100755 android/envsetup.sh create mode 100755 android/fast_local_dev_server.py create mode 100755 android/generate_jacoco_report.py create mode 100644 android/gradle/AndroidManifest.xml create mode 100644 android/gradle/OWNERS create mode 100644 android/gradle/android.jinja create mode 100644 android/gradle/cmake.jinja create mode 100644 android/gradle/dependencies.jinja create mode 100755 android/gradle/generate_gradle.py create mode 100644 android/gradle/java.jinja create mode 100644 android/gradle/manifest.jinja create mode 100644 android/gradle/root.jinja create mode 100644 android/gtest_apk/BUILD.gn create mode 100644 android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestInstrumentationTestRunner.java create mode 100644 android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestIntent.java create mode 
100644 android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusIntent.java create mode 100644 android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusReceiver.java create mode 100644 android/gyp/OWNERS create mode 100755 android/gyp/aar.py create mode 100644 android/gyp/aar.pydeps create mode 100755 android/gyp/aidl.py create mode 100644 android/gyp/aidl.pydeps create mode 100755 android/gyp/allot_native_libraries.py create mode 100644 android/gyp/allot_native_libraries.pydeps create mode 100755 android/gyp/apkbuilder.py create mode 100644 android/gyp/apkbuilder.pydeps create mode 100755 android/gyp/assert_static_initializers.py create mode 100644 android/gyp/assert_static_initializers.pydeps create mode 100755 android/gyp/binary_baseline_profile.py create mode 100644 android/gyp/binary_baseline_profile.pydeps create mode 100755 android/gyp/bundletool.py create mode 100755 android/gyp/bytecode_processor.py create mode 100644 android/gyp/bytecode_processor.pydeps create mode 100755 android/gyp/bytecode_rewriter.py create mode 100644 android/gyp/bytecode_rewriter.pydeps create mode 100755 android/gyp/check_flag_expectations.py create mode 100644 android/gyp/check_flag_expectations.pydeps create mode 100755 android/gyp/compile_java.py create mode 100644 android/gyp/compile_java.pydeps create mode 100755 android/gyp/compile_kt.py create mode 100644 android/gyp/compile_kt.pydeps create mode 100755 android/gyp/compile_resources.py create mode 100644 android/gyp/compile_resources.pydeps create mode 100755 android/gyp/copy_ex.py create mode 100644 android/gyp/copy_ex.pydeps create mode 100755 android/gyp/create_apk_operations_script.py create mode 100644 android/gyp/create_apk_operations_script.pydeps create mode 100755 android/gyp/create_app_bundle.py create mode 100644 android/gyp/create_app_bundle.pydeps create mode 100755 android/gyp/create_app_bundle_apks.py create mode 100644 android/gyp/create_app_bundle_apks.pydeps create mode 100755 android/gyp/create_bundle_wrapper_script.py create mode 100644 android/gyp/create_bundle_wrapper_script.pydeps create mode 100755 android/gyp/create_java_binary_script.py create mode 100644 android/gyp/create_java_binary_script.pydeps create mode 100755 android/gyp/create_r_java.py create mode 100644 android/gyp/create_r_java.pydeps create mode 100755 android/gyp/create_r_txt.py create mode 100644 android/gyp/create_r_txt.pydeps create mode 100755 android/gyp/create_size_info_files.py create mode 100644 android/gyp/create_size_info_files.pydeps create mode 100755 android/gyp/create_stub_manifest.py create mode 100755 android/gyp/create_test_apk_wrapper_script.py create mode 100644 android/gyp/create_test_apk_wrapper_script.pydeps create mode 100755 android/gyp/create_ui_locale_resources.py create mode 100644 android/gyp/create_ui_locale_resources.pydeps create mode 100755 android/gyp/create_unwind_table.py create mode 100755 android/gyp/create_unwind_table_tests.py create mode 100755 android/gyp/dex.py create mode 100644 android/gyp/dex.pydeps create mode 100755 android/gyp/dex_test.py create mode 100755 android/gyp/dist_aar.py create mode 100644 android/gyp/dist_aar.pydeps create mode 100755 android/gyp/extract_unwind_tables.py create mode 100755 android/gyp/extract_unwind_tables_tests.py create mode 100755 android/gyp/filter_zip.py create mode 100644 android/gyp/filter_zip.pydeps create mode 100644 android/gyp/finalize_apk.py create mode 100755 android/gyp/find.py create mode 100755 android/gyp/flatc_java.py create mode 100644 
android/gyp/flatc_java.pydeps create mode 100755 android/gyp/gcc_preprocess.py create mode 100644 android/gyp/gcc_preprocess.pydeps create mode 100755 android/gyp/generate_android_wrapper.py create mode 100755 android/gyp/generate_linker_version_script.py create mode 100644 android/gyp/generate_linker_version_script.pydeps create mode 100755 android/gyp/ijar.py create mode 100644 android/gyp/ijar.pydeps create mode 100755 android/gyp/jacoco_instr.py create mode 100644 android/gyp/jacoco_instr.pydeps create mode 100755 android/gyp/java_cpp_enum.py create mode 100644 android/gyp/java_cpp_enum.pydeps create mode 100755 android/gyp/java_cpp_enum_tests.py create mode 100755 android/gyp/java_cpp_features.py create mode 100644 android/gyp/java_cpp_features.pydeps create mode 100755 android/gyp/java_cpp_features_tests.py create mode 100755 android/gyp/java_cpp_strings.py create mode 100644 android/gyp/java_cpp_strings.pydeps create mode 100755 android/gyp/java_cpp_strings_tests.py create mode 100755 android/gyp/java_google_api_keys.py create mode 100644 android/gyp/java_google_api_keys.pydeps create mode 100755 android/gyp/java_google_api_keys_tests.py create mode 100755 android/gyp/javac_output_processor.py create mode 100755 android/gyp/jinja_template.py create mode 100644 android/gyp/jinja_template.pydeps create mode 100755 android/gyp/lint.py create mode 100644 android/gyp/lint.pydeps create mode 100755 android/gyp/merge_manifest.py create mode 100644 android/gyp/merge_manifest.pydeps create mode 100755 android/gyp/nocompile_test.py create mode 100755 android/gyp/optimize_resources.py create mode 100644 android/gyp/optimize_resources.pydeps create mode 100755 android/gyp/prepare_resources.py create mode 100644 android/gyp/prepare_resources.pydeps create mode 100755 android/gyp/process_native_prebuilt.py create mode 100644 android/gyp/process_native_prebuilt.pydeps create mode 100755 android/gyp/proguard.py create mode 100644 android/gyp/proguard.pydeps create mode 100644 android/gyp/proto/Configuration_pb2.py create mode 100644 android/gyp/proto/README.md create mode 100644 android/gyp/proto/Resources_pb2.py create mode 100644 android/gyp/proto/__init__.py create mode 100755 android/gyp/system_image_apks.py create mode 100644 android/gyp/system_image_apks.pydeps create mode 100644 android/gyp/test/BUILD.gn create mode 100644 android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java create mode 100644 android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java create mode 100755 android/gyp/trace_event_bytecode_rewriter.py create mode 100644 android/gyp/trace_event_bytecode_rewriter.pydeps create mode 100755 android/gyp/turbine.py create mode 100644 android/gyp/turbine.pydeps create mode 100755 android/gyp/unused_resources.py create mode 100644 android/gyp/unused_resources.pydeps create mode 100644 android/gyp/util/__init__.py create mode 100644 android/gyp/util/build_utils.py create mode 100755 android/gyp/util/build_utils_test.py create mode 100644 android/gyp/util/diff_utils.py create mode 100644 android/gyp/util/jar_info_utils.py create mode 100644 android/gyp/util/java_cpp_utils.py create mode 100644 android/gyp/util/manifest_utils.py create mode 100755 android/gyp/util/manifest_utils_test.py create mode 100644 android/gyp/util/md5_check.py create mode 100755 android/gyp/util/md5_check_test.py create mode 100644 android/gyp/util/parallel.py create mode 100644 android/gyp/util/protoresources.py create mode 100644 android/gyp/util/resource_utils.py create mode 100755 
android/gyp/util/resource_utils_test.py create mode 100644 android/gyp/util/resources_parser.py create mode 100644 android/gyp/util/server_utils.py create mode 100755 android/gyp/validate_inputs.py create mode 100755 android/gyp/validate_static_library_dex_references.py create mode 100644 android/gyp/validate_static_library_dex_references.pydeps create mode 100755 android/gyp/write_build_config.py create mode 100644 android/gyp/write_build_config.pydeps create mode 100755 android/gyp/write_native_libraries_java.py create mode 100644 android/gyp/write_native_libraries_java.pydeps create mode 100755 android/gyp/zip.py create mode 100644 android/gyp/zip.pydeps create mode 100755 android/host_heartbeat.py create mode 100644 android/incremental_install/BUILD.gn create mode 100644 android/incremental_install/README.md create mode 100644 android/incremental_install/__init__.py create mode 100755 android/incremental_install/generate_android_manifest.py create mode 100644 android/incremental_install/generate_android_manifest.pydeps create mode 100755 android/incremental_install/installer.py create mode 100644 android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java create mode 100644 android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java create mode 100644 android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java create mode 100644 android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java create mode 100644 android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java create mode 100644 android/incremental_install/java/org/chromium/incrementalinstall/SecondInstrumentation.java create mode 100644 android/incremental_install/third_party/AndroidHiddenApiBypass/BUILD.gn create mode 100644 android/incremental_install/third_party/AndroidHiddenApiBypass/LICENSE create mode 100644 android/incremental_install/third_party/AndroidHiddenApiBypass/README.chromium create mode 100644 android/incremental_install/third_party/AndroidHiddenApiBypass/README.md create mode 100644 android/incremental_install/third_party/AndroidHiddenApiBypass/library/src/main/java/org/lsposed/hiddenapibypass/Helper.java create mode 100644 android/incremental_install/third_party/AndroidHiddenApiBypass/library/src/main/java/org/lsposed/hiddenapibypass/HiddenApiBypass.java create mode 100644 android/incremental_install/third_party/AndroidHiddenApiBypass/local_modifications/org/lsposed/hiddenapibypass/library/BuildConfig.java create mode 100644 android/incremental_install/third_party/AndroidHiddenApiBypass/stub/src/main/java/dalvik/system/VMRuntime.java create mode 100755 android/incremental_install/write_installer_json.py create mode 100644 android/incremental_install/write_installer_json.pydeps create mode 100644 android/java/src/org/chromium/build/annotations/AlwaysInline.java create mode 100644 android/java/src/org/chromium/build/annotations/CheckDiscard.java create mode 100644 android/java/src/org/chromium/build/annotations/DoNotClassMerge.java create mode 100644 android/java/src/org/chromium/build/annotations/DoNotInline.java create mode 100644 android/java/src/org/chromium/build/annotations/DoNotStripLogs.java create mode 100644 android/java/src/org/chromium/build/annotations/IdentifierNameString.java create mode 100644 android/java/src/org/chromium/build/annotations/MainDex.java create mode 100644 android/java/src/org/chromium/build/annotations/MockedInTests.java create mode 100644 
android/java/src/org/chromium/build/annotations/UsedByReflection.java create mode 100644 android/java/templates/BuildConfig.template create mode 100644 android/java/templates/ProductConfig.template create mode 100644 android/java/test/DefaultLocaleLintTest.java create mode 100644 android/java/test/NewApiLintTest.java create mode 100644 android/java/test/NoSignatureChangeIncrementalJavacTestHelper.template create mode 100644 android/java/test/NoSignatureChangeIncrementalJavacTestHelper2.java create mode 100644 android/java/test/missing_symbol/B.java create mode 100644 android/java/test/missing_symbol/D.template create mode 100644 android/java/test/missing_symbol/Importer.template create mode 100644 android/java/test/missing_symbol/ImportsSubB.java create mode 100644 android/java/test/missing_symbol/c.jar create mode 100644 android/java/test/missing_symbol/sub/BInMethodSignature.java create mode 100644 android/java/test/missing_symbol/sub/SubB.java create mode 100644 android/junit/AndroidManifest_mergetest.xml create mode 100644 android/junit/res/values/strings.xml create mode 100644 android/junit/src/org/chromium/build/AndroidAssetsTest.java create mode 100644 android/junit/src/org/chromium/build/IncrementalJavacTest.java create mode 100755 android/lighttpd_server.py create mode 100755 android/list_class_verification_failures.py create mode 100755 android/list_class_verification_failures_test.py create mode 100755 android/list_java_targets.py create mode 100644 android/main_dex_classes.flags create mode 100755 android/method_count.py create mode 100644 android/native_flags/BUILD.gn create mode 100755 android/native_flags/argcapture.py create mode 100644 android/native_flags/empty.cc create mode 100755 android/print_cipd_version.py create mode 100755 android/provision_devices.py create mode 100644 android/pylib/__init__.py create mode 100644 android/pylib/android/__init__.py create mode 100644 android/pylib/android/logcat_symbolizer.py create mode 100644 android/pylib/base/__init__.py create mode 100644 android/pylib/base/base_test_result.py create mode 100644 android/pylib/base/base_test_result_unittest.py create mode 100644 android/pylib/base/environment.py create mode 100644 android/pylib/base/environment_factory.py create mode 100644 android/pylib/base/mock_environment.py create mode 100644 android/pylib/base/mock_test_instance.py create mode 100644 android/pylib/base/output_manager.py create mode 100644 android/pylib/base/output_manager_factory.py create mode 100644 android/pylib/base/output_manager_test_case.py create mode 100644 android/pylib/base/test_collection.py create mode 100644 android/pylib/base/test_exception.py create mode 100644 android/pylib/base/test_instance.py create mode 100644 android/pylib/base/test_instance_factory.py create mode 100644 android/pylib/base/test_run.py create mode 100644 android/pylib/base/test_run_factory.py create mode 100644 android/pylib/base/test_server.py create mode 100644 android/pylib/constants/__init__.py create mode 100644 android/pylib/constants/host_paths.py create mode 100755 android/pylib/constants/host_paths_unittest.py create mode 100644 android/pylib/content_settings.py create mode 100644 android/pylib/device/__init__.py create mode 100644 android/pylib/device/commands/BUILD.gn create mode 100644 android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java create mode 100644 android/pylib/device_settings.py create mode 100644 android/pylib/dex/__init__.py create mode 100755 android/pylib/dex/dex_parser.py 
create mode 100644 android/pylib/gtest/__init__.py create mode 100644 android/pylib/gtest/filter/OWNERS create mode 100644 android/pylib/gtest/filter/base_unittests_disabled create mode 100644 android/pylib/gtest/filter/base_unittests_emulator_additional_disabled create mode 100644 android/pylib/gtest/filter/breakpad_unittests_disabled create mode 100644 android/pylib/gtest/filter/content_browsertests_disabled create mode 100644 android/pylib/gtest/filter/unit_tests_disabled create mode 100644 android/pylib/gtest/gtest_config.py create mode 100644 android/pylib/gtest/gtest_test_instance.py create mode 100755 android/pylib/gtest/gtest_test_instance_test.py create mode 100644 android/pylib/instrumentation/__init__.py create mode 100644 android/pylib/instrumentation/instrumentation_parser.py create mode 100755 android/pylib/instrumentation/instrumentation_parser_test.py create mode 100644 android/pylib/instrumentation/instrumentation_test_instance.py create mode 100755 android/pylib/instrumentation/instrumentation_test_instance_test.py create mode 100644 android/pylib/instrumentation/json_perf_parser.py create mode 100644 android/pylib/instrumentation/render_test.html.jinja create mode 100644 android/pylib/instrumentation/test_result.py create mode 100644 android/pylib/junit/__init__.py create mode 100644 android/pylib/junit/junit_test_instance.py create mode 100644 android/pylib/local/__init__.py create mode 100644 android/pylib/local/device/__init__.py create mode 100644 android/pylib/local/device/local_device_environment.py create mode 100644 android/pylib/local/device/local_device_gtest_run.py create mode 100755 android/pylib/local/device/local_device_gtest_run_test.py create mode 100644 android/pylib/local/device/local_device_instrumentation_test_run.py create mode 100755 android/pylib/local/device/local_device_instrumentation_test_run_test.py create mode 100644 android/pylib/local/device/local_device_monkey_test_run.py create mode 100644 android/pylib/local/device/local_device_test_run.py create mode 100755 android/pylib/local/device/local_device_test_run_test.py create mode 100644 android/pylib/local/emulator/OWNERS create mode 100644 android/pylib/local/emulator/__init__.py create mode 100644 android/pylib/local/emulator/avd.py create mode 100644 android/pylib/local/emulator/ini.py create mode 100755 android/pylib/local/emulator/ini_test.py create mode 100644 android/pylib/local/emulator/local_emulator_environment.py create mode 100644 android/pylib/local/emulator/proto/__init__.py create mode 100644 android/pylib/local/emulator/proto/avd.proto create mode 100644 android/pylib/local/emulator/proto/avd_pb2.py create mode 100644 android/pylib/local/local_test_server_spawner.py create mode 100644 android/pylib/local/machine/__init__.py create mode 100644 android/pylib/local/machine/local_machine_environment.py create mode 100644 android/pylib/local/machine/local_machine_junit_test_run.py create mode 100755 android/pylib/local/machine/local_machine_junit_test_run_test.py create mode 100644 android/pylib/monkey/__init__.py create mode 100644 android/pylib/monkey/monkey_test_instance.py create mode 100644 android/pylib/output/__init__.py create mode 100644 android/pylib/output/local_output_manager.py create mode 100755 android/pylib/output/local_output_manager_test.py create mode 100644 android/pylib/output/noop_output_manager.py create mode 100755 android/pylib/output/noop_output_manager_test.py create mode 100644 android/pylib/output/remote_output_manager.py create mode 100755 
android/pylib/output/remote_output_manager_test.py create mode 100644 android/pylib/pexpect.py create mode 100755 android/pylib/restart_adbd.sh create mode 100644 android/pylib/results/__init__.py create mode 100644 android/pylib/results/flakiness_dashboard/__init__.py create mode 100644 android/pylib/results/flakiness_dashboard/json_results_generator.py create mode 100644 android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py create mode 100644 android/pylib/results/flakiness_dashboard/results_uploader.py create mode 100644 android/pylib/results/json_results.py create mode 100755 android/pylib/results/json_results_test.py create mode 100644 android/pylib/results/presentation/__init__.py create mode 100644 android/pylib/results/presentation/javascript/main_html.js create mode 100755 android/pylib/results/presentation/standard_gtest_merge.py create mode 100644 android/pylib/results/presentation/template/main.html create mode 100644 android/pylib/results/presentation/template/table.html create mode 100755 android/pylib/results/presentation/test_results_presentation.py create mode 100644 android/pylib/results/presentation/test_results_presentation.pydeps create mode 100644 android/pylib/results/report_results.py create mode 100644 android/pylib/symbols/__init__.py create mode 100644 android/pylib/symbols/deobfuscator.py create mode 100644 android/pylib/symbols/expensive_line_transformer.py create mode 100644 android/pylib/symbols/mock_addr2line/__init__.py create mode 100755 android/pylib/symbols/mock_addr2line/mock_addr2line create mode 100644 android/pylib/symbols/stack_symbolizer.py create mode 100644 android/pylib/utils/__init__.py create mode 100644 android/pylib/utils/app_bundle_utils.py create mode 100644 android/pylib/utils/argparse_utils.py create mode 100644 android/pylib/utils/chrome_proxy_utils.py create mode 100755 android/pylib/utils/chrome_proxy_utils_test.py create mode 100644 android/pylib/utils/decorators.py create mode 100755 android/pylib/utils/decorators_test.py create mode 100644 android/pylib/utils/device_dependencies.py create mode 100755 android/pylib/utils/device_dependencies_test.py create mode 100644 android/pylib/utils/dexdump.py create mode 100755 android/pylib/utils/dexdump_test.py create mode 100644 android/pylib/utils/gold_utils.py create mode 100755 android/pylib/utils/gold_utils_test.py create mode 100644 android/pylib/utils/google_storage_helper.py create mode 100644 android/pylib/utils/instrumentation_tracing.py create mode 100644 android/pylib/utils/local_utils.py create mode 100644 android/pylib/utils/logdog_helper.py create mode 100644 android/pylib/utils/logging_utils.py create mode 100755 android/pylib/utils/maven_downloader.py create mode 100644 android/pylib/utils/repo_utils.py create mode 100644 android/pylib/utils/shared_preference_utils.py create mode 100644 android/pylib/utils/simpleperf.py create mode 100644 android/pylib/utils/test_filter.py create mode 100755 android/pylib/utils/test_filter_test.py create mode 100644 android/pylib/utils/time_profile.py create mode 100644 android/pylib/utils/xvfb.py create mode 100644 android/pylib/valgrind_tools.py create mode 100644 android/pylintrc create mode 100644 android/resource_sizes.gni create mode 100755 android/resource_sizes.py create mode 100644 android/resource_sizes.pydeps create mode 100755 android/screenshot.py create mode 100644 android/stacktrace/BUILD.gn create mode 100644 android/stacktrace/README.md create mode 100755 android/stacktrace/crashpad_stackwalker.py 
create mode 100644 android/stacktrace/java/org/chromium/build/FlushingReTrace.java create mode 100755 android/stacktrace/java_deobfuscate.py create mode 100644 android/stacktrace/java_deobfuscate_java.jar create mode 100755 android/stacktrace/java_deobfuscate_test.py create mode 100755 android/stacktrace/stackwalker.py create mode 100644 android/test/BUILD.gn create mode 100644 android/test/incremental_javac_gn/BUILD.gn create mode 100755 android/test/incremental_javac_gn/incremental_javac_test_android_library.py create mode 100644 android/test/missing_symbol_test.gni create mode 100644 android/test/nocompile_gn/BUILD.gn create mode 100644 android/test/nocompile_gn/nocompile_sources.gni create mode 100755 android/test_runner.py create mode 100644 android/test_runner.pydeps create mode 100755 android/test_wrapper/logdog_wrapper.py create mode 100644 android/test_wrapper/logdog_wrapper.pydeps create mode 100644 android/tests/symbolize/Makefile create mode 100644 android/tests/symbolize/a.cc create mode 100644 android/tests/symbolize/b.cc create mode 100644 android/tests/symbolize/liba.so create mode 100644 android/tests/symbolize/libb.so create mode 100755 android/tombstones.py create mode 100644 android/unused_resources/BUILD.gn create mode 100644 android/unused_resources/UnusedResources.java create mode 100755 android/update_deps/update_third_party_deps.py create mode 100755 android/update_verification.py create mode 100755 android/video_recorder.py create mode 100644 apple/OWNERS create mode 100644 apple/README.md create mode 100644 apple/apple_info_plist.gni create mode 100644 apple/compile_entitlements.gni create mode 100644 apple/compile_plist.gni create mode 100644 apple/convert_plist.gni create mode 100644 apple/plist_util.py create mode 100644 apple/tweak_info_plist.gni create mode 100755 apple/tweak_info_plist.py create mode 100644 apple/write_pkg_info.py create mode 100755 apple/xcrun.py create mode 100644 args/OWNERS create mode 100644 args/README.txt create mode 100644 args/chromeos/README.md create mode 100644 args/headless.gn create mode 100755 build-ctags.sh create mode 100644 build_config.h create mode 100644 buildflag.h create mode 100644 buildflag_header.gni create mode 100755 check_gn_headers.py create mode 100755 check_gn_headers_unittest.py create mode 100644 check_gn_headers_whitelist.txt create mode 100755 check_return_value.py create mode 100644 chromeos/.style.yapf create mode 100644 chromeos/OWNERS create mode 100644 chromeos/PRESUBMIT.py create mode 100755 chromeos/generate_skylab_deps.py create mode 100755 chromeos/generate_skylab_deps_test.py create mode 100644 chromeos/pylintrc create mode 100755 chromeos/test_runner.py create mode 100755 chromeos/test_runner_test.py create mode 100644 ciopfs.sha1 create mode 100644 cipd/cipd.gni create mode 100755 cipd/cipd_from_file.py create mode 100755 clobber.py create mode 100755 clobber_unittest.py create mode 100644 compiled_action.gni create mode 100755 compute_build_timestamp.py create mode 100644 config/BUILD.gn create mode 100644 config/BUILDCONFIG.gn create mode 100644 config/OWNERS create mode 100644 config/aix/BUILD.gn create mode 100644 config/android/BUILD.gn create mode 100644 config/android/DIR_METADATA create mode 100644 config/android/OWNERS create mode 100644 config/android/abi.gni create mode 100644 config/android/android_nocompile.gni create mode 100644 config/android/build_vars.gni create mode 100644 config/android/channel.gni create mode 100644 config/android/config.gni create mode 100644 
config/android/copy_ex.gni create mode 100644 config/android/create_unwind_table.gni create mode 100644 config/android/extract_unwind_tables.gni create mode 100644 config/android/internal_rules.gni create mode 100644 config/android/linker_version_script.gni create mode 100644 config/android/rules.gni create mode 100644 config/android/sdk.gni create mode 100644 config/android/system_image.gni create mode 100644 config/android/test/proto/BUILD.gn create mode 100644 config/android/test/proto/absolute_dep/absolute_dep.proto create mode 100644 config/android/test/proto/relative_dep/relative_dep.proto create mode 100644 config/android/test/proto/root/absolute_child.proto create mode 100644 config/android/test/proto/root/absolute_root.proto create mode 100644 config/android/test/proto/root/relative_child.proto create mode 100644 config/android/test/proto/root/relative_root.proto create mode 100644 config/android/test/resource_overlay/BUILD.gn create mode 100644 config/android/test/resource_overlay/java/res_template/values/values.xml create mode 100644 config/android/test/resource_overlay/java/src/org/chromium/build/resource_overlay/ResourceOverlayTest.java create mode 100644 config/apple/BUILD.gn create mode 100644 config/apple/OWNERS create mode 100755 config/apple/sdk_info.py create mode 100644 config/apple/symbols.gni create mode 100644 config/arm.gni create mode 100644 config/buildflags_paint_preview.gni create mode 100644 config/c++/BUILD.gn create mode 100644 config/c++/c++.gni create mode 100644 config/c++/libc++.natvis create mode 100644 config/chrome_build.gni create mode 100644 config/chromebox_for_meetings/BUILD.gn create mode 100644 config/chromebox_for_meetings/OWNERS create mode 100644 config/chromebox_for_meetings/README.md create mode 100644 config/chromebox_for_meetings/buildflags.gni create mode 100644 config/chromecast/BUILD.gn create mode 100644 config/chromecast/OWNERS create mode 100644 config/chromecast_build.gni create mode 100644 config/chromeos/BUILD.gn create mode 100644 config/chromeos/args.gni create mode 100644 config/chromeos/rules.gni create mode 100644 config/chromeos/ui_mode.gni create mode 100644 config/clang/BUILD.gn create mode 100644 config/clang/clang.gni create mode 100644 config/compiler/BUILD.gn create mode 100644 config/compiler/compiler.gni create mode 100644 config/compiler/pgo/BUILD.gn create mode 100644 config/compiler/pgo/pgo.gni create mode 100644 config/compute_inputs_for_analyze.gni create mode 100644 config/coverage/BUILD.gn create mode 100644 config/coverage/OWNERS create mode 100644 config/coverage/coverage.gni create mode 100644 config/cronet/OWNERS create mode 100644 config/cronet/config.gni create mode 100644 config/dcheck_always_on.gni create mode 100644 config/devtools.gni create mode 100644 config/features.gni create mode 100644 config/freetype/BUILD.gn create mode 100644 config/freetype/OWNERS create mode 100644 config/freetype/freetype.gni create mode 100644 config/fuchsia/BUILD.gn create mode 100644 config/fuchsia/DIR_METADATA create mode 100644 config/fuchsia/OWNERS create mode 100755 config/fuchsia/build_symbol_archive.py create mode 100644 config/fuchsia/config.gni create mode 100644 config/fuchsia/extend_fvm.py create mode 100644 config/fuchsia/fuchsia_package_metadata.gni create mode 100644 config/fuchsia/generate_runner_scripts.gni create mode 100644 config/fuchsia/packaged_content_embedder_excluded_dirs.gni create mode 100644 config/fuchsia/size_optimized_cast_receiver_args.gn create mode 100644 
config/fuchsia/size_optimized_cast_receiver_args_internal.gn create mode 100644 config/fuchsia/sizes.gni create mode 100644 config/fuchsia/symbol_archive.gni create mode 100644 config/fuchsia/test/OWNERS create mode 100644 config/fuchsia/test/README.md create mode 100644 config/fuchsia/test/archivist.shard.test-cml create mode 100644 config/fuchsia/test/audio_output.shard.test-cml create mode 100644 config/fuchsia/test/chromium_system_test_facet.shard.test-cml create mode 100644 config/fuchsia/test/chromium_test_facet.shard.test-cml create mode 100644 config/fuchsia/test/context_provider.shard.test-cml create mode 100644 config/fuchsia/test/elf_test_ambient_exec_runner.shard.test-cml create mode 100644 config/fuchsia/test/elf_test_runner.shard.test-cml create mode 100644 config/fuchsia/test/fonts.shard.test-cml create mode 100644 config/fuchsia/test/gfx_test_ui_stack.shard.test-cml create mode 100644 config/fuchsia/test/logger.shard.test-cml create mode 100644 config/fuchsia/test/mark_vmo_executable.shard.test-cml create mode 100644 config/fuchsia/test/minimum.shard.test-cml create mode 100644 config/fuchsia/test/network.shard.test-cml create mode 100644 config/fuchsia/test/platform_video_codecs.shard.test-cml create mode 100644 config/fuchsia/test/present_view.shard.test-cml create mode 100644 config/fuchsia/test/sysmem.shard.test-cml create mode 100644 config/fuchsia/test/system_test_minimum.shard.test-cml create mode 100644 config/fuchsia/test/test_fonts.shard.test-cml create mode 100644 config/fuchsia/test/test_ui_stack.shard.test-cml create mode 100644 config/fuchsia/test/web_instance.shard.test-cml create mode 100644 config/gcc/BUILD.gn create mode 100755 config/get_host_byteorder.py create mode 100644 config/host_byteorder.gni create mode 100644 config/ios/BUILD.gn create mode 100644 config/ios/BuildInfo.plist create mode 100644 config/ios/Host-Info.plist create mode 100644 config/ios/Module-Info.plist create mode 100644 config/ios/OWNERS create mode 100644 config/ios/asset_catalog.gni create mode 100644 config/ios/bundle_data_from_filelist.gni create mode 100644 config/ios/codesign.py create mode 100644 config/ios/compile_ib_files.py create mode 100644 config/ios/compile_xcassets_unittests.py create mode 100644 config/ios/config.gni create mode 100644 config/ios/dummy.py create mode 100644 config/ios/entitlements.plist create mode 100644 config/ios/find_signing_identity.py create mode 100644 config/ios/generate_umbrella_header.py create mode 100644 config/ios/hardlink.py create mode 100644 config/ios/ios_sdk.gni create mode 100644 config/ios/ios_sdk_overrides.gni create mode 100644 config/ios/ios_test_runner_wrapper.gni create mode 100644 config/ios/ios_test_runner_xcuitest.gni create mode 100644 config/ios/resources/XCTRunnerAddition+Info.plist create mode 100644 config/ios/rules.gni create mode 100644 config/ios/strip_arm64e.py create mode 100644 config/ios/swift_source_set.gni create mode 100644 config/ios/write_framework_hmap.py create mode 100644 config/ios/write_framework_modulemap.py create mode 100644 config/ios/xctest_shell.mm create mode 100644 config/linux/BUILD.gn create mode 100644 config/linux/OWNERS create mode 100644 config/linux/atk/BUILD.gn create mode 100644 config/linux/atspi2/BUILD.gn create mode 100644 config/linux/dbus/BUILD.gn create mode 100644 config/linux/dri/BUILD.gn create mode 100644 config/linux/gtk/BUILD.gn create mode 100644 config/linux/gtk/gtk.gni create mode 100644 config/linux/libdrm/BUILD.gn create mode 100644 config/linux/libffi/BUILD.gn 
create mode 100644 config/linux/libva/BUILD.gn create mode 100644 config/linux/nss/BUILD.gn create mode 100644 config/linux/pangocairo/BUILD.gn create mode 100644 config/linux/pangocairo/pangocairo.gni create mode 100755 config/linux/pkg-config.py create mode 100644 config/linux/pkg_config.gni create mode 100644 config/locales.gni create mode 100644 config/logging.gni create mode 100644 config/mac/BUILD.gn create mode 100644 config/mac/BuildInfo.plist create mode 100644 config/mac/OWNERS create mode 100644 config/mac/mac_sdk.gni create mode 100644 config/mac/mac_sdk_overrides.gni create mode 100644 config/mac/package_framework.py create mode 100644 config/mac/prepare_framework_version.py create mode 100644 config/mac/rules.gni create mode 100644 config/mips.gni create mode 100644 config/nacl/BUILD.gn create mode 100644 config/nacl/config.gni create mode 100644 config/nacl/host_toolchain.gni create mode 100644 config/nacl/rules.gni create mode 100644 config/ozone.gni create mode 100644 config/ozone_extra.gni create mode 100644 config/pch.gni create mode 100644 config/posix/BUILD.gn create mode 100644 config/profiling/OWNERS create mode 100644 config/profiling/profiling.gni create mode 100644 config/python.gni create mode 100644 config/riscv.gni create mode 100644 config/rts.gni create mode 100644 config/rust.gni create mode 100644 config/sanitizers/BUILD.gn create mode 100644 config/sanitizers/OWNERS create mode 100644 config/sanitizers/sanitizers.gni create mode 100644 config/siso/.gitignore create mode 100644 config/siso/OWNERS create mode 100644 config/siso/README.md create mode 100644 config/siso/clang_linux.star create mode 100755 config/siso/configure_siso.py create mode 100644 config/siso/linux.star create mode 100644 config/siso/mac.star create mode 100644 config/siso/main.star create mode 100644 config/siso/mojo.star create mode 100644 config/siso/nacl_linux.star create mode 100644 config/siso/remote_exec_wrapper.star create mode 100644 config/siso/simple.star create mode 100644 config/siso/windows.star create mode 100644 config/sysroot.gni create mode 100644 config/ui.gni create mode 100644 config/v8_target_cpu.gni create mode 100644 config/win/BUILD.gn create mode 100644 config/win/console_app.gni create mode 100644 config/win/control_flow_guard.gni create mode 100644 config/win/manifest.gni create mode 100644 config/win/visual_studio_version.gni create mode 100644 config/zip.gni create mode 100644 config/zos/BUILD.gn create mode 100755 copy_test_data_ios.py create mode 100755 cp.py create mode 100755 del_ninja_deps_cache.py create mode 100755 detect_host_arch.py create mode 100755 dir_exists.py create mode 100644 docs/debugging_slow_builds.md create mode 100644 docs/mac_hermetic_toolchain.md create mode 100644 docs/writing_gn_templates.md create mode 100644 dotfile_settings.gni create mode 100755 download_nacl_toolchains.py create mode 100755 env_dump.py create mode 100755 extract_from_cab.py create mode 100755 extract_partition.py create mode 100755 find_depot_tools.py create mode 100755 fix_gn_headers.py create mode 100644 fuchsia/COMMON_METADATA create mode 100644 fuchsia/DIR_METADATA create mode 100644 fuchsia/OWNERS create mode 100644 fuchsia/PRESUBMIT.py create mode 100644 fuchsia/SECURITY_OWNERS create mode 100644 fuchsia/__init__.py create mode 100755 fuchsia/binary_size_differ.py create mode 100755 fuchsia/binary_size_differ_test.py create mode 100755 fuchsia/binary_sizes.py create mode 100755 fuchsia/binary_sizes_test.py create mode 100644 fuchsia/cipd/BUILD.gn create 
mode 100644 fuchsia/cipd/DIR_METADATA create mode 100644 fuchsia/cipd/README.md create mode 100644 fuchsia/cipd/version.template create mode 100644 fuchsia/gcs_download.py create mode 100755 fuchsia/gcs_download_test.py create mode 100644 fuchsia/linux_internal.sdk.sha1 create mode 100644 fuchsia/sdk-bucket.txt create mode 100644 fuchsia/sdk-hash-files.list create mode 100644 fuchsia/test/.coveragerc create mode 100644 fuchsia/test/.style.yapf create mode 100644 fuchsia/test/OWNERS create mode 100644 fuchsia/test/PRESUBMIT.py create mode 100644 fuchsia/test/base_ermine_ctl.py create mode 100755 fuchsia/test/base_ermine_ctl_unittests.py create mode 100644 fuchsia/test/common.py create mode 100755 fuchsia/test/common_unittests.py create mode 100644 fuchsia/test/compatible_utils.py create mode 100755 fuchsia/test/compatible_utils_unittests.py create mode 100755 fuchsia/test/coveragetest.py create mode 100755 fuchsia/test/deploy_to_fuchsia.py create mode 100755 fuchsia/test/deploy_to_fuchsia_unittests.py create mode 100644 fuchsia/test/ermine_ctl.py create mode 100644 fuchsia/test/ffx_emulator.py create mode 100755 fuchsia/test/ffx_emulator_unittests.py create mode 100644 fuchsia/test/ffx_integration.py create mode 100755 fuchsia/test/flash_device.py create mode 100755 fuchsia/test/flash_device_unittests.py create mode 100644 fuchsia/test/lockfile.py create mode 100755 fuchsia/test/log_manager.py create mode 100755 fuchsia/test/log_manager_unittests.py create mode 100755 fuchsia/test/publish_package.py create mode 100755 fuchsia/test/publish_package_unittests.py create mode 100644 fuchsia/test/pylintrc create mode 100644 fuchsia/test/run_blink_test.py create mode 100755 fuchsia/test/run_executable_test.py create mode 100755 fuchsia/test/run_pytype.py create mode 100644 fuchsia/test/run_telemetry_test.py create mode 100755 fuchsia/test/run_test.py create mode 100644 fuchsia/test/run_webpage_test.py create mode 100755 fuchsia/test/serve_repo.py create mode 100755 fuchsia/test/serve_repo_unittests.py create mode 100755 fuchsia/test/start_emulator.py create mode 100644 fuchsia/test/test_runner.py create mode 100644 fuchsia/test/test_server.py create mode 100755 fuchsia/test/test_server_unittests.py create mode 100755 fuchsia/update_images.py create mode 100755 fuchsia/update_images_test.py create mode 100755 fuchsia/update_product_bundles.py create mode 100755 fuchsia/update_product_bundles_test.py create mode 100755 fuchsia/update_sdk.py create mode 100755 fuchsia/update_sdk_test.py create mode 100755 gdb-add-index create mode 100755 get_landmines.py create mode 100755 get_symlink_targets.py create mode 100644 gn_helpers.py create mode 100755 gn_helpers_unittest.py create mode 100644 gn_logs.gni create mode 100644 gn_run_binary.py create mode 100755 install-build-deps.sh create mode 100755 install-chroot.sh create mode 100644 internal/README.chromium create mode 100644 ios/OWNERS create mode 100644 ios/PRESUBMIT.py create mode 100644 ios/extension_bundle_data.gni create mode 100644 ios/intent_definition.gni create mode 100644 ios/presubmit_support.py create mode 100755 ios/presubmit_support_test.py create mode 100644 ios/test_data/bar.html create mode 100644 ios/test_data/basic.filelist create mode 100644 ios/test_data/basic.globlist create mode 100644 ios/test_data/comment.filelist create mode 100644 ios/test_data/comment.globlist create mode 100644 ios/test_data/different_local_path.filelist create mode 100644 ios/test_data/different_local_path.globlist create mode 100644 
ios/test_data/duplicates.filelist create mode 100644 ios/test_data/duplicates.globlist create mode 100644 ios/test_data/exclusions.filelist create mode 100644 ios/test_data/exclusions.globlist create mode 100644 ios/test_data/extra.filelist create mode 100644 ios/test_data/extra.globlist create mode 100644 ios/test_data/foo.css create mode 100644 ios/test_data/ignore_outside_globlist_dir.filelist create mode 100644 ios/test_data/ignore_outside_globlist_dir.globlist create mode 100644 ios/test_data/missing.filelist create mode 100644 ios/test_data/missing.globlist create mode 100644 ios/test_data/outside_globlist_dir.filelist create mode 100644 ios/test_data/outside_globlist_dir.globlist create mode 100644 ios/test_data/reorder.filelist create mode 100644 ios/test_data/reorder.globlist create mode 100644 ios/test_data/repository_relative.filelist create mode 100644 ios/test_data/repository_relative.globlist create mode 100644 ios/test_data/subdirectory/baz.txt create mode 100755 ios/update_bundle_filelist.py create mode 100644 lacros/BUILD.gn create mode 100644 lacros/OWNERS create mode 100644 lacros/PRESUBMIT.py create mode 100644 lacros/README.md create mode 100644 lacros/lacros_resource_sizes.gni create mode 100755 lacros/lacros_resource_sizes.py create mode 100644 lacros/lacros_resource_sizes.pydeps create mode 100755 lacros/mojo_connection_lacros_launcher.py create mode 100755 lacros/test_runner.py create mode 100755 lacros/test_runner_test.py create mode 100644 landmine_utils.py create mode 100755 landmines.py create mode 100644 linux/BUILD.gn create mode 100644 linux/OWNERS create mode 100644 linux/chrome.map create mode 100644 linux/dump_app_syms.py create mode 100644 linux/extract_symbols.gni create mode 100755 linux/install-chromeos-fonts.py create mode 100644 linux/libncursesw/DIR_METADATA create mode 100644 linux/libncursesw/OWNERS create mode 100644 linux/libpci/BUILD.gn create mode 100644 linux/libudev/BUILD.gn create mode 100755 linux/rewrite_dirs.py create mode 100644 linux/strip_binary.gni create mode 100755 linux/strip_binary.py create mode 100755 linux/sysroot_scripts/build_and_upload.py create mode 100755 linux/sysroot_scripts/generate_keyring.sh create mode 100644 linux/sysroot_scripts/generated_package_lists/bullseye.amd64 create mode 100644 linux/sysroot_scripts/generated_package_lists/bullseye.arm create mode 100644 linux/sysroot_scripts/generated_package_lists/bullseye.arm64 create mode 100644 linux/sysroot_scripts/generated_package_lists/bullseye.armel create mode 100644 linux/sysroot_scripts/generated_package_lists/bullseye.i386 create mode 100644 linux/sysroot_scripts/generated_package_lists/bullseye.mips64el create mode 100644 linux/sysroot_scripts/generated_package_lists/bullseye.mipsel create mode 100755 linux/sysroot_scripts/install-sysroot.py create mode 100644 linux/sysroot_scripts/keyring.gpg create mode 100644 linux/sysroot_scripts/libxcomposite1-symbols create mode 100755 linux/sysroot_scripts/merge-package-lists.py create mode 100755 linux/sysroot_scripts/reversion_glibc.py create mode 100755 linux/sysroot_scripts/sysroot-creator-bullseye.sh create mode 100644 linux/sysroot_scripts/sysroot-creator.sh create mode 100644 linux/sysroot_scripts/sysroots.json create mode 100755 linux/sysroot_scripts/update-archive-timestamp.sh create mode 100644 linux/unbundle/README create mode 100644 linux/unbundle/absl_algorithm.gn create mode 100644 linux/unbundle/absl_base.gn create mode 100644 linux/unbundle/absl_cleanup.gn create mode 100644 
linux/unbundle/absl_container.gn create mode 100644 linux/unbundle/absl_debugging.gn create mode 100644 linux/unbundle/absl_flags.gn create mode 100644 linux/unbundle/absl_functional.gn create mode 100644 linux/unbundle/absl_hash.gn create mode 100644 linux/unbundle/absl_log.gn create mode 100644 linux/unbundle/absl_log_internal.gn create mode 100644 linux/unbundle/absl_memory.gn create mode 100644 linux/unbundle/absl_meta.gn create mode 100644 linux/unbundle/absl_numeric.gn create mode 100644 linux/unbundle/absl_random.gn create mode 100644 linux/unbundle/absl_status.gn create mode 100644 linux/unbundle/absl_strings.gn create mode 100644 linux/unbundle/absl_synchronization.gn create mode 100644 linux/unbundle/absl_time.gn create mode 100644 linux/unbundle/absl_types.gn create mode 100644 linux/unbundle/absl_utility.gn create mode 100644 linux/unbundle/brotli.gn create mode 100644 linux/unbundle/crc32c.gn create mode 100644 linux/unbundle/dav1d.gn create mode 100644 linux/unbundle/double-conversion.gn create mode 100644 linux/unbundle/ffmpeg.gn create mode 100644 linux/unbundle/flac.gn create mode 100644 linux/unbundle/fontconfig.gn create mode 100644 linux/unbundle/freetype.gn create mode 100644 linux/unbundle/harfbuzz-ng.gn create mode 100644 linux/unbundle/icu.gn create mode 100644 linux/unbundle/jsoncpp.gn create mode 100644 linux/unbundle/libXNVCtrl.gn create mode 100644 linux/unbundle/libaom.gn create mode 100644 linux/unbundle/libavif.gn create mode 100644 linux/unbundle/libdrm.gn create mode 100644 linux/unbundle/libevent.gn create mode 100644 linux/unbundle/libjpeg.gn create mode 100644 linux/unbundle/libpng.gn create mode 100644 linux/unbundle/libvpx.gn create mode 100644 linux/unbundle/libwebp.gn create mode 100644 linux/unbundle/libxml.gn create mode 100644 linux/unbundle/libxslt.gn create mode 100644 linux/unbundle/libyuv.gn create mode 100644 linux/unbundle/openh264.gn create mode 100644 linux/unbundle/opus.gn create mode 100644 linux/unbundle/re2.gn create mode 100755 linux/unbundle/remove_bundled_libraries.py create mode 100755 linux/unbundle/replace_gn_files.py create mode 100644 linux/unbundle/snappy.gn create mode 100644 linux/unbundle/swiftshader-SPIRV-Headers.gn create mode 100644 linux/unbundle/swiftshader-SPIRV-Tools.gn create mode 100644 linux/unbundle/vulkan-SPIRV-Headers.gn create mode 100644 linux/unbundle/vulkan-SPIRV-Tools.gn create mode 100644 linux/unbundle/woff2.gn create mode 100644 linux/unbundle/zlib.gn create mode 100755 locale_tool.py create mode 100644 mac/OWNERS create mode 100755 mac/find_sdk.py create mode 100755 mac/should_use_hermetic_xcode.py create mode 100755 mac_toolchain.py create mode 100644 metadata.json.in create mode 100644 nocompile.gni create mode 100644 noop.py create mode 100644 partitioned_shared_library.gni create mode 100644 precompile.cc create mode 100644 precompile.h create mode 100755 print_python_deps.py create mode 100644 private_code_test/BUILD.gn create mode 100644 private_code_test/README.md create mode 100755 private_code_test/list_gclient_deps.py create mode 100644 private_code_test/private_code_test.gni create mode 100755 private_code_test/private_code_test.py create mode 100755 protoc_java.py create mode 100644 protoc_java.pydeps create mode 100644 redirect_stdout.py create mode 100755 rm.py create mode 100644 rust/BUILD.gn create mode 100644 rust/OWNERS create mode 100644 rust/analyze.gni create mode 100644 rust/cargo_crate.gni create mode 100755 rust/collect_rust_sources.py create mode 100644 
rust/filter_clang_args.py create mode 100644 rust/rs_bindings_from_cc.gni create mode 100755 rust/run_bindgen.py create mode 100755 rust/run_build_script.py create mode 100755 rust/run_rs_bindings_from_cc.py create mode 100644 rust/rust_bindgen.gni create mode 100644 rust/rust_executable.gni create mode 100644 rust/rust_macro.gni create mode 100644 rust/rust_shared_library.gni create mode 100644 rust/rust_static_library.gni create mode 100644 rust/rust_target.gni create mode 100644 rust/rust_unit_test.gni create mode 100644 rust/rust_unit_tests_group.gni create mode 100755 rust/rustc_wrapper.py create mode 100644 rust/std/BUILD.gn create mode 100644 rust/std/fake_root/.cargo/config.toml create mode 100644 rust/std/fake_root/.gitignore create mode 100644 rust/std/fake_root/Cargo.toml create mode 100644 rust/std/fake_root/README.md create mode 100644 rust/std/fake_root/src/main.rs create mode 100755 rust/std/find_std_rlibs.py create mode 100644 rust/std/gnrt_config.toml create mode 100644 rust/std/immediate_crash.h create mode 100644 rust/std/remap_alloc.cc create mode 100644 rust/std/rules/BUILD.gn create mode 100644 rust/tests/BUILD.gn create mode 100644 rust/tests/bindgen_test/BUILD.gn create mode 100644 rust/tests/bindgen_test/lib.c create mode 100644 rust/tests/bindgen_test/lib.h create mode 100644 rust/tests/bindgen_test/lib2.h create mode 100644 rust/tests/bindgen_test/main.rs create mode 100644 rust/tests/bindgen_test/src/lib.rs create mode 100644 rust/tests/test_aliased_deps/BUILD.gn create mode 100644 rust/tests/test_aliased_deps/lib.rs create mode 100644 rust/tests/test_aliased_deps/main.rs create mode 100644 rust/tests/test_aliased_deps/real_name.rs create mode 100644 rust/tests/test_bin_crate/BUILD.gn create mode 100644 rust/tests/test_bin_crate/crate/build.rs create mode 100644 rust/tests/test_bin_crate/crate/src/main.rs create mode 100644 rust/tests/test_control_flow_guard/BUILD.gn create mode 100644 rust/tests/test_control_flow_guard/test_control_flow_guard.rs create mode 100644 rust/tests/test_cpp_including_rust/BUILD.gn create mode 100644 rust/tests/test_cpp_including_rust/main.cc create mode 100644 rust/tests/test_cpp_including_rust/unittests.cc create mode 100644 rust/tests/test_local_std/BUILD.gn create mode 100644 rust/tests/test_local_std/lib.rs create mode 100644 rust/tests/test_local_std/main.rs create mode 100644 rust/tests/test_proc_macro_crate/BUILD.gn create mode 100644 rust/tests/test_proc_macro_crate/crate/src/lib.rs create mode 100644 rust/tests/test_rlib_crate/BUILD.gn create mode 100644 rust/tests/test_rlib_crate/crate/build.rs create mode 100644 rust/tests/test_rlib_crate/crate/src/lib.rs create mode 100644 rust/tests/test_rlib_crate/crate/src/main.rs create mode 100644 rust/tests/test_rs_bindings_from_cc/BUILD.gn create mode 100644 rust/tests/test_rs_bindings_from_cc/main.rs create mode 100644 rust/tests/test_rs_bindings_from_cc/self_contained_target_header1.h create mode 100644 rust/tests/test_rs_bindings_from_cc/self_contained_target_header2.cc create mode 100644 rust/tests/test_rs_bindings_from_cc/self_contained_target_header2.h create mode 100644 rust/tests/test_rs_bindings_from_cc/target_depending_on_another.h create mode 100644 rust/tests/test_rust_exe/BUILD.gn create mode 100644 rust/tests/test_rust_exe/main.rs create mode 100644 rust/tests/test_rust_multiple_dep_versions_exe/BUILD.gn create mode 100644 rust/tests/test_rust_multiple_dep_versions_exe/main.rs create mode 100644 rust/tests/test_rust_multiple_dep_versions_exe/transitive_lib.rs create 
mode 100644 rust/tests/test_rust_multiple_dep_versions_exe/v1/BUILD.gn create mode 100644 rust/tests/test_rust_multiple_dep_versions_exe/v1/src/lib.rs create mode 100644 rust/tests/test_rust_multiple_dep_versions_exe/v2/BUILD.gn create mode 100644 rust/tests/test_rust_multiple_dep_versions_exe/v2/src/lib.rs create mode 100644 rust/tests/test_rust_shared_library/BUILD.gn create mode 100644 rust/tests/test_rust_shared_library/src/lib.rs create mode 100644 rust/tests/test_rust_static_library/BUILD.gn create mode 100644 rust/tests/test_rust_static_library/src/lib.rs create mode 100644 rust/tests/test_rust_static_library_non_standard_arrangement/BUILD.gn create mode 100644 rust/tests/test_rust_static_library_non_standard_arrangement/foo.rs create mode 100644 rust/tests/test_rust_unittests/BUILD.gn create mode 100644 rust/tests/test_rust_unittests/main.rs create mode 100644 rust/tests/test_serde_json_lenient/BUILD.gn create mode 100644 rust/tests/test_serde_json_lenient/lib.rs create mode 100644 rust/tests/test_serde_json_lenient/unittests.cc create mode 100644 rust/tests/test_simple_rust_exe/BUILD.gn create mode 100644 rust/tests/test_simple_rust_exe/main.rs create mode 100644 sample_arg_file.gn create mode 100644 sanitize-mac-build-log.sed create mode 100755 sanitize-mac-build-log.sh create mode 100644 sanitize-win-build-log.sed create mode 100755 sanitize-win-build-log.sh create mode 100644 sanitizers/OWNERS create mode 100644 sanitizers/asan_suppressions.cc create mode 100644 sanitizers/lsan_suppressions.cc create mode 100644 sanitizers/sanitizer_options.cc create mode 100644 sanitizers/tsan_suppressions.cc create mode 100644 shim_headers.gni create mode 100644 skia_gold_common/.style.yapf create mode 100644 skia_gold_common/OWNERS create mode 100644 skia_gold_common/PRESUBMIT.py create mode 100644 skia_gold_common/README.md create mode 100644 skia_gold_common/__init__.py create mode 100644 skia_gold_common/output_managerless_skia_gold_session.py create mode 100755 skia_gold_common/output_managerless_skia_gold_session_unittest.py create mode 100755 skia_gold_common/run_pytype.py create mode 100644 skia_gold_common/skia_gold_properties.py create mode 100755 skia_gold_common/skia_gold_properties_unittest.py create mode 100644 skia_gold_common/skia_gold_session.py create mode 100644 skia_gold_common/skia_gold_session_manager.py create mode 100755 skia_gold_common/skia_gold_session_manager_unittest.py create mode 100755 skia_gold_common/skia_gold_session_unittest.py create mode 100644 skia_gold_common/unittest_utils.py create mode 100644 symlink.gni create mode 100755 symlink.py create mode 100644 timestamp.gni create mode 100644 toolchain/BUILD.gn create mode 100644 toolchain/OWNERS create mode 100644 toolchain/aix/BUILD.gn create mode 100644 toolchain/android/BUILD.gn create mode 100644 toolchain/android/DIR_METADATA create mode 100644 toolchain/android/OWNERS create mode 100644 toolchain/apple/.style.yapf create mode 100644 toolchain/apple/BUILD.gn create mode 100644 toolchain/apple/OWNERS create mode 100644 toolchain/apple/filter_libtool.py create mode 100644 toolchain/apple/get_tool_mtime.py create mode 100755 toolchain/apple/linker_driver.py create mode 100644 toolchain/apple/toolchain.gni create mode 100644 toolchain/cc_wrapper.gni create mode 100755 toolchain/clang_code_coverage_wrapper.py create mode 100644 toolchain/concurrent_links.gni create mode 100644 toolchain/cros/BUILD.gn create mode 100644 toolchain/cros_toolchain.gni create mode 100644 toolchain/fuchsia/BUILD.gn create mode 
100644 toolchain/fuchsia/DIR_METADATA create mode 100644 toolchain/fuchsia/OWNERS create mode 100755 toolchain/gcc_link_wrapper.py create mode 100755 toolchain/gcc_solink_wrapper.py create mode 100644 toolchain/gcc_toolchain.gni create mode 100755 toolchain/get_concurrent_links.py create mode 100644 toolchain/get_cpu_count.py create mode 100644 toolchain/get_goma_dir.py create mode 100644 toolchain/goma.gni create mode 100644 toolchain/ios/BUILD.gn create mode 100644 toolchain/ios/OWNERS create mode 100644 toolchain/ios/compile_xcassets.py create mode 100644 toolchain/ios/swiftc.py create mode 100644 toolchain/kythe.gni create mode 100644 toolchain/linux/BUILD.gn create mode 100644 toolchain/linux/unbundle/BUILD.gn create mode 100644 toolchain/linux/unbundle/README.md create mode 100644 toolchain/mac/BUILD.gn create mode 100644 toolchain/mac/OWNERS create mode 100644 toolchain/nacl/BUILD.gn create mode 100644 toolchain/nacl_toolchain.gni create mode 100644 toolchain/rbe.gni create mode 100644 toolchain/toolchain.gni create mode 100644 toolchain/whole_archive.py create mode 100644 toolchain/win/BUILD.gn create mode 100644 toolchain/win/midl.gni create mode 100644 toolchain/win/midl.py create mode 100755 toolchain/win/ml.py create mode 100644 toolchain/win/rc/.gitignore create mode 100644 toolchain/win/rc/README.md create mode 100644 toolchain/win/rc/linux64/rc.sha1 create mode 100644 toolchain/win/rc/mac/rc.sha1 create mode 100755 toolchain/win/rc/rc.py create mode 100755 toolchain/win/rc/upload_rc_binaries.sh create mode 100644 toolchain/win/rc/win/rc.exe.sha1 create mode 100644 toolchain/win/setup_toolchain.py create mode 100644 toolchain/win/tool_wrapper.py create mode 100644 toolchain/win/toolchain.gni create mode 100644 toolchain/win/win_toolchain_data.gni create mode 100644 toolchain/wrapper_utils.py create mode 100644 toolchain/zos/BUILD.gn create mode 100755 tree_truth.sh create mode 100755 update-linux-sandbox.sh create mode 100644 util/BUILD.gn create mode 100644 util/LASTCHANGE.dummy create mode 100644 util/PRESUBMIT.py create mode 100755 util/action_remote.py create mode 100755 util/android_chrome_version.py create mode 100644 util/android_chrome_version_test.py create mode 100644 util/branding.gni create mode 100644 util/chromium_git_revision.h.in create mode 100644 util/generate_wrapper.gni create mode 100755 util/generate_wrapper.py create mode 100644 util/java_action.gni create mode 100755 util/java_action.py create mode 100644 util/lastchange.gni create mode 100755 util/lastchange.py create mode 100644 util/lib/__init__.py create mode 100644 util/lib/common/PRESUBMIT.py create mode 100644 util/lib/common/__init__.py create mode 100644 util/lib/common/chrome_test_server_spawner.py create mode 100644 util/lib/common/perf_result_data_type.py create mode 100644 util/lib/common/perf_tests_results_helper.py create mode 100644 util/lib/common/unittest_util.py create mode 100755 util/lib/common/unittest_util_test.py create mode 100644 util/lib/common/util.py create mode 100644 util/lib/results/DIR_METADATA create mode 100644 util/lib/results/OWNERS create mode 100644 util/lib/results/__init__.py create mode 100644 util/lib/results/result_sink.py create mode 100755 util/lib/results/result_sink_test.py create mode 100644 util/lib/results/result_types.py create mode 100644 util/process_version.gni create mode 100755 util/version.py create mode 100644 util/version_test.py create mode 100755 vs_toolchain.py create mode 100644 whitespace_file.txt create mode 100644 win/BUILD.gn create 
mode 100644 win/as_invoker.manifest create mode 100644 win/common_controls.manifest create mode 100644 win/compatibility.manifest create mode 100755 win/copy_cdb_to_output.py create mode 100644 win/gn_meta_sln.py create mode 100644 win/message_compiler.gni create mode 100644 win/message_compiler.py create mode 100755 win/reorder-imports.py create mode 100644 win/require_administrator.manifest create mode 100644 win/segment_heap.manifest create mode 100755 win/set_appcontainer_acls.py create mode 100755 win/use_ansi_codes.py create mode 100755 write_buildflag_header.py create mode 100644 xcode_binaries.yaml create mode 100644 zip_helpers.py create mode 100755 zip_helpers_unittest.py

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 000000000000..22046984f517
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,29 @@
+# This file is needed for projects that have this directory as a separate Git
+# mirror in DEPS. Without it, a lot is wiped and re-downloaded for each sync.
+*.pyc
+ciopfs
+/android/bin
+/android/binary_size/apks/**/*.apk
+/args/chromeos/*.gni
+/args/chromeos/rewrapper*
+/config/gclient_args.gni
+/cros_cache/
+/Debug
+/Debug_x64
+/fuchsia/internal/
+/goma
+/gomacc.lock
+/ipch/
+/lacros/prebuilt_ash_chrome/
+/Release
+/Release_x64
+/win_toolchain.json
+/util/LASTCHANGE*
+/util/support
+/x64/
+/linux/debian_*-sysroot/
+/linux/ubuntu_*-sysroot/
+/ios_files
+/mac_files
+
+!/util/LASTCHANGE.dummy
diff --git a/.style.yapf b/.style.yapf
new file mode 100644
index 000000000000..b4ebbe246704
--- /dev/null
+++ b/.style.yapf
@@ -0,0 +1,6 @@
+[style]
+based_on_style = pep8
+
+# New directories should use a .style.yapf that does not include the following:
+column_limit = 80
+indent_width = 2
+ +import("//build/buildflag_header.gni") +import("//build/config/chrome_build.gni") +import("//build/config/chromecast_build.gni") +import("//build/config/chromeos/args.gni") +import("//build/config/chromeos/ui_mode.gni") +import("//build/config/features.gni") +import("//build/util/process_version.gni") +import("//build_overrides/build.gni") + +source_set("buildflag_header_h") { + sources = [ "buildflag.h" ] +} + +buildflag_header("branding_buildflags") { + header = "branding_buildflags.h" + + if (is_chrome_branded) { + flags = [ + "CHROMIUM_BRANDING=0", + "GOOGLE_CHROME_BRANDING=1", + ] + } else { + flags = [ + "CHROMIUM_BRANDING=1", + "GOOGLE_CHROME_BRANDING=0", + ] + } +} + +buildflag_header("blink_buildflags") { + header = "blink_buildflags.h" + flags = [ "USE_BLINK=$use_blink" ] +} + +buildflag_header("chromecast_buildflags") { + header = "chromecast_buildflags.h" + + flags = [ + "IS_CASTOS=$is_castos", + "IS_CAST_ANDROID=$is_cast_android", + "ENABLE_CAST_RECEIVER=$enable_cast_receiver", + ] +} + +buildflag_header("chromeos_buildflags") { + header = "chromeos_buildflags.h" + + flags = [ + "IS_CHROMEOS_DEVICE=$is_chromeos_device", + + "IS_CHROMEOS_LACROS=$is_chromeos_lacros", + "IS_CHROMEOS_ASH=$is_chromeos_ash", + "IS_CHROMEOS_WITH_HW_DETAILS=$is_chromeos_with_hw_details", + "IS_REVEN=$is_reven", + ] +} + +if (build_with_chromium) { + group("gold_common_pytype") { + testonly = true + + data = [ "//build/skia_gold_common/" ] + + data_deps = [ "//testing:pytype_dependencies" ] + } +} + +if (is_chromeos) { + process_version("version_metadata") { + sources = [ "//chrome/VERSION" ] + + template_file = "metadata.json.in" + output = "$root_out_dir/metadata.json" + process_only = true + } +} diff --git a/DIR_METADATA b/DIR_METADATA new file mode 100644 index 000000000000..c914ddc4a73e --- /dev/null +++ b/DIR_METADATA @@ -0,0 +1,5 @@ +monorail { + component: "Build" +} + +team_email: "build@chromium.org" diff --git a/OWNERS b/OWNERS new file mode 100644 index 000000000000..dce9d5593800 --- /dev/null +++ b/OWNERS @@ -0,0 +1,34 @@ +set noparent +# NOTE: keep this in sync with lsc-owners-override@chromium.org owners +# by emailing lsc-policy@chromium.org when this list changes. +agrieve@chromium.org +brucedawson@chromium.org +dpranke@google.com +jochen@chromium.org +sdefresne@chromium.org +thakis@chromium.org +thomasanderson@chromium.org +tikuta@chromium.org + +# Clang build config changes: +file://tools/clang/scripts/OWNERS + +# For java build changes: +smaier@chromium.org +wnwen@chromium.org + +# NOTE: keep this in sync with lsc-owners-override@chromium.org owners +# by emailing lsc-policy@chromium.org when this list changes. + +# Mac build changes: +per-file mac_toolchain.py=erikchen@chromium.org +per-file mac_toolchain.py=justincohen@chromium.org +per-file mac_toolchain.py=file://build/mac/OWNERS +per-file xcode_binaries.yaml=file://build/mac/OWNERS + +per-file .gitignore=* +per-file check_gn_headers_whitelist.txt=* +per-file whitespace_file.txt=* +per-file OWNERS.status=* +per-file OWNERS.setnoparent=set noparent +per-file OWNERS.setnoparent=file://ATL_OWNERS diff --git a/OWNERS.setnoparent b/OWNERS.setnoparent new file mode 100644 index 000000000000..52755b51367c --- /dev/null +++ b/OWNERS.setnoparent @@ -0,0 +1,85 @@ +# List of OWNERS files that can be used together with "set noparent". See +# docs/code_reviews.md#owners-file-details for more details. + +# Overall project governance. 
diff --git a/DIR_METADATA b/DIR_METADATA
new file mode 100644
index 000000000000..c914ddc4a73e
--- /dev/null
+++ b/DIR_METADATA
@@ -0,0 +1,5 @@
+monorail {
+  component: "Build"
+}
+
+team_email: "build@chromium.org"
diff --git a/OWNERS b/OWNERS
new file mode 100644
index 000000000000..dce9d5593800
--- /dev/null
+++ b/OWNERS
@@ -0,0 +1,34 @@
+set noparent
+# NOTE: keep this in sync with lsc-owners-override@chromium.org owners
+# by emailing lsc-policy@chromium.org when this list changes.
+agrieve@chromium.org
+brucedawson@chromium.org
+dpranke@google.com
+jochen@chromium.org
+sdefresne@chromium.org
+thakis@chromium.org
+thomasanderson@chromium.org
+tikuta@chromium.org
+
+# Clang build config changes:
+file://tools/clang/scripts/OWNERS
+
+# For java build changes:
+smaier@chromium.org
+wnwen@chromium.org
+
+# NOTE: keep this in sync with lsc-owners-override@chromium.org owners
+# by emailing lsc-policy@chromium.org when this list changes.
+
+# Mac build changes:
+per-file mac_toolchain.py=erikchen@chromium.org
+per-file mac_toolchain.py=justincohen@chromium.org
+per-file mac_toolchain.py=file://build/mac/OWNERS
+per-file xcode_binaries.yaml=file://build/mac/OWNERS
+
+per-file .gitignore=*
+per-file check_gn_headers_whitelist.txt=*
+per-file whitespace_file.txt=*
+per-file OWNERS.status=*
+per-file OWNERS.setnoparent=set noparent
+per-file OWNERS.setnoparent=file://ATL_OWNERS
diff --git a/OWNERS.setnoparent b/OWNERS.setnoparent
new file mode 100644
index 000000000000..52755b51367c
--- /dev/null
+++ b/OWNERS.setnoparent
@@ -0,0 +1,85 @@
+# List of OWNERS files that can be used together with "set noparent". See
+# docs/code_reviews.md#owners-file-details for more details.
+
+# Overall project governance.
+file://ATL_OWNERS
+
+# Third-party dependency review, see //docs/adding_to_third_party.md
+file://third_party/OWNERS
+
+# Security reviews
+file://build/fuchsia/SECURITY_OWNERS
+file://chromeos/SECURITY_OWNERS
+file://content/browser/CHILD_PROCESS_SECURITY_POLICY_OWNERS
+file://ipc/SECURITY_OWNERS
+file://net/base/SECURITY_OWNERS
+file://sandbox/linux/OWNERS
+file://sandbox/mac/OWNERS
+file://sandbox/OWNERS
+file://sandbox/win/OWNERS
+file://third_party/blink/SECURITY_OWNERS
+
+# Privacy reviews
+file://tools/traffic_annotation/summary/TRAFFIC_ANNOTATION_OWNERS
+file://tools/metrics/ukm/PRIVACY_OWNERS
+file://base/metrics/OWNERS
+
+# Blink API owners are responsible for decisions about what APIs Blink should
+# expose to the open web.
+file://third_party/blink/API_OWNERS
+
+# third_party/blink/web_tests/VirtualTestSuites need special care.
+file://third_party/blink/web_tests/VIRTUAL_OWNERS
+
+# Extension related files.
+file://chrome/browser/extensions/component_extensions_allowlist/EXTENSION_ALLOWLIST_OWNERS
+file://extensions/common/api/API_OWNERS
+
+# This restriction is in place to avoid accidental additions to our top-level
+# layout files, such as adding duplicated assets, or introducing new colors
+# when we don't want them.
+file://ui/android/java/res/LAYOUT_OWNERS
+
+# Updating policy_templates.json can have drastic effects for systems depending
+# on policy definitions (for example, Google's cloud management tools for
+# Chrome and Chrome OS).
+# The rules are documented at:
+# https://sites.google.com/a/chromium.org/dev/developers/how-tos/enterprise/adding-new-policies
+file://components/policy/ENTERPRISE_POLICY_OWNERS
+
+# This restriction is in place due to the complicated compliance regulations
+# around this code.
+file://chrome/android/java/src/org/chromium/chrome/browser/searchwidget/COMPLIANCE_OWNERS
+
+# Notification channels appear in system UI and are persisted forever by
+# Android, so should not be added or removed lightly, and the proper
+# deprecation and versioning steps must be taken when doing so.
+file://chrome/browser/notifications/android/java/src/org/chromium/chrome/browser/notifications/channels/NOTIFICATION_CHANNEL_OWNERS
+
+# The Weblayer API is supposed to be stable and will be used outside of the
+# chromium repository.
+file://weblayer/API_OWNERS
+
+# New features for lock/login UI on Chrome OS need to work stably in all corner
+# cases.
+file://ash/login/LOGIN_LOCK_OWNERS
+
+# Changes to the CQ/CI configuration can have a significant impact on infra cost
+# and performance. Approval should be limited to a small subset of the users
+# that can make infra changes.
+file://infra/config/groups/cq-usage/CQ_USAGE_OWNERS
+file://infra/config/groups/sheriff-rotations/CHROMIUM_OWNERS
+
+# Origin Trials owners are responsible for determining trials that need to be
+# completed manually.
+file://third_party/blink/common/origin_trials/OT_OWNERS
+
+# New notifiers added to //ash/constants/notifier_catalogs.h and
+# //ash/constants/quick_settings_catalogs.h should be reviewed
+# by //ash/system owners to ensure that the correct notifier is being used.
+file://ash/system/OWNERS
+
+# WebUI surfaces are user visible and frequently are kept around indefinitely.
+# New WebUI additions should be reviewed by WebUI PLATFORM_OWNERS to ensure
+# they follow the guidance at https://www.chromium.org/developers/webui
+file://ui/webui/PLATFORM_OWNERS
diff --git a/OWNERS.status b/OWNERS.status
new file mode 100644
index 000000000000..f5cc1fc8bab2
--- /dev/null
+++ b/OWNERS.status
@@ -0,0 +1,12 @@
+# Use this file to set a global status message that should be shown whenever
+# git cl owners proposes to add you as a reviewer.
+#
+# The status messages should be somewhat stable, so please don't use this for
+# short term, or frequently changing updates.
+#
+# The format of the file is
+#
+# you@chromium.org: Single line status message.
+#
+
+jochen@chromium.org: EMEA-based reviewer.
diff --git a/PRESUBMIT.py b/PRESUBMIT.py
new file mode 100644
index 000000000000..fba4d3288767
--- /dev/null
+++ b/PRESUBMIT.py
@@ -0,0 +1,57 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+PRESUBMIT_VERSION = '2.0.0'
+
+# This line is 'magic' in that git-cl looks for it to decide whether to
+# use Python3 instead of Python2 when running the code in this file.
+USE_PYTHON3 = True
+
+import textwrap
+
+
+def CheckNoBadDeps(input_api, output_api):
+  """Prevent additions of bad dependencies from the //build prefix."""
+  build_file_patterns = [
+      r'(.+/)?BUILD\.gn',
+      r'.+\.gni',
+  ]
+  blocklist_pattern = input_api.re.compile(r'^[^#]*"//(?!build).+?/.*"')
+  allowlist_pattern = input_api.re.compile(r'^[^#]*"//third_party/junit')
+
+  warning_message = textwrap.dedent("""
+      The //build directory is meant to be as hermetic as possible so that
+      other projects (webrtc, v8, angle) can make use of it. If you are adding
+      a new dep from //build onto another directory, you should consider:
+      1) Can that dep live within //build?
+      2) Can the dep be guarded by "build_with_chromium"?
+      3) Have you made this new dep easy to pull in for other projects (ideally
+      a matter of adding a DEPS entry)?""")
+
+  def FilterFile(affected_file):
+    return input_api.FilterSourceFile(affected_file,
+                                      files_to_check=build_file_patterns)
+
+  problems = []
+  for f in input_api.AffectedSourceFiles(FilterFile):
+    local_path = f.LocalPath()
+    for line_number, line in f.ChangedContents():
+      if blocklist_pattern.search(line) and not allowlist_pattern.search(line):
+        problems.append('%s:%d\n    %s' %
+                        (local_path, line_number, line.strip()))
+  if problems:
+    return [output_api.PresubmitPromptOrNotify(warning_message, problems)]
+  else:
+    return []
+
+
+def CheckPythonTests(input_api, output_api):
+  return input_api.RunTests(
+      input_api.canned_checks.GetUnitTestsInDirectory(
+          input_api,
+          output_api,
+          input_api.PresubmitLocalPath(),
+          files_to_check=[r'.+_(?:unit)?test\.py$'],
+          run_on_python2=False,
+          run_on_python3=True))
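To make the CheckNoBadDeps() patterns concrete, here is a small standalone sketch; the two pattern strings are copied from the check above, while the sample lines are made up for illustration:

  # Demo of the dependency-check regexes from CheckNoBadDeps().
  import re

  blocklist_pattern = re.compile(r'^[^#]*"//(?!build).+?/.*"')
  allowlist_pattern = re.compile(r'^[^#]*"//third_party/junit')

  samples = [
      '  deps = [ "//base/foo:bar" ]',  # flagged: reaches outside //build
      '  deps = [ "//build/config:compiler" ]',  # ok: stays within //build
      '  # deps = [ "//base/foo:bar" ]',  # ok: commented out
      '  deps = [ "//third_party/junit:junit" ]',  # ok: allowlisted
  ]
  for line in samples:
    bad = blocklist_pattern.search(line) and not allowlist_pattern.search(line)
    print('%s  %s' % ('BAD' if bad else 'ok ', line.strip()))
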
+
+import os
+import sys
+import unittest
+
+import PRESUBMIT
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+
+from PRESUBMIT_test_mocks import MockAffectedFile
+from PRESUBMIT_test_mocks import MockInputApi, MockOutputApi
+
+USE_PYTHON3 = True
+
+
+def _fails_deps_check(line, filename='BUILD.gn'):
+  mock_input_api = MockInputApi()
+  mock_input_api.files = [MockAffectedFile(filename, [line])]
+  errors = PRESUBMIT.CheckNoBadDeps(mock_input_api, MockOutputApi())
+  return bool(errors)
+
+
+class CheckNoBadDepsTest(unittest.TestCase):
+  def testComments(self):
+    self.assertFalse(_fails_deps_check('no # import("//third_party/foo")'))
+
+  def testFiles(self):
+    self.assertFalse(
+        _fails_deps_check('import("//third_party/foo")', filename='foo.txt'))
+    self.assertTrue(
+        _fails_deps_check('import("//third_party/foo")', filename='foo.gni'))
+
+  def testPaths(self):
+    self.assertFalse(_fails_deps_check('import("//build/things.gni")'))
+    self.assertTrue(_fails_deps_check('import("//chrome/things.gni")'))
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/README.md b/README.md
new file mode 100644
index 000000000000..266712508856
--- /dev/null
+++ b/README.md
@@ -0,0 +1,36 @@
+# About
+`//build` contains:
+ * Core GN templates and configuration
+ * Core Python build scripts
+
+Since this directory is DEPS'ed in by some other repositories (webrtc, pdfium,
+v8, etc), it should be kept as self-contained as possible by not referring
+to files outside of it. Some exceptions exist (`//testing`, select
+`//third_party` subdirectories), but new dependencies tend to break these other
+projects, and so should be avoided.
+
+Changes to `//build` should be landed in the Chromium repo. They will then be
+replicated to the stand-alone [build repo](https://chromium.googlesource.com/chromium/src/build)
+by the [gsubtreed tool](https://chromium.googlesource.com/infra/infra/+/main/infra/services/gsubtreed).
+Note: You can find all directories already available through gsubtreed in the
+[list of all chromium repos](https://chromium.googlesource.com/).
+
+## Contents
+ * `//build/config` - Common templates via `.gni` files.
+ * `//build/toolchain` - GN toolchain definitions.
+ * Other `.py` files - Some are used by GN/Ninja, some by gclient hooks, and
+   some are standalone utilities.
+
+Files referenced by `//.gn`:
+ * `//build/BUILDCONFIG.gn` - Included by all `BUILD.gn` files.
+ * `//build/secondary` - An overlay for `BUILD.gn` files. Enables adding
+   `BUILD.gn` to directories that live in sub-repositories.
+ * `//build_overrides` -
+   Refer to [//build_overrides/README.md](../build_overrides/README.md).
+
+## Docs
+
+* [Writing GN Templates](docs/writing_gn_templates.md)
+* [Debugging Slow Builds](docs/debugging_slow_builds.md)
+* [Mac Hermetic Toolchains](docs/mac_hermetic_toolchain.md)
+* [Android Build Documentation](android/docs/README.md)
diff --git a/action_helpers.py b/action_helpers.py
new file mode 100644
index 000000000000..046a292baf6e
--- /dev/null
+++ b/action_helpers.py
@@ -0,0 +1,126 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Helper functions useful when writing scripts used by action() targets.""" + +import contextlib +import filecmp +import os +import pathlib +import posixpath +import shutil +import tempfile + +import gn_helpers + + +@contextlib.contextmanager +def atomic_output(path, mode='w+b', only_if_changed=True): + """Prevent half-written files and dirty mtimes for unchanged files. + + Args: + path: Path to the final output file, which will be written atomically. + mode: The mode to open the file in (str). + only_if_changed: Whether to maintain the mtime if the file has not changed. + Returns: + A Context Manager that yields a NamedTemporaryFile instance. On exit, the + manager will check if the file contents is different from the destination + and if so, move it into place. + + Example: + with action_helpers.atomic_output(output_path) as tmp_file: + subprocess.check_call(['prog', '--output', tmp_file.name]) + """ + # Create in same directory to ensure same filesystem when moving. + dirname = os.path.dirname(path) or '.' + os.makedirs(dirname, exist_ok=True) + with tempfile.NamedTemporaryFile(mode, + suffix=os.path.basename(path), + dir=dirname, + delete=False) as f: + try: + yield f + + # File should be closed before comparison/move. + f.close() + if not (only_if_changed and os.path.exists(path) + and filecmp.cmp(f.name, path)): + shutil.move(f.name, path) + finally: + f.close() + if os.path.exists(f.name): + os.unlink(f.name) + + +def add_depfile_arg(parser): + if hasattr(parser, 'add_option'): + func = parser.add_option + else: + func = parser.add_argument + func('--depfile', help='Path to depfile (refer to "gn help depfile")') + + +def write_depfile(depfile_path, first_gn_output, inputs=None): + """Writes a ninja depfile. + + See notes about how to use depfiles in //build/docs/writing_gn_templates.md. + + Args: + depfile_path: Path to file to write. + first_gn_output: Path of first entry in action's outputs. + inputs: List of inputs to add to depfile. + """ + assert depfile_path != first_gn_output # http://crbug.com/646165 + assert not isinstance(inputs, str) # Easy mistake to make + + def _process_path(path): + assert not os.path.isabs(path), f'Found abs path in depfile: {path}' + if os.path.sep != posixpath.sep: + path = str(pathlib.Path(path).as_posix()) + assert '\\' not in path, f'Found \\ in depfile: {path}' + return path.replace(' ', '\\ ') + + sb = [] + sb.append(_process_path(first_gn_output)) + if inputs: + # Sort and uniquify to ensure file is hermetic. + # One path per line to keep it human readable. + sb.append(': \\\n ') + sb.append(' \\\n '.join(sorted(_process_path(p) for p in set(inputs)))) + else: + sb.append(': ') + sb.append('\n') + + path = pathlib.Path(depfile_path) + path.parent.mkdir(parents=True, exist_ok=True) + path.write_text(''.join(sb)) + + +def parse_gn_list(value): + """Converts a "GN-list" command-line parameter into a list. + + Conversions handled: + * None -> [] + * '' -> [] + * 'asdf' -> ['asdf'] + * '["a", "b"]' -> ['a', 'b'] + * ['["a", "b"]', 'c'] -> ['a', 'b', 'c'] (action='append') + + This allows passing args like: + gn_list = [ "one", "two", "three" ] + args = [ "--items=$gn_list" ] + """ + # Convert None to []. + if not value: + return [] + # Convert a list of GN lists to a flattened list. + if isinstance(value, list): + ret = [] + for arg in value: + ret.extend(parse_gn_list(arg)) + return ret + # Convert normal GN list. + if value.startswith('['): + return gn_helpers.GNValueParser(value).ParseList() + # Convert a single string value to a list. 
+  return [value]
diff --git a/action_helpers_unittest.py b/action_helpers_unittest.py
new file mode 100755
index 000000000000..6a9f90851bf0
--- /dev/null
+++ b/action_helpers_unittest.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python3
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import pathlib
+import shutil
+import sys
+import tempfile
+import time
+import unittest
+
+import action_helpers
+
+
+class ActionHelpersTest(unittest.TestCase):
+  def test_atomic_output(self):
+    tmp_file = pathlib.Path(tempfile.mktemp())
+    tmp_file.write_text('test')
+    try:
+      # Test that identical contents do not change the mtime.
+      orig_mtime = os.path.getmtime(tmp_file)
+      with action_helpers.atomic_output(str(tmp_file), 'wt') as af:
+        time.sleep(.01)
+        af.write('test')
+
+      self.assertEqual(os.path.getmtime(tmp_file), orig_mtime)
+
+      # Test that new contents are written.
+      with action_helpers.atomic_output(str(tmp_file), 'wt') as af:
+        af.write('test2')
+      self.assertEqual(tmp_file.read_text(), 'test2')
+      self.assertNotEqual(os.path.getmtime(tmp_file), orig_mtime)
+    finally:
+      tmp_file.unlink()
+
+  def test_parse_gn_list(self):
+    def test(value, expected):
+      self.assertEqual(action_helpers.parse_gn_list(value), expected)
+
+    test(None, [])
+    test('', [])
+    test('asdf', ['asdf'])
+    test('["one"]', ['one'])
+    test(['["one"]', '["two"]'], ['one', 'two'])
+    test(['["one", "two"]', '["three"]'], ['one', 'two', 'three'])
+
+  def test_write_depfile(self):
+    tmp_file = pathlib.Path(tempfile.mktemp())
+    try:
+
+      def capture_output(inputs):
+        action_helpers.write_depfile(str(tmp_file), 'output', inputs)
+        return tmp_file.read_text()
+
+      self.assertEqual(capture_output(None), 'output: \n')
+      self.assertEqual(capture_output([]), 'output: \n')
+      self.assertEqual(capture_output(['a']), 'output: \\\n a\n')
+      # Check sorted.
+      self.assertEqual(capture_output(['b', 'a']), 'output: \\\n a \\\n b\n')
+      # Check converts to forward slashes.
+      self.assertEqual(capture_output(['a', os.path.join('b', 'c')]),
+                       'output: \\\n a \\\n b/c\n')
+
+      # Arg should be a list.
+      with self.assertRaises(AssertionError):
+        capture_output('a')
+
+      # Do not use depfile itself as an output.
+      with self.assertRaises(AssertionError):
+        capture_output([str(tmp_file)])
+
+      # Do not use absolute paths.
+      with self.assertRaises(AssertionError):
+        capture_output([os.path.sep + 'foo'])
+
+      # Do not use absolute paths (output path).
+      with self.assertRaises(AssertionError):
+        action_helpers.write_depfile(str(tmp_file), '/output', [])
+
+    finally:
+      tmp_file.unlink()
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/add_rts_filters.py b/add_rts_filters.py
new file mode 100755
index 000000000000..94297c550b9b
--- /dev/null
+++ b/add_rts_filters.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python3
+# Copyright 2021 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Creates a dummy RTS filter file and a dummy inverse filter file if
+  real ones do not exist yet. Real filter files (and their inverse) are
+  generated by the RTS binary for suites with any skippable tests. The
+  rest of the suites need to have dummy files because gn will expect the
+  file to be present.
+
+  Implementation uses try / except because the filter files are written
+  relatively close to when this code creates the dummy files.
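+
+  (os.open with O_CREAT | O_EXCL, used below, makes the existence check
+  and the file creation a single atomic step, so a concurrent writer
+  cannot race in between.)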
+
+  The following type of implementation would have a race condition:
+    if not os.path.isfile(filter_file):
+      with open(filter_file, 'w') as fp:
+        fp.write('*')
+"""
+import errno
+import os
+import sys
+
+
+def main():
+  filter_file = sys.argv[1]
+  # '*' is a dummy that means run everything
+  write_filter_file(filter_file, '*')
+
+  inverted_filter_file = sys.argv[2]
+  # '-*' is a dummy that means run nothing
+  write_filter_file(inverted_filter_file, '-*')
+
+
+def write_filter_file(filter_file, filter_string):
+  directory = os.path.dirname(filter_file)
+  try:
+    os.makedirs(directory)
+  except OSError as err:
+    if err.errno == errno.EEXIST:
+      pass
+    else:
+      raise
+  try:
+    fp = os.open(filter_file, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
+  except OSError as err:
+    if err.errno == errno.EEXIST:
+      pass
+    else:
+      raise
+  else:
+    with os.fdopen(fp, 'w') as file_obj:
+      file_obj.write(filter_string)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/android/AndroidManifest.xml b/android/AndroidManifest.xml
new file mode 100644
index 000000000000..821108f8a177
--- /dev/null
+++ b/android/AndroidManifest.xml
@@ -0,0 +1,12 @@
+
+
+
+
+
diff --git a/android/BUILD.gn b/android/BUILD.gn
new file mode 100644
index 000000000000..4d035b8d3367
--- /dev/null
+++ b/android/BUILD.gn
@@ -0,0 +1,241 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/build_vars.gni")
+import("//build/config/android/config.gni")
+import("//build/config/android/rules.gni")
+import("//build/config/python.gni")
+import("//build_overrides/build.gni")
+
+if (enable_java_templates) {
+  # Create or update the API versions cache if necessary by running a
+  # functionally empty lint task. This prevents racy creation of the
+  # cache while linting java targets in android_lint.
+  android_lint("prepare_android_lint_cache") {
+    create_cache = true
+  }
+
+  generate_build_config_srcjar("build_config_gen") {
+    use_final_fields = false
+  }
+
+  generate_build_config_srcjar("build_config_for_testing_gen") {
+    use_final_fields = false
+    testonly = true
+  }
+
+  write_native_libraries_java("native_libraries_gen") {
+    use_final_fields = false
+  }
+
+  java_library("build_java") {
+    supports_android = true
+    srcjar_deps = [
+      ":build_config_gen",
+      ":native_libraries_gen",
+    ]
+    sources = [
+      "java/src/org/chromium/build/annotations/AlwaysInline.java",
+      "java/src/org/chromium/build/annotations/CheckDiscard.java",
+      "java/src/org/chromium/build/annotations/DoNotClassMerge.java",
+      "java/src/org/chromium/build/annotations/DoNotInline.java",
+      "java/src/org/chromium/build/annotations/DoNotStripLogs.java",
+      "java/src/org/chromium/build/annotations/IdentifierNameString.java",
+      "java/src/org/chromium/build/annotations/MainDex.java",
+      "java/src/org/chromium/build/annotations/MockedInTests.java",
+      "java/src/org/chromium/build/annotations/UsedByReflection.java",
+    ]
+
+    jar_excluded_patterns = [ "*/build/BuildConfig.class" ]
+
+    # A new version of NativeLibraries.java (with the actual correct values)
+    # will be created when creating an apk.
+    jar_excluded_patterns += [ "*/NativeLibraries.class" ]
+
+    proguard_configs = [ "chromium_annotations.flags" ]
+  }
+
+  # Not all //build embedders pull in junit_binary deps that live in //third_party.
+  if (build_with_chromium) {
+    android_assets("junit_test_assets") {
+      testonly = true
+
+      # We just need any file here, so use the test itself.
+ sources = [ "junit/src/org/chromium/build/AndroidAssetsTest.java" ] + } + android_resources("junit_test_resources") { + testonly = true + sources = [ "junit/res/values/strings.xml" ] + mergeable_android_manifests = [ "junit/AndroidManifest_mergetest.xml" ] + } + robolectric_binary("build_junit_tests") { + # Test has no JNI, so skip JNI Generator step. + generate_final_jni = false + resources_package = "org.chromium.build" + sources = [ + "junit/src/org/chromium/build/AndroidAssetsTest.java", + "junit/src/org/chromium/build/IncrementalJavacTest.java", + ] + deps = [ + ":junit_test_assets", + ":junit_test_resources", + "//build/android/test/incremental_javac_gn:no_signature_change_prebuilt_java", + "//third_party/junit", + ] + } + } +} + +# TODO(go/turn-down-test-results): Remove once we turn down +# test-results.appspot.com +python_library("test_result_presentations_py") { + pydeps_file = "pylib/results/presentation/test_results_presentation.pydeps" + data = [ + "//build/android/pylib/results/presentation/template", + "//build/android/pylib/results/presentation/javascript/main_html.js", + "//third_party/catapult/third_party/gsutil/", + "//third_party/jinja2/debug.py", + "//third_party/six", + ] +} + +python_library("devil_chromium_py") { + pydeps_file = "devil_chromium.pydeps" + data = [ + "devil_chromium.py", + "devil_chromium.json", + "//third_party/catapult/third_party/gsutil/", + "//third_party/catapult/devil/devil/devil_dependencies.json", + + # Read by gn_helpers.BuildWithChromium() + "//build/config/gclient_args.gni", + ] +} + +# Contains runtime deps for installing apks. +# E.g. from test_runner.py or from apk_operations.py. +group("apk_installer_data") { + # Other //build users let devil library fetch these from Google Storage. + if (build_with_chromium) { + data_deps = [ + "//build/android/pylib/device/commands", + "//tools/android/md5sum", + ] + data = [ "//third_party/android_build_tools/bundletool/bundletool.jar" ] + } +} + +python_library("apk_operations_py") { + pydeps_file = "apk_operations.pydeps" + deps = [ ":apk_installer_data" ] +} + +group("test_runner_py") { + testonly = true + deps = [ + ":test_runner_core_py", + ":test_runner_device_support", + ] +} + +python_library("test_runner_core_py") { + testonly = true + pydeps_file = "test_runner.pydeps" + data = [ + "pylib/gtest/filter/", + "pylib/instrumentation/render_test.html.jinja", + "test_wrapper/logdog_wrapper.py", + "//third_party/requests/", + ] + data_deps = [ ":logdog_wrapper_py" ] +} + +group("test_runner_device_support") { + testonly = true + + # We hardcode using these tools from the public sdk in devil_chromium.json and + # in pylib's constants. + data = [ + "${public_android_sdk_build_tools}/aapt", + "${public_android_sdk_build_tools}/dexdump", + "${public_android_sdk_build_tools}/lib64/libc++.so", + "${public_android_sdk_build_tools}/split-select", + "${public_android_sdk_root}/platform-tools/adb", + ] + data_deps = [ + ":apk_installer_data", + ":devil_chromium_py", + ":stack_tools", + ] + + # Other //build users let devil library fetch these from Google Storage. + if (build_with_chromium) { + data_deps += [ "//tools/android/forwarder2" ] + data += [ "//tools/android/avd/proto/" ] + if (enable_chrome_android_internal) { + data += [ "//clank/tools/android/avd/proto/" ] + } + if (is_asan) { + data_deps += [ "//tools/android/asan/third_party:asan_device_setup" ] + } + } + + # Proguard is needed only when using apks (rather than native executables). 
+ if (enable_java_templates) { + data_deps += [ "//build/android/stacktrace:java_deobfuscate" ] + } +} + +python_library("logdog_wrapper_py") { + pydeps_file = "test_wrapper/logdog_wrapper.pydeps" +} + +python_library("resource_sizes_py") { + pydeps_file = "resource_sizes.pydeps" + data_deps = [ + ":devil_chromium_py", + "//third_party/catapult/tracing:convert_chart_json", + ] + + data = [ + build_vars_file, + android_readelf, + rebase_path("$android_ndk_library_path/libc++.so.1", root_build_dir), + ] +} + +# Tools necessary for symbolizing tombstones or stack traces that are output to +# logcat. +# Hidden behind build_with_chromium because some third party repos that use +# //build don't pull in //third_party/android_platform. +# TODO(crbug.com/1120190): Move stack script into //build/third_party +# and enable unconditionally. +group("stack_tools") { + if (build_with_chromium) { + data = [ + "tombstones.py", + "pylib/symbols/", + "stacktrace/", + ] + + data_deps = + [ "//third_party/android_platform/development/scripts:stack_py" ] + } +} + +# GN evaluates each .gn file once per toolchain, so restricting to default +# toolchain will ensure write_file() is called only once. +assert(current_toolchain == default_toolchain) + +# NOTE: If other platforms would benefit from exporting variables, we should +# move this to a more top-level place. +# It is currently here (instead of //BUILD.gn) to ensure that the file is +# written even for non-chromium embedders of //build. +_build_vars_json = { + # Underscore prefix so that it appears at the top. + _HEADER = "Generated during 'gn gen' by //build/android/BUILD.gn." + forward_variables_from(android_build_vars_json, "*") +} + +write_file(build_vars_file, _build_vars_json, "json") diff --git a/android/COMMON_METADATA b/android/COMMON_METADATA new file mode 100644 index 000000000000..7a2580a646c4 --- /dev/null +++ b/android/COMMON_METADATA @@ -0,0 +1 @@ +os: ANDROID diff --git a/android/CheckInstallApk-debug.apk b/android/CheckInstallApk-debug.apk new file mode 100644 index 0000000000000000000000000000000000000000..3dc31910a5388024da4558c9e5f249a37e3783bd GIT binary patch literal 37106 zcmdq|bC_gLw>Ai8sS91UZL7<+ZQHhOcGz|_iwjcU`aXlY09VQcd-0RV(1G{_AKAc`D0(BdE$qoM{-mLo}V!ElknL=k2}Ng)f0(8l1c zg@we;?t+z5J|S9JfL+S9GNL29V_#mJU0B$kQ!U;`nKT!LCM@4M$60@{aR7uFL9sj_ z?04{J1+}SsiX)NHu!EtE$bU81MUr*+@7)$I;J6orP8dl(}fvkdW7FdjcT5=98dXB7GB>2zF4iv5ZYR;*M7S zL@x}yu!RvUd3b}q!PTnEU8F4^3RPH3%`9?Qzh}w~tG)=z^1lgx^4FM-ujQ(?F?lX~ z9dxe4y+>WLZkb~Hw1nF$NJB#XSB@3_jeQ$|&;>Lg^D+>B*#`gw?M$5PElp$%?JUhq zon8L2ADT36w;+HTI=gCl!q%HiSTGOqRZRl(y9S|JG7PlBAJ9nN3xXDjc}i$i&Hf;ZTERDuMd%Ka?(*UmJ>#1z-%k~xbkp|>@b<#3A-}qu{tr$nkZfN`f3Ihz> zK5Z>_p_|sdlFtEV&K%$MmzP-NruQ9&#ya zZ_1NwLkuLlc(>r&73T~m6OFOIAFn6N$ByJ>+(Tv)hNpMzz^73Ur>OC?4=l@vMvsO( zE@3AL_fw5FM^AI_C!P6b9!UmS5OgK0L3{ZjSbPJlPsplQe(5!~#N;Kc^j<2M$DK^c z1t;;UXI0tL-o?(Fw=vIqaN=A*~%?QJ%Ky>xNy*X*X;mpY|i zun*t3Q#e;Bk5{B$YW)9*fAux7CQv`mp42hXvF*2gT04CV7Ax3>fB8xF^7e8PqQzaX ztUYB-R%16u9royH;lDX>Bz5Ij;-lwHV)4oe|6VtO<`s+Vf6wpLoxehJthxu&qxl>b zWUu<7a& zld-8Yy`ht{F(?4`UnRhc9smNs1tbH7FMpf20rmh_fD15X3rtx8>;Uut51?!VtWEU4YMKBgLx4LFM!*tN03Gnt1gO~( z*v=AQ4AB3VE>~b{V}J#)+zF_~0hluXAGFy1zoNzBe_IRj|ILg%fFu9kof9lT_CMJY z1kTt6$blOWExU0Wa*ra(>&fs(_&*rWeXmW%<`z*cs^wQvEJ z+Wd<{2Vma+cLI=!zf=GsfH4>_{%fy)E$eUIKY{ii0T{yqK?npR5a2*gT!1eppbQ0E znSZyyzc&A0?e}l`FON?DV)U=w{q4&H82|0}pFsJK0F1GLAP3?ru*H98!GA~a&o%LP z1ct!Xa{yYp-M?1S^k14lK@kCnKnXa#|CL4sO2Ds#|HuOtfPiuUWd2JY4^Sor;{O*s z{+IY)dHI_O^`9lc80>E!|Ff?A|7`K^9RJPyue$y11VK4tNgn#D!Pu~BT!~dxF-+lg__utO{a`?CWAN_y(`e#u8=>I40 
z|Ed1JeE!uB+^_-vzLz)SN9iAv004hM1nlqc`OL)0(B06;#+1&&#KDrm(pcZd(ACb^ z!qkc0!Ok2IsURl~2ZR0h>i|wtLPQA|OZ)}Y-%Vciyh$7YAOuK?2&#CjPiK2(o0z&j z_8xg&bgM2cdC$(!R)aQ@1kn@{UkJ!dMN|Gvw~+Z2OG;g5u`9N?gKo@fH3ckDVYP^$ zC2IW2F*q~DI5p3*xgdT1xOLb->vBv(k$NrvOcg&r4D+E728H3vxcb$MNWe_oF z`IFcfflMp{t5`Ia+*?vZvYN~-)EkL55_WFFT0?zz`Iv^o@Qt!!X?w(Z`7G1HCM!0Q z2iEaAt+zjX%i#!wlY;X*K=*d6eLv&dvIG8 zBNU|Grm_|JHl{+miROPZE_jB(4W62A9;bo7Kg#~BnU420TPQszasTXE_Onoq?N31N zA3gIT_1|UsGpO&)=DZQk`IIb~RoZSptG?CxP(NW#4R$@f^09w-^l_gz)mE03CT4tq z;bFazdF^)cYl!196_1|)!-UpojEjqJbUKy8_7KG1#av{Wd2*p1Hu7=LY&6%FnFqf! zicbG-2aKEF=d>khMdv29Y8HGD;*vv{T3GG#VbGob8t`(WYrNQsI~Ye7chrsSJSBMh zTYDSt<9Fr#Qd-m>ames+hRn)y#yZYpc^TX#d3Y0ZPp&fRk3Rw$6&8}v$PSh1DojaH z?d%pl(N@nCBy09tj#;^NdntpU>1DRwE|0Nox}O$%sn#d6)k`K`8TU1u>! zVHC9bPko#6v;ll~ODqBglhf83{x@taO_Sjh4jec6gXgiqMdKn{32D98Q`=b=9LK%wq-5TxpH8?5)N8=oH?A!xO>ABf*vFE#K|J3J9 zDx@2p@8hg_&+7+5+`Hoj{E)rxwX7Vxo$~~Xnlheh?Zfe2$3;EXL}l3lLrrc_$~Yq zuIGhNs;N~>%DRo+!$o2qyoy?n-BQQ=%=DvuYe_+-{Y;7ldq20w=o-Jj5uo{JZ?3p- zbW$OS=D6gPgS<+?$-~VYOPce38_t;)Sfwd>`VmrO1>*>jNQ|qbP6&oTFy2&sMmB6X z2?dEeC;jaBE$$JI=gA^xT}~_ewCCDSyD^&KusrUx4O)AzpCKEg6w4tBg{hzqS z`WuiC=8`Po-;gT0H(HAics~}GaBt7QC<{BEJ?!CHmaRpV1SKjMX9|SvDkSHA+!b>H z@S&iv!QsLXVX>~kz)x<vre<3_FEAW1=|SQNX1PmJOO6;kIHOT9)EXtU0oGLn=exqx~Z^o@8%BQApYeJJyuX^Du`7C|A$ zBB6C^FYmB!-3PuT>gvy3FC(8v&}Py$L2FrCuC8RN?~o+^Avd>eD*Ml_iJv71jnsZj zgSfL7cV0nm3b2A`bx|t;D*<2eFF|L#buJv*M>)ft@sD`61gls^J5HoM55o|gSBIpL zWLH}sw*`$+y25ylo+@-}HnP6?^Oy5}{L(+!@1Ep*)|_lV$gCJm5bOz>O#X&4dsG3Y ze{0_La5`dfFRu%=xV})F10TB#yT_9S(wQnCw^ln;MmEn%a>~&tCzok9ygVLFH!ZP& zrJ0#j!s&T9#u7itLyusRLBd87YF4rLN41XBkZi`ZJc#KF(rYe4_wTeruwL(q-o1^{ zMDwfmM_VtSrd4)(-n0f)16vc}vF^%TUKQQv*^%@)DR-Pp?)`#(saGXAy=Q#HRjx;lp8|g`OS_M3BgWnsK6btEl43Kk4wib6dl4z?sSEN_Hf^*>;6Fxhg)*`-UHVd zS{9de{IoTNENNNW)?24J0x?IE;K!a;ibERzhU@D|qoszJW?M`G0(EPY!^6kyv{KF) zc2;Lsa6y^S^BRBACZhVd^JxM_>fwD&c-?>~Ytm?dl8^!`EkGsU;Cc(Ct03p%U=(M2 zqT_ed59ft@#1ap62sH4jIgls*VB8%QDxNL|TTl z$*e*S&D?fQ2ns^NAS8=2S%m^r1ml?Db$n;99hZeJt2DtVt3RM6x&|h)zkzxT;4g!mT5J_k#*F7%HSLr#32{gGO&CdsTpd>*y=IXnHI# zP=NwpF-?VpE49Y-z2<}TggIBM^nPSD_2cT2P_2uTUdBdwBO~|#6Je%~QCjiDx8)t0 z85hs6mTfTGMV#UKGx4bvA$u}xB2vtnVZe!oMZN+erGgdVem!>PCga7Ju|`}Ol<;SW z)t-37lT^e=Mlo9Ya$lH$_BsfI&LAO(cU{~9gb*S~T?h&yx??t!+%iqE79ig;N#CJ; z)gL}~@SD_lb7{+4a@SZ9H+8Idal9M_t7=-S^x(yd4Yw`Oe(TxB&ECE1-IV`xN%O4d z?Yw%wO=8iJZlQO|F2-Xc;j_IVW^N?f#fupKzWqjDr!3&~|n-f=ym`7V^y|2$qQBb1guEKG| zPcVlrBsv`hEu>Q&TE&&RVy-OO>PbQ?hLHLy=LN4^9Aw~v-3gSf0zDsv(&p{=VGADp zr=P@UFQZsZZFj?iT20!&!<$>2%N{18`{Yc_B(d<#m~>SE_Hovn~z$Bb!-Z8 zavbFm*kpHyRyi6ml_TmakyWDCp*5*}wF1J>T{ZZV?h}IMAgJ#8PmYzZL71W<`wE}( zC2Bq5X;O-ocNFeN2;O%XKlTp@?iqX@Hp|X;n+Rkh@-)rmyH@N=*2c<8A6a|nWsdmS z7*DWq;PKLK^PFn@ULeaZXyyrChcGx^r#Cpd8q>Tu){|v9n=`mczQ4y7ii^RXByUV@ zz0h0EY>XdoUf5bA;J?p2grB%=2!G>e;dTdEn=@Br(ZaT|))_D1DTjp}NuCI!WLk|N z4#q-Upi?vr94fs#Oqgfc0n;XC(3ElkBK zC#TMTUh?5`*-qo58}0M}jYh3zY?m}r_o8@H#*!7gKvQ~f(FEJpzx>)r^`k_kp;brq zGwssDz%s)3H1&4%xeqF%eKdk85HTn1wH^AhX=;@N1dNDCp7;LQtku0Tqu7?ScWcXS zy7v5-^K@A(`#F;o_p`9lb{2m+R~PQ#th&kFJ$$VqxxE`JwmsXdd-|-I;@qQg@$u_V zy;X}ji0;H7#}ODs8wsglwMr3{QCga1?aD!>;6WzgBBX;#A}a2fH9Qey?)IggJ2d~7 ztL^t421vH@hGrb^LdwQMm>JIb#f;_ySF<+b**KRRYEm+i(>#?;^2*~i2JJYlVGs%r zOk5YOcAbkMQGv8xbfZEp#Eu|Rda>jZFI8e;Zv!l)wK5Qjr@xE@G0DcD#h`i_SNFTX z6*iz-!x}|hokVg{22s(ATuC>oF)Vqnw7|~n>9j%2TZisT<Dv9nrZ$<6~iK{Qz(WBH_3 z8$-6mVxh9Ie~eIAH@xRNnm5#OA$$;&=E^VYCj19;)D#1rAcKzO+B{E;j5fQMl#5wM zZzz`@>-j=@MkNJJRFMw;=sVfG2*IK6ooqSH!vbAV%J7`=%5fZdKl_(>e!P3x^?Y8g z^*o|8<~lKl7(i_?RQ~j$sF7aFkE%TW{4{I*-33`FswRYiqXa?*zO4jTj;+(#m4~P- zsv<8lAmR+IX+% zE;b)fta7NNt^s0+qK1XvuTY6;Xz(1swyRAfcN?Z1sSzwZH4fi$5BF! 
zPUP=)G(s|Y-`>3}C1X()M7J8T1hjQCt%-K$>!z4CwPd7iF!@81;Q8Z8mifL~?_T(M z_t7?wdh3dv&l^oQ8gqEB<=vj`eFSJ4|c0Z!nm;I7ZIbXv5UN+&5~T3fDd2 zN6~jRQZm2nL@^*I_lMWpf2;GDfW~CPG-fg~DTr)QnTxulHNaVer?ezWUFV;hd&i|_ z)oS8-zjD8+Jn~H-S>cE5-w8fFNzq+I_mpX>tWpiVbc}(o{avlR-JhlBM;83R$Yojc z`a1lcC<+lIFbw~A>w%E|;rMWes-UAgj%J1q+DqM*mc=yp$n&Fx$FUcpI2p_w+8ll=c9)Uhr8gOct|!m7QW^{T4{`QR8$hCrHLp|4};qFCa4U)DhzkHj@PTN#wB`b1_S|m@7WBE z*0Xvs8r89iDQ=I^;2yBS&F-Nz{#_E`!gFLbFsL^?2b`nb!O>ejSoE2@2wRV=e2)jw z={Y%rm>Vl)qoAso!*lHEZSTAM9t)x7HwnEjx-&etp*}Z-1T1=dsdCG-qy+>ey2Os(`)Mnl;(SU&EcBXY9{? z=K3>8;&@XlpD%15e_H(IN|Z|Vq&sU?e7&WH`3ooE*H{SCqaNgi@V)7cNR?ES>c1+d zzB5s_F75V3t*RS3TG95<-3`*w$~DZZ{^_YYZ=(sKa6caq56?!4<4Kb5Ir7?i2$TMC zUf{oru>FbJfmKOKcEhdV2`zc^OW>v*~~BBbNF#?{jR{Laf7h8%HNekhjjA@q^s3Dc3oCf?^5d+nQa?T;7du9xi3$!mGO z*gCxjN4p>I+&u5c@<+-9Zy@4$H!+}ya8NF%Z9=C}^FWZOsSB)LoSgi;BH7-gRy5tOv>bjRU8 zB3y8syU(blSC@MOK%iQ+TUUu|&g(4gJE&G7`Xi8zVe}e=by-2^D2a`}lCH4utIjkHd+&w69{KdmF&>?@Bv@4wdV|LtgrG#+)di2#T2>meg;a``$x zqM?@?PQfMAJrnl&C#OeVTbFx8$OptK2H^&oN`YVwA8WKwDTWx|{?~JfZ_zLU=QypS zQAa(D&?BoK-a1zGi7v`sNQt~4+uJ0Q3r20xKCuyk!tsVenQrJ^)}N~n&=k|Rw3D|n z`GUd@y`Y4~NU+Nym?$k?qR&F{8W;`6fvU?Ro2*j$IL#~OKR*ZV8m&>I8)C)xKR^9c zE!?#>@#ls9R7v&rGep3{W<(LT+7r53iqYSxD#Xe_p~I<#czgoxF;FpsC6K_rVo5%l z4iY1YcRCB`Y8}0`QT&buzv0h%g)!*h*7@~z0-xByVWEc?V~oTkihjBr*WnO|CJ0_9 z$fCj|=E7uM^sXaGx2wCpNs>IumX3b)Q`A)dQNPJTG3R0Nv^Q=s@$sbYONGATnDIc# zA7xj0%4Sv(JN@|PE3L(y?!zYM+jGjO@lC`W)>kyxfdH5=a1uwHt5?(tX>Z{|YC&jO z-?N_#NOaE(gCwt5dFRT0cR~{AH89ZYHJgPO21xr?g$hvYZ%=0b8uOQ{=tWe~z z2F=Xz=x`>wi;UVo=A$YphcvS8Ae^p4SVII7Ri1vk=Iy=Y>4z$ql;n%%wCKa1Q&cBI z7}T^(Etx%?ZTkhh;&%i~l}KF~D(+39eZwVVmGC*OoxQyF^ogH@#g@4U4ntHCCiIZU zzo9NniFMy@Xz`o$*P)v+_Zmuq-|qb)dueA7^XGJ?18V1Y^yS;JuBDk=lbRDjHXO*c zoX|3Q%`|dbme;g5zcY4KYWzq|7fdEGEu%=VQBnHZvKim=(u%`#eBOA~Z1?xF+_!9E zP}Z}-@1sG=Px%AztlDt3KTfQDE-M)rd3$3?;Z%NZwCw^O(+>&Zt!SUXDKWWRBQHnbFR_9gZcgL{Z?jK-gZU(rEr}qm} zgqvA^dep+ZolP;>XB1_f*4kw*UVu@mGv}Y=KHQuX&y^|BfhVapuYKN0ETIiWf`yHT zcRCvfley>$j`z84<9L|ExT4wEGM>NsTsY3q4pF_+Wt4&l>l?^54o%d$o5ozB>7sVSx+rs9@G zQ>)Ximg#xz#G$oYM~iHyp2CJX_Dog{ci`*ZS{_xSy}iw~F&eavJu8p{5LCg2<{?U9Q!a>{-9Qd?#?%ps>|MFwTURN*9OlNimnNUzLB>=dv7^!-$YqZYIWSqT8&pZ zrWxKd=CgB~_6pg`=G(1!aLUiK(E1jW8g`S9Z838cQxo}oXwLRwMf1=jtv?`)z%O6| zJN*DlGB`;#J5-C!$;5N8({}u=w`+B3$^CmLZMNs~EomzWQOH|yYgU(Ok8>|tQdGmd ztu}2nG)(kFpdK458}YdbPorrfrS@cF*&~(rrmsG>t#BUsITZz2_xzG_6=;%k z@vu%kJvrrMw@)r&J9oyYA=tLzP(07ggovuC5NMO%O>`F`vW?Um4Ra(mr`KE>IglD! zxv^L;%P}?>)oXkj|B0(9fuhkotAUy8LeLP4)JUD3o?d?**;k)XeLuNDqlx*ZIM$f` z%{nbe|Y_G%d6eYdwQ;Sy^%f zu_5~@*?5(kmxd-Re#yXUvh}k4^hB-obpHpgPP-CQ0vZbCg%X<@`j-LV@8!F7)XL!> zN{q#p1xye)rv0c7BisdT12W^x@gX1tOr<$;ELP2>_EJqV(o1Y+UlP%nziBQUS8V>e zVL0Av`~JqzbLXWq22!wAndQ_)O!z#pxscV9+|k}-OR9y3nI>h5Xv`Cs9B?{w)WW}| z*OlI+foJ`aJAtWOU3Rf~zW_JA`z38oU*bSl;fQypWjOq-lh9wKHnU>H#_q&9hY+gC zIro%)`}&EF)V=42f0ahePS96x(><{5?c2@S;-j8-<5{k@g$jQcKfg;wdiMgZ`l;lk z>q0caI0UTz18`iem{MW;lyD?v*xpz)#-L2Q__7V!)7tL1LID#erwt zYk&USJlc0$Zh!JjTibawW5XfDGjebUwX;g#rZuqXrF-9xtd(kPP>vYXkToi{kx)_w zBI347AG>I=V&AUYxD+#`&7_2LAH7XOUUuOhdc1q*8M0!E!mLnXCz(=x-c@*ih#x&! zZz?<{xa;LuiW_CmC}u(65G_w;i8#!#A0NUE@?@`jX
    ZX~{24ZkrfIYg-4-s`Z+~h+DZ^+9cXZNp7kdwQ#p8;ij!(+&V%d ziwDTGEzhrgr}R9$*wyguZMs)d^zWUMP-1^rKaO^AzjMPgv|mAX9icE74~7kLJp5Vs z@v*LNU@NCnX)yr8z1*E9cqB4K!lD5tJtX`x<6b-m@wTA%g$Hbq zDO2@%eU$w}>&Tn~5HC&tMl>!kbin^a54MyZXeE@aP^Cf_Ou|HB%v3aKBu0)LCPrK- zjI`9BJ|Q4efty4M_Pal_lj@sV2{&7!l%_a76ki4K~F-FTDKx3T3W4fR{J-t+5+bG;()YG#cetM6$3ktmHcoY~UVwWA#vO4+Nf(9{&8xb5~l&$1)u)w>6khCR@ zWKpyunY(V6zYwE`&7;HNgCg%Dp%MFT^_y^wA*|tu3rA3MO|R31j0;rMy4r@%Wh#tf zMj^?fU`ql)f=q@aiw^?RDHSKsHgs+Ds-x&eIKD{tFZykoz%KIzf_VLm&I>&D!bGxs zv~_Y<^gsd)rHMlc?ug)#^y&V#=GJcPvxc}gxR)%2Hj%=7AQdALS8OAR92{%oes9fG zxdxd}S-6zJpAX0Ydp12VLA(s|f}ByrDDG8gC@(_?gCK_?Y!)8Fi4u3@;%gOBtNRte z#GH|Av>Sp*F)ZbcgVvJKU_fhCO~`hpuypowgh7OCf%Tln5o^(^l;db%=t>ZL+JD(D zj85nnF@y))UXg_Oi?wY;(W<);#WmOGyRn=e8yZD}<#M3{_IJcJkf9KJSXgrVhE^Us zgItz9BS2KHzUzG7?yROGkF!Ikg2#7qNxHcXCP>nphOiWyz z#)@p?VCyj1DY|AxCxD}*gwV>y$mp9FjfCd3Qbdlj%5lLmuo%wn50pOg1Ad8hR~=zbrNl1taY0~(JRB?FtIq1%&~-M>^-OX`CX3d@gcGym?106F6+GM zvp~GgkBWWM1*a^#C-c>h$-8gnIN$@Kvd!;FMXiP-+Z+D_DL)2*!0O~VW{dAOg3pBx zXb##zrtL_Sj!_6mjqlX4Q+g4=(z3jmqSPp__h{L?;RFoy*;lU zI&3(E_ntR@IIVr*_6B9}@nrNYhcpHklDTlAe_a%YOcwk^_^|L1c!xv$1$5y7NQ%md z)Cw5{{kx+O?G$aA5zy)9FNlD({>@Ry=D%?i`fvP%NP+E0|K%t28<3t30KicF-5WFp zThhJj^;KO*`K|MAU*9h-J?-s~6H$>-BnV0SRKQ?R=i=088Taz%Aj?rP6%*zYsK}JK zQsdM{86~JuH=>f}nW-fgEdV4^h|s^07$ZqT!1rXwxF4Urvyb?^@8g{>LzATss5e=> zXD-`jx6FN4+1N=D5F^EO#+FoojCUUl%^xyY^&E4d!ikST~fF zv^?5#lyR_?c9e2EKIRm5cqN>?aMtDZirZ&D-qiZ?d)@r>jcv0MPB-a14L{;j$lL9` zYjCbxHJu(aM{S!c(9_mWi6B%yKKW67J($V<7+2QH+k6oz{;=8oc3AV+Qxkx{X!(|L z2UdT-17~hF?`5~;=!|rIO||=T7%CjofW2WvL{TIa)$;@6o!h?4WRQwD=!96L_49*h zBUO(bzV!IwZNEB$w>O>F7(UVjH6v=S21}F~4HnKpb*WkgbvXOS51$*btgY9*P69sj z)G@aUjqd&PuWKNW^8OiPW9))7&;{t_05uU_#CnqmX5uVa|#pE<(?6R=ch)CYDl3W(U=nZoo%!D+;+EhJB=2Lz7xbbGFGwayLmNn&M(-xLvd$>DJBtOk-{2roikqM1mxo( ze?cM*~?qo`MvBx@Dl7lg;AqZKfO>{gSDU~ra|w?F-{9mbOZtl>r~{kO2g zh2cjyLjj;CtSgFzLkYvBSYjboMfuKh+0-rgpCz7U=9BU~t|yyIUTNFd(02m*+PR)* zR~6RkxT{Pva~j^BlLZR{CV;C5t~fAnu7zm5qa0Nu2u%nGMiT&ser z7#<*C2easm0xBe$GilBV;fl%}U0gBf5e3EJNEsm1U&UP0n*P}9O%*#KCoQSN0n4S{VLNyg zUz14Qdnwi=z66DA>bBT9^}7+2*$Eq?>(U;P=t##T*}I^VEvX4Zclc+wsGenS2$j|% zRgb!A2}GuHwcHZtkf{_V>F*@tEf$W`X6YaIRcSYGsiV+^ zGe2?@SXj;JJ&8y!B7dweFX{5r1v)cD?tsyY`qWTbT1YPS{H2Gp6_C>xiYl zF?^+8ski6R$8wzutaPx@N0p_u29fK;qLYC8TCa3|Tv_q2?~paD-b{L2 z=(z@l)G###Zd*DQOQ~|5TYNA`#4bQ5moC%tV<8q)(W##sM1mZgl-RKL%_l)wGzP0~ z;6%*%+P}po*1H@1)|asfctd$Sy0kEtL681qC7?fxkyN6Xo}CXyD!b`8y;cRyWtSbD zcckG>ix$n9B3pr)RUzC+VVcE*)sY8)H}CI9lFwN0ZEICewTju8{99@Tm1Z~o>-*B+ zhQ{-pQ&?C${@1;4K~O<#IJvr`U{#m-8fOC)K`0_snv1_VTUrELRL%FC$Te;8`cT_b zqU$v7)@jb#mUYL!;yj*3fYs4pUjMoZu;J?B|MC8I%TV87>-(#N+z8ev*|u}#_Sd1f zCELbSW^2GA`5oRH*(J|?@Y`MFl8^U!n&#so@1l4ABV3T8JT^0P`rYT_O$~eYdyjIS zt;$jNs^!gQQE;by=h;N``Ac|f{i(XRGsKqPrTEi;c@JctsCxFFrB0kB4wqvDH5<4~ z?R$hvj$iAG80&oZU-^tgbW$%}C5uVVxhSSR_y>>Oz95(uhw{BCh3Q{8ZB`U&1`;hxnHb3gpKF*-WH-IJm@Z|GCe`aLr`Se z$H5Q}3KNwm1=6C?rT8L^J5Otsu{rL}b4ivZpKZ4Jj<2JI>%VpL1>0#p5PkWw0nt0B z6*Wc|D^|0vT1!~SQ7uFsAs7?hKZvQX%Bx6`ITY!_Rl@;OBtBUGT=JTB?)B1mik?qy zde8nsj$m=zh;W;dwG#8$`(mGV_N&+aGsvr3%-uf6t4Od!DO#!|@x`wnW$&3yRt7Ny z77m_qal2pipi{j%!oGSEz1!{hIEGcb`emhuz2*2*KVYP<>&VH&j7%m+Q;PU>0T%}A z7ZzeTWPQW)debSw7R;D8L&kfO9P3S>REbPQl|Vnk-olc88Xb(owz}WFHh=qDn+t3c zbFbQ|AG57lhL)}+Jg3+fI!S{l<<6tYQF{+{)1!+c-2$CbB?e_$FY2d6HxngD><<%i z45<-R>7Z+#Z(kHCSn*J!(PzV*rrk~JBx)3g9%D@`1KpSfE&V%Z!bOV7Ms_@jha1J) z)$(N1|mSr=ZYRbn0XW2=d{md1!;6O5Ch(NFmGGdA#xS~L@pooHYzgj_Beqj*I zyoE|;0C7+?hd^|cz(W4ioD30B2!Kci+7o&_Yt-9*vy$uRY?J?F3u8`ivp7NPwAxj@ zMqTxF!`pv*>or?^b>{Tvmj6(}Cc)_B^ zR6p<;f&Kw0UF2K`RjMx_*)hrqH%7nR&ar;+9PL!R@<3sBI2^||?$cOrxAK~KgLF08 
zf!}u4l^9xh`qKIHj~NT^{*ugjrs_!PjYr+fRUH5yG<_M_cb+XV2PM5yQMKe!*1eC` zeh%Ayn0Rl??Xo#P=}~l-cr)lRC}$IFa z`0DiM$#=fjeCvJ>Hw=W%fZyhnAwq>+618J(neP;-@+dJKD}Q7<}(0vn9RV zCndb=9Rv5=7we^cnf@>5-!Ab3KCzz>XY_SOF>0%hNzDl2zid;vmfs%DZk-2*Pj6pF zs3b0A`~nSwg9w5WmS6-4`yw3AO~7ZW`&$cUce+Zw+S}@Y#P`V6?bI43R6yTjLh5t+ z6#e%N1JC0HK*PE>?zjUE21L)_IQnDn9<6uuEkn1)yUxd~_qp?84ryi=6|DrRFMkLI zM13yYusS?+NsU&MW{0);9y?1l{+{<^DG8l!DYL~S=GAj9zkn&z_ypjKv{ zpTAKcQ51vz4Z_(2Hp?7+?uENx!sv@*l*l?`l8_7;A)$aFZk-WgUH<^_bpxqn&K_x? zY)%yUkgAyJ_%YuK#=ekHg&%nCJP5U8_2`<#Z8&t~afygdS}4=7wPMJ-Wv4p9gg z9toVI=DE99672Qa=-&~Q$-kGa;es~OI%_#Dj9z+Oos zbRpigBF2b^!Mzd)gG6DXG12kiVJGqF@9QhVn_|3_inLhaBiz_hz?ONmNg})=)=nT^ zUB{|sWp}<8neRmQ-hI}!RRqn4PamH-w*a^!>*6i$k@9E>As~-}dVFTWfqA z)sh)OQpJ~<)4p?tTqsfSV$qg@kLE|4#}lip$JqpeK8==bT1tPIrrS|tO3ldr7nUnQ z0P$A=L2!fFtF1v!NNYNsiHJg|PI-RxTdsJ)hzghO^=OTXMq;IH-lLd71O|ag)0HhS z>w_FH2T2e^L(m<17}u{x+}F=irabEMFS;jPSZRDR3$8Yp-6%}7L!@w)`CkAa-eJzQ z4VYWh)u-=ZrKk_XFL0*`%bJi;8tL$zF5{a7fN?NEi)N)O3ST$*UtWXjZFNGtHw+4d zPymLIBmFg~g7nHIL&Ix2WiEFgIm%pXUr|)c8$E!2KsnU24puQL6gUznxWSCc&raI} zhg2krZ>t(bTnT;PUv^^%aANNq{14U%DUPsI{BXssRU^uofoq;9 z6FIz%#CvI1*iBK&L={8|lmH<_G?gw!(g4u_p9lYq)zgkU$QdZhYqOg`PTUez)FZoO zH9KiHtG*m#!*!CJaYAA)d!(KhuaI>$hLKVkuO00%I7%W=d1@)xIF z6?`^+`=#SJ$}4r=?};LP@oQ!FWF2h4s}d}5Km|1v0JQ~1oNZcUGdVGiiX0drfN5aH zXdcjI9sv~rJ^|Rrl=>PphdLO*&F8orB&*N+*{mBRop115{TCJtNx^bZVCVS ztc*XT$ZWek7OZMs-so{hfO}FS;AwGxJhcd^U5SWplJa%l=E0YVQu>cl=yga-k14k1kJ=GFt#kuy2!!MAi;UX|$TTTBOO=|c2@lbG+uADiyetmP{DaU&B|5w)IDp8zbA#2vH1RP&C zA~P)UG-8ns@v(tFor|Mg9-6#Ox|)UL(r!`;_uD?6EG{=Gl0qG*qpCukXK@3eERPPg zL+BM0Hy3zZUue`=85dX`YA!_1@Dhq-Q^=tK^~iGLoIfq6B#h|e?wVx1Aiqyq!#Keh zmLr8=woE3hg)T3Yco zflqrSCRAml4z1aTa#WZDBA~P&5e>h;R9y%WVsU2ZRHn1+!jOSp^6YlxsbtGUvhrw) z+&k{yp@wuo6#c9k3xxphNJm<87ES@5fzC$rK&Us_X5&&AKxHMB8jWe3oYlsQ=@qy1Bh`8hFP01h|3Ir zF7$IB;^q5a(W-S@L9$>~eZ-U@H5)N|hx)-_C{~91q)9qf*m)ILNZ$@2W@EV+?PrJ) zu_jK+cE=%q6l@b47^45?dOg!aw+&rf{;9eJG1_~CJ9W&3_*~yYl$Sw`grvbElr&nH zBj1204%=LCL>8TGIj%$5qgafMh7FV3G+^oy;8Gal zN`n{UA~+;pDn653kR0|{VUjn)9Khm(h~zd?GQ-K}?XjH>iSM;bZh1W^_K{?eG(UKWtR}4GSEu#AW%@LCmR!b!)W)iT`aa z?P^ayHy8o`LlL)Yw2Vm+%NhWI8XAD8A<|TDC6+&q6p9GLNl6Vr4&XJVTGYz?5)V_3 zbW@rjJ8Ob}G-YR*$h{|)E%L|abVPlo0|sYHBbI;D8|}h-SDoX+_sHl|G#mu{?$Lhu z@?($s?yhDeA>0@A=zRe>ggm|z-w%lmjd0tB7jt0XS%;5M!cH5;^lLG0(~uO12q@AF zpt((M^vVN56G6-sN-!j$B7n6e86ZdQd^7z_rRg=5)q>FD^}t;1{jT`-!JoGKF={7h zJ_+yPw)y_lhcx>8*4z*0xgPfDym-R1SC*RP+CV-eB!V)F$|Ny*Ai$ki2tOBkAQs93 z&t#zl3P9#BYZQ<#fgv&k3Tik}_`L!>Vv$r+6zc0Cew1`iGiKY?t1(ZG`CxU>Wf%gw7oA+|XsbY{yib{q-RV@U>CuS0)KZPno9Wv#_iM>qN7 zDM@>LbzLqh^rU0rflwhLLSkOceUBS$;#?jUfT98_n>I9rz&Abt$>95;x5HKq5yD!< zv(^LtIRHgc1rukT8PYpWKemyl# zi>1A_%$1AluQ*nrEikn^YfzvtqG(Qa1-;~fwim(hey2_o{r;MnAVo)Y=aEOSemO!Q zrD88B$yJVQOSBSRht5Dd7H0~ZToic;xKIDFRwQ7Hom1HzJg?N;?jFM}#0L|rJuDDn8Y+y92 zS+rbiBW-2)Y?P*^RD~qO&55piyIMx_ty9+AW^(uH=MQ1{eU*?JxU*&E)M9oqU69WB;`mD@_5E4~D~jB7n~esRt3;rvZH z-L)4Q&L1>rLO>G3)CJqL?u6VJT`9VF%X**81H17u{0lGT6w;sj?s8H8{uB;eC*NhS zw=}KS->&&WwF^l=*T52wh7P>!#5geN?(%&bGW3St&^9MhOpo-F`u;OF{Jj(H%i0Z~ zW3?UfP29yfD<=#mv!vzDD3ZJ&R7yz($nvulZ*lCf(TP9{fce;ASx}Z{a=v+$#v1tSGBTCpI|1T4<_G%{X=7^j&iHt z^aL1EuI2?bw|oya389EdghAtJoW0L*E!Dy>j3dR^rQd!q^KEi^f|sz|B@XpQEgKKV zbf#BG*tiap!1i74=(0n%Mum@Gm{c-N)(3}Kp_fY;6c?x9UDz&eg1IH?u-s5fX;fs? 
zuEBy{+ak46jop$?qhXWY6f4NUnYp|IR5K;|JU};+&fA2 ztU8~Y17ww69VMB4^xvaD_in!J-9TGdFxbKQE8|sq3P>8@|X7?r7Oqpe_i*Bi-VZu z5!hNvQY6BOM3Of!I0>wV^>a~bTd)o*fIyD9p ziBxpFuSS}z7Ve3hzji!1F_UF~y0D8#i{ieTL-_@>AYs6T66V7Dp$J0?EW`Xkp+t5< z5Q$c_WPx*{Uj284-zX0U^v8yt^e^jbarBh+@RXDtKN&63MKy*qoFLUl`bL0Wzy^5@ z9c8qfy<`MBh>8`K$q+7~iqR3wHu?^B^b{^=1PovYhz=_zFER%k0HHC^Wt(+FZa=zw zIY^ahv5H8Usg%OM#0g`HTjY{b?ja&r2wpDym|cOJT3GO7rhPQx5m&4h)K}GFfIjU0 zl3zh)2Za8L@O$O54V5&k!)u%1g#qq3k0v!bKl*{8@D240(JXj)XUcdiVI>`fWdSwWc5WSthDFJm)7^02D>l%ojXTy z+tXq;<$}NGW*=f=?7TnkpI-x;g=5wpz9il9pVOX+V@7ZAD(kj2?H|D%#^b9bxu9_Y{iVOn>2}r)XbCd$cporD9@y#GUy6CBUQD?%`Wb0N_6t`Tv#x-TzOT z#s8@R8hkEc@ZTEXPO=;K|L`ow=LcfL!4FGM4*zSL7^C03h~a31hT4PW9~oww z%sf>wJt(_)@Y&Q#yJYpY*@-ct_9kI?3Rr?!HNb$e9~}&-`K1Z(RCI>{4S(n_s49bz zj9a!7Ai4Guzcyt2{MOv@tnF3(u*zNKjBu|)qg)w#sS*%CTS3sL{qT$JeBBFGji297 z$M14O?EIJXey$!9v{Yd8aaf+xUdhh+eAyaGL}90Cm2{i;aSt78R-TKOiu@jQ?$W8miA1upL$XAY_ z8F3nPUE*k%%J<11d+}EaA%$Ru;A}tuxEmjNc&H_9zMA?ZM+7Z04II&wyOgE>WVsX{ zKl4$1_|AlLn^?^}@mzSVyoFqjvl;GlJb@2~sIj^$P`A&}ahPp7L6b4GJ8MY)}bV`f9MkQ)pB2ZcW1uX zMX+u}lbebPQw-3_Jm4D1LDYv~NaGlIO= zMFpd=@OUH%v%hHbI^JIZD5r|U^e5X_8KX$vcmA4NRLzeLna^52bhNCFt`a|6$>}}r zv+!LuL!#;NASAa4i}X8@CGLLerN&gk0cGJNctnwl78Odg*qv0Q*qO~A6^(-FP!V%6 zVTO$M;F9iQCLqd-D8Z7$bV8@F^vPyej7%q<%A0osfrB_SJ3 zg|ns&Mn?GoD;vM(@nDwm(V5B8wxJ6pwp>Ba=(jDjU5xpdDs2IOLa`+JWB8juCC4485ygtW-o#no2O)@*T z9yO+;EHuO)Y>1~Zn1>{^aWAOB0xQ{_5#JbVr68DbkpRq6vAIaFFk}D`Kaf92Q?!1f z_A$ZbI7a(8z)hB39$#O)$dK^xUXb&CFXIF@*vtrPLD zpLAPck0#b6X8~yZ!CAWRT-Iwy&%KiRR6?%D0Rm1B_?;Hpt^%H?4SoD|v-OZai2{Jt z>5^U0Ktn|={GZ&(2T^5upBimZu(e%G9XY8%I(#Ip!bt!k1m#K@ z6l#)cDUZQ;V4HR*pqKHYr0%wTqn_o^6&J)yk5K}p$xOfud8q5Q)zt;NX0{P%VFa6K zPUPy)HAwG(FE@0LUTunGrH~SC(`ANs$aSO4_1hJltd%8l2mlaB4k0fz#U?RPyGVt{ zHJD=#bSVE8UW&@cT+LiP(XD}O%vjFCJlL%TTgDjaAf+(O0!Nb!S8KS5f4=&t?#u#cH~s-N!|Ne(JDk=- zc0bUq(M!FXNmesqyKTGCj|YHC!a~89+`v7wY(xlzWn^?*e0+ysujXI7NdBYowiI_- zxVRJi-MAVj`cc!&E4{dgR*aFZ)eAlOD1#chWlN-$*FXAB!aq3NifV~BMWrwf@DK-? 
zn+suip*!t&-ahnrvJ1J4>%8G0!Zr3b@;R4jWa0CMTn!f<{JL%WmB|!A5Y&daW!-QF zWZ8ys+0pb8O1-4?xMJbB*n zbg|*=(hmpc76ne~8*Gr#QItR%JryW`tkz|JmjZgH_nusHkrzK48NDfPu(5;j9U%qi zR5{y12w^IUj6aAsiB6bg1Xqj*3s)mw%D_NT5+2@?S*ou29Dlmw%$~7_3v(NHg0=jv zY64;bXdTxfS^X#On75n*@SS@IKoJ`s3G%YMSVJxvtl?C~iz`Ioby>PSlK&7yNKRZO z0)S@NHPRIO-|9Pfxz#b*d~mais{C2F47uPN$=GpsO&NFkyYrz{<5^2>F>A_1>Vj<% zR!t6JT7Q|F<9FfXCk%f}7klbQDZ1Kj1$jUfW0>B7PuWivu` zgr|(~--eH62;aQZcjMtZ#IZ&I5r2TEZ2s^-ngfzae zSFH38QUJI3dNi4xTX$oRB(pAY7`bb|`7_*?5kS5|$y&Fd=G$uM^BD1onfqqymr4S} zLqgemUn`p!vRoUx?Hi(-)|0lZBUm?S2N<0+C)ph^t&TAgg(x#x#yES=sGS`no%?c$`S&2 zA!Ucv+|RhJdyR17U!VOunDJoqGF1&Y`ph9Kkn67`OB+7AIHgpOGg1!>on3Je^C}+T zW-pN>(p9IjHgW1aQN=ND4U`ShQ1ou0Wk(n#QWw1R01RY-Sd{vkY(GfYxIsKv7HEsF zb(&%yD`ZElMbQ!!Mm1)ONuc0_0>6XI$|n+P6tjF@*SltM2c2LRl)L6grWL%LWT+wh zF>FA#3oyf@e+E=8&p4yngcg1hbI%kmm~|D&Czov`ZAj-*4jCUGAO(ix+9yNgMpO_=8JPx2~epd$a<2W|UuT z-jbB&)3zhQ3%qp(h{o*o3xY@mf`oWBv6j1=c7a)A&n|t4&J<|c&9dEYf=?X$t>-eT zyDV#uP2a0sA8ny>;f!U2Loq*bk8C|15m(rOPf!zD<({J30!0^>Rhpse)z?tC#3A{F z*8_y#Y9ofIIrT~QTVp8wzuz@Gho&V_J@%_rX+k9fqa5HY(4f~r@ni*1p~BgX+BT%v zCb-1*(RB|xrUOVrD5r$!chW0A4=0{LI72Sg=e>`Q+>R}9GIUv&8DKV`g^_N9GFriv zbFo$~WWsc2QEOpRpyI^*cLbrgg<4+$)anJlK)-2mgUhWyIm?5sVv(GfLs*{yAoAb~ zi=*0a^j=!D^UDT#)+93YAWxW9(U0<^=CCepz&tYO;;u`zCQez z+aL%%9h+zJ&jI9w~6d?Js%n6}OO>WhftB8AufN@kAi zOL+57r+hmm07UA7ZTt`Jeq7&w|G?PYeA#iynIcgu`@QqjvhG6OPH?n&rl zI2TJMw0QWR;*D$VLkX5^=-&X;Tv>A@@{i*ZXp#iNI^RhL>_7Nb8_yXoorTp=A1{~t zCm|Bha@B)%+0yIHGvacxNNFL&o!-YAs7z07-iMM+0b>eVR{Bq*;f7pas1>sXL70Q< zAc+}9T>SOWN*xlE^lP@Y=J)Z(7#{Vvk6+zzzE}W7VKk0+GM^x)v`uZyvltr7_m>%Y z5&5$S%8Kh}C2+;cGJXhq7@Nj&t&iCXF~8@Xs6C$LaJ(5`;v5PuB1`K(EWDP5>c<1m z%6(+yuLickD7hII#7kZuB%{ppRdtHePhlV^R z%nxOok?qWdlC?MDGx#J9Sda_wxQF^>tD_N>|D_0Zc;W{_{Z<@onn;sQr60-t;%QHc zW6wI7KY88y>=G{Zmwv%`fA@riMJph-``6;d$0XfW%_BP)I(YpIj*LtsIe=o1^c-{6 z9`IlUGnsGfUbOWkat<~>7PLNqyTDTLTTk^^yO1?v*c#H9-Jm{TTJV6NPcDM1Fhkg? z|HM^rsP|a*vC{A?DyxY|hOMvwE@{9!>Fq3-TqihH>+o-N7pkG2=4H+VJWQbJi9va0 zTc3!!fpfEHzUg?<98?afpLlo6;_1sP zX7!lYXYlqT?J=O#DMFVa7dDZ!kAdVi+bRU+b&P!>QeM?f-wzHh|AI`{DE+n67^1)h zi?t4z0QHM^=|OwDgH%jhkB+I%_FR~0fumP~plVku%;n@p&D_Fw-*oISlFVfy%jt#? 
zPk<2s2a>K%NSuW2*?Z5GwG7m=?iLBLq45Z>vku>UU_LU2FkRygEoiFfp=uHs7HX>y zMyxU^Z1VmXa;u(RsnF9S1K{FCLTp(CZm`8Wu(`S;9x&pHD6nHS_hCx= zYT$-*hls|x$6nU#Pv==@^2bs>@Yz=*Z-tMMQ1n(<*g!)l$nmFjVdW^~$?q)_>Ek@N zb@z*l7|#o!Jsb?pKP~vf%)yc9du?z}qF`nkP#}<= zvWifIbXz1AWG*K6KGsrUo61IIf74|^vx3lYTCJJhgp|NK(@$2ywv+m;17V5MUyeLk zD>R>)S&29bmMiAe!-F%PW5wCV3`!QVB#E(wDRqyqnspz9D&E z1*xGXR$J~<;}fR}$KxKcLrY5}s=!f*CMNyGf^viwjisP10PR!yE0T}FgIEG%;eha1 zwN;_3m{Z{L1!iXi4Xd8$92JRF4N8|ux`ME8T5h3Mk@>dS7!v5(M{^>=B5;1R2o{6& zDMu;n787eJ3BC@S<0qT1$4$;%1j06VDv(9D_lsr_D{E-uSN2vyRYs{{>COC zP=>;T-~dJ07-I9KpUx_d4BWDBBC#*laXyZVE$VL-R!!b+FH75 z#y6>;v_(xMI~a+keq0i1X7==Ky@o0H*g`Pd^4vTk{m!q&0ZtbtP{>&9tNDda_LbzU#L49+H zI7Kw_4Dak?^+6Bv-8zI0PjjdIP1D8wX9a~WTW|w*42rXYfM!L)$SY!{;7@r%WIsrn zoXp$N73vU>$2hZ;t@izgj*||)3nE1L!5D9~?^)?@fr4vl+4GFD1IF}Vo5rTUk_kQ0 zN)<_xaruaxyG@sT2?F00=X}dRM{vdBU0GV!fX}7iH%?+VYtpFZ4REtZ#*a@G7sK-m{umje2+*;%gKH8x94lmDjHOg^1vOCM?fSKOUHUvO=|N^* zRX-3=8BHpZ6bX`+3M~-95Hq`trgyAGAXbJv5h5@aB>GGUNcqu?#0&u%r>xLLI4q~x zSTcT)I5K>sJkKe$QR`ZGRy3*4LYpZOi+=>gQ=?d%#!~A++?Dz@;O~ctb2GdEx^dUB zuZw>N;D>cWU~ZK?NQ~Jr10+_>)WM`dkkq=Nh<5Hy0X2kXFg!!q_c^hqj#hmWP_c@- zZRdP>TnL^r($-eNHtNYT)Q*&p{)vGa8gOyzfWlO7?C4$^oO<>xl4T_{XV{q!Y6HtVlgCn0;q&QrW6>J>gD$T4V zo>7E1rHRU%I$!N%W519iEytdjFjWugvAHbM#g_Z`a>79<%zlmss4o-nq>`<;SEIpL zMyXFzSe&Bg>VstyBS7Y!mie=wo$O+>V5U1bZjCjvvP;{9D>rm%mqZoMK6unmiuqtX zk+0s9A9yu`jcW{0tLUvDGg+j24K^UiNmvcMAWZf9Q|EMFADAS0FSj_@%!xRJ!t#Wk z5Ly0671PNZ7hgY&NhVvEvM;-K&4r(G&&*|$l zDBr58el>7I&yOq8Uzg1}C+c1B>4r}Uw$}zVs)7pBe3L@xhOv{W zU$Ong%UB>o{w3h--o5x>!=66cu66Ao@HZatMkcC}DWc_OC)2IC3J;nVZ^O*lsEDT- z49}X1i^nxt!!8H+HWQI%vxPW!dp!^aC1Z-1e&$jMlveL#(_gq(YV06$*dqh+%3XRV zw2WXlh!=*0M|@}kGM6vnE@dg>#7j>?6X-FqYV236Y8D&qhH_J2#bo9&`9;_-s^Bq6 zy_+HOnyg^->JCF{F79czt~h*qAQ>vV3#W$wMti-slg_pO0H~UUHi%+@)R*_?S-88D zai{l-4*g%!{+8)NRs^oWP~s2cPv(IC1gBPrO<#vE$Z=k@TL@I*brq1al$hBfky`XG z1*>z#^YVeA0>avHGy}q}v2PC!4Pv0}dNNlDKW&0;ww9~f?WdsE*&!owQ&c2Z?De3M zCA%Sv>Ic^$!{zRAcY}e8YRA6yslxDcBYm29D^FKTNj;Q3zi+IKufwUc-!6GZmW ze98Exb|le6qVoa9s88eva_b}=1DeP+0V!ls{AF8N#NzOjs%-4icm|6$8{iC_PT~OL znBDCk>bn~>J||^1W@&~2#d1EUUbq%t?(73&p*SP(dlK?KqFiOxuXu@zL6K zDw9C56x~1HiYt(EQEsPt7AeG7)`|^M@ctMZL)T=xN-am%5ricX^{ikP%-{wupttsF zjw|jGn)&-0?tV}FF};A#Oc+RtImch_ozBdBU7VV^wAl(CXXs z#6vF`4O3tK!0ZG97JGp0y%Mzb53m93MQJ!PK}@TFwYG;jKK+c4IXDkz^JIg z?)+!}t2Aq&yx!LOY?~7>c9)uKdKqdN9|=PVBu+0(;A0-saE9z)BJHMi!Q?)KCe*EI zrW3&vLA`5*(~ex^b==T=a-JqYLn&Se_mVDZ2eKbSqN6OG$FFQZMsGiyG&Ta^ys+`!!Npdm#XuX5iEy%60*50$u~(4o15kN1rH2S&R-SbD+i3@a8!Q zEtbc;IY;SN?B}mFfWSL)aqlJEf4aoUtb%QdUs@>d&B7aAwcf~wX_nywdG4`=@Or$$ zc2z*q3E}zr6y-j|k~JXcNdzFboA<{bSY811a&;9XRu68+aOwJuRtexJ*tElXo_YWT z@8Zhrr}QMn;QYd7a7%we;?&b`O#mYrTjh2*PDqf#Xsx?7^;5X${Thq62*CiEV4EXJ z_Lc^-AKiSY9dKsvJ3lhRpoB$jf8M>0^-dCJvjBonaL2}nAx$m+k>%kW4~ysh70D(i z?A&at-mLrxOZ)Q*wYc^Q>9yUyXmD}18H}-U3x8UBbDmQDCL>K2h|00dgWJ4+FSN6$ zo)xu!V%LMhcN+MW_=Dmonv8h#?mfe_!QAvPAW!WieW}!*M8U6fVybv z{}5@crdunZ(~@K% zUov8kTOXarI<}gakS*v-uJ`+6IcyV8u7M{10M` zZcgbF4C@rFqwtl;7{2Xd$R!?EdBB&FjEzm+--3C-L}lWBAV^drTspfL0Kl&LPd^a2 zJjB$zx7rjVaFxy|CaQq)lew(ZaAjT3;n>YaQ&7avtKL*sbQbE?b)`et_|WF?c6PX1 zO5g)t{iEX-RCnLMNz#etJ?r$iWTD@Gg8F19n^jxgw^Q&qbTXY5X`XgfH%gB1N{dvR zE*6!Vs7+Q138AE2 zg`>;Zpm7{UOb8V^)>5k&BoiR*lm=>&|Jzw+vaMm}YRqQ|xSSP3HnU^>*k!&Qd!VjE z$q@UMi;}^Mo8~bhI${=BG})fq&viN7v)S~u!Lr-<>yI+%{Nr||p$xLnRf5|nl|zn& z9k2D98$inMEv2O&RHDCGHDW4SVvq_sXky-E@GP0jh+6!$Z8y)QKlrD+Nzr}23^M2E zs~w*IdSwFiJ0Le`4TR9yPTH)E{XCPSA%o^iiZbUP^=`@>PKcCH8M;=p>UUzu_I7|` z#T+|J_wn2xstB56^s?iVU|BJ?rEy*_jr=H@pQ6sksbvpZ(GEMC<85$M!(RBUtf(wU zPQKqJlZ_%?H*aoopN<(EciBxn#$9mvi#|IJ(jd&djET@m5*5?t4N-x}$;{1&VWSwW 
zvHO+X=QS%UI1i^>z$5sfIZTze7hSurM%5o5n%=lW9@wD@MA2rz5i`D_N&?V%_Ue4 z;2@(Kk7&h~G)&wXA z(T7(U@`j|}U`*||tAtSL@($}n2Zg3*HlLNVQhU;Sw4u+V_f|wXkmaZAH<#3GqXrAr zkniL4;m%%xffwnJq1wg)<~@o)krG0nrZ6UE2N!oY9t8TAW^YTglS0*5PzM(s8s&hw z$MdpXCfI8(;DO}B1&ZE7kf$3}-o|Mv4z9NwEpA|87K%tNR6e7=0Lk*JZ%WpN7yvCY zjBQ2pS->lEmS%;|`6F-H#xk*{HR@?{ zqNHcgJqQqIB)lN8s6wUKf+Uzw)Ri?0sUaLr=z;wn=bC<%ioiA?3=^2zCeN{;?ojr< zU$ZN^F1Qn}t_1qn+L+TU3@Nudv>^a>dvP%{jIl4R_JHq+Y2gOU^~!IgA$BPaRUlff z*G9P6c7&8zmIvowEgjo%offdbQa|lOK!E*4jPYt&Jfb6KatvfkWhY7cfe>3x)LLSQp46xRfg}? z|5V827X3XLb(#WB949c#qw^BuHAQ~*E|gtea?Q@Q=1fCeU1t2?@ARSzbSJhx>TnYZ zERmcP&ctpeS-*r=9hzIl*xeB#Ghsm@udw-&eq&`GMb@b0*nzJ2t9L|eK(~4mVwR-y zsT7MAlX^8b1-M(fEFXfWFw@9Ra2isF&6pX1Y$b7ZQzjH6wCRHv{W6KVdl&M7bN%GC z@XaY!&NigxN8h#u_CZSKGUhtSXn$N~GY}d1er)b#+~?%}l@w z_m^OF-D>Q_QD~$eVp>%?OgD*DvPX89eVTO$&|O73sfEQy=+54vD|6)~G#FV(31^?8 z+V_yqh|XV08MJ3GFcjgLA+S0(X>AS<-dq&N{@T~w8BkLlcgps?OW0f*cyut8yz11u zZ@H{945btTwfw58za7vUYFi9;N3Oo|vS>np^yb%A-=zQSpx@1^U~B&=`4+mm~Z6n zNS@Xr2@7>+PwR0}NEvG93f{Bs;%)d5qr4ly{&!9=v5- zyusq-rK`(Img7a-_N~SND|!>S;beg>R5sHGL;G17uZzQrzPlzJ|E8|1JZ|$Yir2sz zX>H=fp58%GWGcvf3x5K;HzKC4gOE=~KN8!aZ0~slwtt{il$z+{09ah1YDUCHoEB18 zi!tr?=p!YD(P|s}f(k2H9q#!3K1sgixSVJMg`)wd`NSwWCi53AUr`Qu{Hje?)OxiK zWVFu(fBSQ{`Ul0v(?9(zp?w6|45(rggk0tr%K}S8-8fm}4o`6{;J70g6jc(!C&`+0Hrl^TW8KgzM?RKFS#Hei#QMx7W2W_ z$&l^KSYLAo$!7ZzRQmhfZq03+C#bl3c*!GB79=W)5Tt$H4pZcFAH{u0Q`8%LQZt%) z!)#-U`YC%tJnesnAcKQSygos1Hb!U?$10$~#K8yThUO{?R9o|u z2XAmgdB0apz`(2NwzP=Sn!zpz_`xpd1+?M~uF7j)zG49YZn`jY2$0EZ0?;M$J)G6L z0?^8Sidnp`Vmje9WE}H;UGFehf5l6LL#=!oX7HGt8kC3DGP!yz%fUC$VOD8`wrM#R z8i-RG^<+>_`z0o1fdgyMv(>Z2Y}Kx(*k0SGT)W@RI&jKh{)m}GoRzb=Kh|{ZiWQLr zRHeVzMpDHXE_NIkEwm3S<^-rB6}%o3(siH*<`>3li@`>5FYLW!utcZ)+8Xwg3%kG+ zZ+9IxHMb#xH=cOvyZ0J1Y}-v1bCoEmM@^Utw%O`N2_la8rsCFDUb4Csegpgd#CmXR zM-ru-v<5!IISurxgd<~rzc_i=ZB{W$Xdv|itx!fX8>*)uoe*L(E@2}!rhK)IcM~yp zb*T05<(X)gjnl()Wek-m9lSo~nZVQi2-MTZ&ETC^K`>~mP_+|<-FY8BzqW#1Xj zq?$#Ex#Kzt-#H6GK0EM7H*the=+y{7t+EY5T3NKC++lh#X{T>Lj0 zjgU7J;j2~hS81mu01!yZNJxYx|MCrD+Q?<~l*FJ*8P?`x#kz4ug)~HJ0$@EsB`t>( zQ{fuDp0@P&glNFA@A-%85D`jmg2I;}BK4SdT2@abE?eE^b$0HCYpW~{&naJa7S})?Mv7?@sy3qo8(r*u`pMjk}@ZiV~63_6R?@J z5jbQ0*FR#6Cn5(l?z^`>>+80y_ag20QEe~YFLL1?>9q^SwN++m<73bCHn*?r(ysFH zs<^uv_I|krtfKSCJ8-9%$ynVG>EloX2;-KiQ~+or=v+X&CIrNitlP%G47j4ypIS@`0160&jHSo33NZ&L-!8FLo6;A=##5JGo#?6*@ zENji>fqD0jUtf1#we{FjI*zn)HXYyk-0x1@#N+2cFlddDg2T!()4Itlt_L(9_D^M- z)uUhz(?6WUgf_;wHX@ucit?iQ)FNR_aO{Ct?%UcabRrP)ezdn)$>L;zp`@{N!tOZ{ z!QE(n+}A>rejjNQ41P<#E5V5(5?NLvGXi)pJqs06r&}{S(Y)}M|=OcHl8D} z(jzCKf1HVS-e%oKqWHFjvS zwyJ1!QpBFHz)d5a|52&Nu$SKXo&KH9?*ntlbe4(lEbN6ugWUHuO7irFTY8}O1#Y*v z1!0oU;lPI~)sr&a)&xP)?vIZe0YG+J8LFAjwU%DTJACACuF-YZWk#)r=MvO%JY3pH zWGdPi8r8DF&|Ov|f$087kv3MsA+UX{vhXk-+Na|K0Zx>MJ31~OgH~OFnY9*=V4G;6WUv6r-^PHFr zceIaR!viXIT_-Cvf2x{&WpwIvs_0w|^9sHs;k`$Vn!ygP1q5}|7>><7?1 z30ZF%kr|9@?1z{Hs=X*oC$dn;UhkoCylwt{r_M@xWi#o#`*(5~u$dnD@%FdMp~p@E z8W5;-VkX-oPNJK+^oesRGM=OtMe{87h&gF+u>q>}?14Rv{$_8nICf zr;r;wm_~tavpR>N#HJ(lui^*&4l^9-PXmUAt7~nAG$pbGKc{?`P zz!da4FNz3T z7Hccx)guhZcnYW)2M)bBQULBZ0D>}F*Ih9YWu239DK{LZ7&nS2JQiHBu*XVj?YaV? 
zAC>3V1R$l-BJ~wtZ|v@8;2XIGGy|Ug@pzq{1_8nIVVcshP)3Uy9HQA1F^;6bP_o~9 z+(a%bKNhgBJH(JklOA4Ryte|`SlrHaS}$q{a81r59LY++8uX9Mv!|V5W7h;ck{UyQ zh<4^N=CAQLmlc}hwDlYiiWiZ5p8MFQu^4mJv&55e<$MSi>4Ffcxa%urR|27Uj)<&C2x*h;Do$LWIQ8DNy70Baci zI~)|u6KF`Rq$2Dv5rcagPv`1DN`q6_Ndl-M0Sv z!)OL`O|afyurJcdv(4;px-ygeD2jZ5l!!7E83h?upesgB{P4ir+qD;ZI27F?Uyx$O zABQE8kN^$)7EbbdRjWKN`*^4O+KtM8YzRTN8cE#f{sqdQd;`V^tp<9Tf7P+%Pb*V{ zQ9>J5RT)r@E9cJkUw3M~etLit1Gle+s!6hfpJw%TIc@N=;m)Lhby%*r9bOEFi z8@4A341EX&s;lau8VkXc`ersWm)V%RDT*sA=7*wjX?QZAs{EkP- zX@+C8^d)6gJKarDkn2}Ye9K?Wjv6hJwu>u{@5FjbzF;|edG6A->aExXxOo|5QJL54 zbgZf|kn_tO7X<0r<-=?_k(5llJ5+zMmAX-w7*Y)1B3jizlK7dYZS5Y*cw;1iMzi85d@};aATynqbRhj^( zFs&OC7Cwo^g3t1F6@;~Zr$CLZLI!js{uYF<4~rj=&K|fV`8xj`{&(9P=I2CcH~>(s zNOZRqv*Ye__OuS%f2NIpPKCENbaMLX#ANc*JxEQ?n&2CzPn+H+uW*MOUha&u-qobE zQPc&RPO2Hm;Sd1)EC3Hfh?EU`q*>QyHndxDp@l&8`^nh*L&7#YitPY)rVw|4A(@K# zrOH8yaf+yDJKZ^=q*vn;&5deLHMiq084nv{r(;fi%Tigs@4z>fu8z0f8s+iUhvAHY z?-zpY-d@+eeO(eh?lj-#$aA9#QXG`KVii1yIcQYER@pE|^heA`lG*kJlG#~Q zVLl6C6TRC!*)Z)WI?%Sj3kZFPE5^O# zvJ$umKI!HK0n)m$?Fk;yAMqbinvF+RzOBQ_f$~yotnq;}!ohnU65ub)r3(TOvh=Ax zJZ}{rRJ$h<_IonOSGg1D6nEw`IQ?I zN>fzZYam*ZE~sSre-87nfPG}Ae_?UsrF|6)ZiDj}KVC#@5(2;Ro)$4`>aTSozThJr ze$jMWd;fZ>yJY&MC9~NZ_nU9_bwtM-#uRqg?Q)tdh(&J$~0ir)_g_^4hO6;wN_S8GVLef7!HY(^4(%%^X=K@#-z#_O_46k}J=GDL zna}t8{p0ukcw6RbYl8ebS$X15h-xC^hE;p*M59B(pX9-#K3%d{&`W@ohP=W;;xAUU zC7&@vNMB&YKy!nQdi|Zgy$@)7Zi6+M60D}{!*klfHZis;)PR~%gK9={kU64e5}Z5* zD+>o^o&UY+Cf8$uT09gd{W3<5;+`f6SAf&zD^binZ27eU`%_md-(%w!(h8MyOZMQ$ zj;dz8L0{L+>UQ~{P$PhLw=#WLE<4Z^w!U6fP75mV+U1j#5w%E5WJFYA5&BQvZVu7{ z>Js@ka*iyUVnkN=e^5*YFO{6EYOVXe;B44~7v>PlCZL6b*MdT<=2@mTeXlv$vKJGK zK~K~dh9SKaSoAk^p#8XPok7m(!=-ZdkvRPm&dJx!X0ji290`n7cW~U0S{Jsn6c*v_ z30x!S|8xrhpj!}G$_KppK%i`3s66n28pzrVX`*?-2BB?jVsimu=7`j?M$q23lfsN; z{uru@o%vd^TMAi=8RjPg$%r6)qk9#Xxf2YJx$vd&SmWP`=@bmTg0w&vLJ)aIn3Cu9%Xt zp5J{+`-?E=3>g8&e=Zrz3I9sZEuy94-fDJLM1EGdqy8_SOh0&lh>kyBaGGI|6TZ#eV# zBTc8VEKTu=qQQ#kB*^I}K|LqfY7OL-#!mi^yYnr~pSfNB$vmCMe=Ba#&F(J&>(yHJ}bBCV`EMGKk(C@%Y`&+}eHS!^a zX>WcwMy2!5s4FAQ`Y;nr=wTL-df02MuZo=z=1Z88XkC~RnF3BxA_BSlhfp@VP~uuiiqKCFb)Irnkug%{k^Nu;?>7*><5C?xW3V_;CKy zF|Lrja%~k-yD$5YDuuMz+pO6lYCc0PLJuoBP-g8$t%ytH|GCP)CWFp;l9*RJ~ zNdh){D58-E^e74}K#DljB4mAAdw05A7JvC5zXucng{C%2+|?3rsZl_nviJEu-qn$F zobb{rrzwgb9IPU7Pv-^J#lk++BQ7WHrigijbJ~852VaxYk?ABAH75QF!Oh#v1Bdf= zcau57#JdyR+;O|clE^+I!8mCK5fwL?yc=f0P& z#*YprMR&Z)Gzv{h>3e3qk-t({J>F6s_!PC~`)k9{5xeP%OT^}pE@^1qgc*eWNvphf z$|3%zOTFt2{n-`rbH?iqMq|vrhz{dxC(9bTDeJ`_t~Dc+`9=chn!*XEBO$eTPsqnp z58tVpph7)U}HD6N+)pAE9iEWBn9ubNH4q0O3F4G_5`cWw`uaZfP`en0uX{%C|GoA_^K8aV_f~zwlo-|Nw*Bz~C558(#JaVFl#8g* z%cQ&X1`xM8d27(ll8;{oX#dyl?&klL_xqn0wB0= z;hQDlnGGa!ZwcOfP&e?u0p5b_N5KLcTMWe)yFIs3DbAIx3IcKOgIjX5cWs+?u#=I* zcHH)H37f<=&26}yh9_*tZ8f*ugn>ZbfIr1QTHS8PZ8gf^#SyKx;dZpm;PU<_|4t`} zO=52Nmh%K};!c~1&BRW?dv~X;#dZR3&*Uy~n-LnzL-_B4w>_J;rn!A}GtS@Io(*(a UJ4+!j_YpDRS^_F{jK=--Z|F-KRsaA1 literal 0 HcmV?d00001 diff --git a/android/DIR_METADATA b/android/DIR_METADATA new file mode 100644 index 000000000000..cdc2d6fb6eb6 --- /dev/null +++ b/android/DIR_METADATA @@ -0,0 +1 @@ +mixins: "//build/android/COMMON_METADATA" diff --git a/android/OWNERS b/android/OWNERS new file mode 100644 index 000000000000..94fa76830228 --- /dev/null +++ b/android/OWNERS @@ -0,0 +1,6 @@ +agrieve@chromium.org +bjoyce@chromium.org +mheikal@chromium.org +pasko@chromium.org +smaier@chromium.org +wnwen@chromium.org diff --git a/android/PRESUBMIT.py b/android/PRESUBMIT.py new file mode 100644 index 000000000000..8348558c1fbd --- /dev/null +++ b/android/PRESUBMIT.py @@ -0,0 +1,137 @@ +# Copyright 2013 The Chromium 
Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Presubmit script for android buildbot.
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
+details on the presubmit API built into depot_tools.
+"""
+
+USE_PYTHON3 = True
+
+
+def CommonChecks(input_api, output_api):
+  # These tools don't run on Windows, so these tests don't work and give many
+  # verbose and cryptic failure messages. Linting the code is also skipped on
+  # Windows because it will fail due to os differences.
+  if input_api.sys.platform == 'win32':
+    return []
+
+  build_android_dir = input_api.PresubmitLocalPath()
+
+  def J(*dirs):
+    """Returns a path relative to presubmit directory."""
+    return input_api.os_path.join(build_android_dir, *dirs)
+
+  build_pys = [
+      r'gn/.*\.py$',
+      r'gyp/.*\.py$',
+  ]
+  tests = []
+  # yapf likes formatting the extra_paths_list to be less readable.
+  # yapf: disable
+  tests.extend(
+      input_api.canned_checks.GetPylint(
+          input_api,
+          output_api,
+          pylintrc='pylintrc',
+          files_to_skip=[
+              r'.*_pb2\.py'
+          ] + build_pys,
+          extra_paths_list=[
+              J(),
+              J('gyp'),
+              J('buildbot'),
+              J('..', 'util', 'lib', 'common'),
+              J('..', '..', 'third_party', 'catapult', 'common',
+                'py_trace_event'),
+              J('..', '..', 'third_party', 'catapult', 'common', 'py_utils'),
+              J('..', '..', 'third_party', 'catapult', 'devil'),
+              J('..', '..', 'third_party', 'catapult', 'tracing'),
+              J('..', '..', 'third_party', 'depot_tools'),
+              J('..', '..', 'third_party', 'colorama', 'src'),
+              J('..', '..', 'build'),
+          ],
+          version='2.7'))
+  tests.extend(
+      input_api.canned_checks.GetPylint(
+          input_api,
+          output_api,
+          files_to_check=build_pys,
+          files_to_skip=[
+              r'.*_pb2\.py',
+              r'.*create_unwind_table\.py',
+              r'.*create_unwind_table_tests\.py',
+          ],
+          extra_paths_list=[J('gyp'), J('gn')],
+          version='2.7'))
+
+  tests.extend(
+      input_api.canned_checks.GetPylint(
+          input_api,
+          output_api,
+          files_to_check=[
+              r'.*create_unwind_table\.py',
+              r'.*create_unwind_table_tests\.py',
+          ],
+          extra_paths_list=[J('gyp'), J('gn')],
+          version='2.7'))
+  # yapf: enable
+
+  # Disabled due to http://crbug.com/410936
+  #output.extend(input_api.canned_checks.RunUnitTestsInDirectory(
+  #input_api, output_api, J('buildbot', 'tests', skip_shebang_check=True)))
+
+  pylib_test_env = dict(input_api.environ)
+  pylib_test_env.update({
+      'PYTHONPATH': build_android_dir,
+      'PYTHONDONTWRITEBYTECODE': '1',
+  })
+  tests.extend(
+      input_api.canned_checks.GetUnitTests(
+          input_api,
+          output_api,
+          unit_tests=[
+              J('.', 'list_class_verification_failures_test.py'),
+              J('pylib', 'constants', 'host_paths_unittest.py'),
+              J('pylib', 'gtest', 'gtest_test_instance_test.py'),
+              J('pylib', 'instrumentation',
+                'instrumentation_test_instance_test.py'),
+              J('pylib', 'local', 'device', 'local_device_gtest_run_test.py'),
+              J('pylib', 'local', 'device',
+                'local_device_instrumentation_test_run_test.py'),
+              J('pylib', 'local', 'device', 'local_device_test_run_test.py'),
+              J('pylib', 'local', 'machine',
+                'local_machine_junit_test_run_test.py'),
+              J('pylib', 'output', 'local_output_manager_test.py'),
+              J('pylib', 'output', 'noop_output_manager_test.py'),
+              J('pylib', 'output', 'remote_output_manager_test.py'),
+              J('pylib', 'results', 'json_results_test.py'),
+              J('pylib', 'utils', 'chrome_proxy_utils_test.py'),
+              J('pylib', 'utils', 'decorators_test.py'),
+              J('pylib', 'utils', 'device_dependencies_test.py'),
+              J('pylib', 'utils', 'dexdump_test.py'),
+              J('pylib', 'utils',
'gold_utils_test.py'), + J('pylib', 'utils', 'test_filter_test.py'), + J('gyp', 'dex_test.py'), + J('gyp', 'util', 'build_utils_test.py'), + J('gyp', 'util', 'manifest_utils_test.py'), + J('gyp', 'util', 'md5_check_test.py'), + J('gyp', 'util', 'resource_utils_test.py'), + ], + env=pylib_test_env, + run_on_python2=False, + run_on_python3=True, + skip_shebang_check=True)) + + return input_api.RunTests(tests) + + +def CheckChangeOnUpload(input_api, output_api): + return CommonChecks(input_api, output_api) + + +def CheckChangeOnCommit(input_api, output_api): + return CommonChecks(input_api, output_api) diff --git a/android/adb_chrome_public_command_line b/android/adb_chrome_public_command_line new file mode 100755 index 000000000000..068493465b14 --- /dev/null +++ b/android/adb_chrome_public_command_line @@ -0,0 +1,16 @@ +#!/bin/bash +# +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# If no flags are given, prints the current Chrome flags. +# +# Otherwise, the given flags are used to REPLACE (not modify) the Chrome +# flags. For example: +# adb_chrome_public_command_line --enable-webgl +# +# To remove all Chrome flags, pass an empty string for the flags: +# adb_chrome_public_command_line "" + +exec $(dirname $0)/adb_command_line.py --name chrome-command-line "$@" diff --git a/android/adb_command_line.py b/android/adb_command_line.py new file mode 100755 index 000000000000..8557085d5381 --- /dev/null +++ b/android/adb_command_line.py @@ -0,0 +1,97 @@ +#!/usr/bin/env vpython3 +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Utility for reading / writing command-line flag files on device(s).""" + + +import argparse +import logging +import sys + +import devil_chromium + +from devil.android import device_errors +from devil.android import device_utils +from devil.android import flag_changer +from devil.android.tools import script_common +from devil.utils import cmd_helper +from devil.utils import logging_common + + +def CheckBuildTypeSupportsFlags(device, command_line_flags_file): + is_webview = command_line_flags_file == 'webview-command-line' + if device.IsUserBuild() and is_webview: + raise device_errors.CommandFailedError( + 'WebView only respects flags on a userdebug or eng device, yours ' + 'is a user build.', device) + if device.IsUserBuild(): + logging.warning( + 'Your device (%s) is a user build; Chrome may or may not pick up ' + 'your commandline flags. Check your ' + '"command_line_on_non_rooted_enabled" preference, or switch ' + 'devices.', device) + + +def main(): + parser = argparse.ArgumentParser(description=__doc__) + parser.usage = '''%(prog)s --name FILENAME [--device SERIAL] [flags...] + +No flags: Prints existing command-line file. +Empty string: Deletes command-line file. +Otherwise: Writes command-line file. 
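+
+Example (the same flag the adb_chrome_public_command_line wrapper documents):
+  %(prog)s --name chrome-command-line --enable-webgl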
+
+'''
+  parser.add_argument('--name', required=True,
+                      help='Name of file where to store flags on the device.')
+  parser.add_argument('-e', '--executable', dest='executable', default='chrome',
+                      help='(deprecated) No longer used.')
+  script_common.AddEnvironmentArguments(parser)
+  script_common.AddDeviceArguments(parser)
+  logging_common.AddLoggingArguments(parser)
+
+  args, remote_args = parser.parse_known_args()
+  devil_chromium.Initialize(adb_path=args.adb_path)
+  logging_common.InitializeLogging(args)
+
+  devices = device_utils.DeviceUtils.HealthyDevices(device_arg=args.devices,
+                                                    default_retries=0)
+  all_devices = device_utils.DeviceUtils.parallel(devices)
+
+  if not remote_args:
+    # No args == do not update, just print flags.
+    remote_args = None
+    action = ''
+  elif len(remote_args) == 1 and not remote_args[0]:
+    # Single empty string arg == delete flags
+    remote_args = []
+    action = 'Deleted command line file. '
+  else:
+    action = 'Wrote command line file. '
+
+  def update_flags(device):
+    CheckBuildTypeSupportsFlags(device, args.name)
+    changer = flag_changer.FlagChanger(device, args.name)
+    if remote_args is not None:
+      flags = changer.ReplaceFlags(remote_args)
+    else:
+      flags = changer.GetCurrentFlags()
+    return (device, device.build_description, flags)
+
+  updated_values = all_devices.pMap(update_flags).pGet(None)
+
+  print('%sCurrent flags (in %s):' % (action, args.name))
+  for d, desc, flags in updated_values:
+    if flags:
+      # Shell-quote flags for easy copy/paste as new args on the terminal.
+      quoted_flags = ' '.join(cmd_helper.SingleQuote(f) for f in sorted(flags))
+    else:
+      quoted_flags = '( empty )'
+    print('  %s (%s): %s' % (d, desc, quoted_flags))
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/android/adb_gdb b/android/adb_gdb
new file mode 100755
index 000000000000..885d597032f8
--- /dev/null
+++ b/android/adb_gdb
@@ -0,0 +1,942 @@
+#!/bin/bash
+#
+# Copyright 2012 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+
+# A generic script used to attach to a running Chromium process and
+# debug it. Most users should not use this directly, but one of the
+# wrapper scripts like adb_gdb_content_shell.
+#
+# Use --help to print full usage instructions.
+#
+
+PROGNAME=$(basename "$0")
+PROGDIR=$(dirname "$0")
+
+# Force locale to C to allow recognizing output from subprocesses.
+export LC_ALL=C
+
+# Location of Chromium-top-level sources.
+CHROMIUM_SRC=$(cd "$PROGDIR"/../.. >/dev/null && pwd 2>/dev/null)
+
+TMPDIR=
+GDBSERVER_PIDFILE=
+TARGET_GDBSERVER=
+COMMAND_PREFIX=
+COMMAND_SUFFIX=
+
+clean_exit () {
+  if [ "$TMPDIR" ]; then
+    GDBSERVER_PID=$(cat "$GDBSERVER_PIDFILE" 2>/dev/null)
+    if [ "$GDBSERVER_PID" ]; then
+      log "Killing background gdbserver process: $GDBSERVER_PID"
+      kill -9 $GDBSERVER_PID >/dev/null 2>&1
+      rm -f "$GDBSERVER_PIDFILE"
+    fi
+    if [ "$TARGET_GDBSERVER" ]; then
+      log "Removing target gdbserver binary: $TARGET_GDBSERVER."
+      "$ADB" shell "$COMMAND_PREFIX" rm "$TARGET_GDBSERVER" \
+          "$TARGET_DOMAIN_SOCKET" "$COMMAND_SUFFIX" >/dev/null 2>&1
+    fi
+    log "Cleaning up: $TMPDIR"
+    rm -rf "$TMPDIR"
+  fi
+  trap "" EXIT
+  exit $1
+}
+
+# Ensure clean exit on Ctrl-C or normal exit.
+trap "clean_exit 1" INT HUP QUIT TERM
+trap "clean_exit \$?" EXIT
+
+panic () {
+  echo "ERROR: $@" >&2
+  exit 1
+}
+
+fail_panic () {
+  if [ $? != 0 ]; then panic "$@"; fi
!= 0 ]; then panic "$@"; fi +} + +log () { + if [ "$VERBOSE" -gt 0 ]; then + echo "$@" + fi +} + +DEFAULT_PULL_LIBS_DIR="/tmp/adb-gdb-support-$USER" +IDE_DIR="$DEFAULT_PULL_LIBS_DIR" + +# NOTE: Allow wrapper scripts to set various defaults through ADB_GDB_XXX +# environment variables. This is only for cosmetic reasons, i.e. to +# display the proper program name in help and error messages. + +# Allow wrapper scripts to set the program name through ADB_GDB_PROGNAME +PROGNAME=${ADB_GDB_PROGNAME:-$(basename "$0")} + +ADB= +ANNOTATE= +CGDB= +GDBINIT= +GDBSERVER= +HELP= +IDE= +NDK_DIR= +NO_PULL_LIBS= +PACKAGE_NAME= +PID= +PORT= +PROGRAM_NAME="activity" +PULL_LIBS= +PULL_LIBS_DIR= +ATTACH_DELAY=1 +SU_PREFIX= +SYMBOL_DIR= +TARGET_ARCH= +TOOLCHAIN= +VERBOSE=0 + +for opt; do + optarg=$(expr "x$opt" : 'x[^=]*=\(.*\)') + case $opt in + --adb=*) + ADB=$optarg + ;; + --device=*) + export ANDROID_SERIAL=$optarg + ;; + --annotate=*) + ANNOTATE=$optarg + ;; + --gdbserver=*) + GDBSERVER=$optarg + ;; + --gdb=*) + GDB=$optarg + ;; + --help|-h|-?) + HELP=true + ;; + --ide) + IDE=true + ;; + --ndk-dir=*) + NDK_DIR=$optarg + ;; + --no-pull-libs) + NO_PULL_LIBS=true + ;; + --package-name=*) + PACKAGE_NAME=$optarg + ;; + --pid=*) + PID=$optarg + ;; + --port=*) + PORT=$optarg + ;; + --program-name=*) + PROGRAM_NAME=$optarg + ;; + --pull-libs) + PULL_LIBS=true + ;; + --pull-libs-dir=*) + PULL_LIBS_DIR=$optarg + ;; + --script=*) + GDBINIT=$optarg + ;; + --attach-delay=*) + ATTACH_DELAY=$optarg + ;; + --su-prefix=*) + SU_PREFIX=$optarg + ;; + --symbol-dir=*) + SYMBOL_DIR=$optarg + ;; + --output-directory=*) + CHROMIUM_OUTPUT_DIR=$optarg + ;; + --target-arch=*) + TARGET_ARCH=$optarg + ;; + --toolchain=*) + TOOLCHAIN=$optarg + ;; + --cgdb) + CGDB=cgdb + ;; + --cgdb=*) + CGDB=$optarg + ;; + --verbose) + VERBOSE=$(( $VERBOSE + 1 )) + ;; + -*) + panic "Unknown option $opt, see --help." + ;; + *) + if [ "$PACKAGE_NAME" ]; then + panic "You can only provide a single package name as argument!\ + See --help." + fi + PACKAGE_NAME=$opt + ;; + esac +done + +if [ "$HELP" ]; then + if [ "$ADB_GDB_PROGNAME" ]; then + # Assume wrapper scripts all provide a default package name. + cat <<EOF +Usage: $PROGNAME [options] + +Attach gdb to a running Android $PROGRAM_NAME process. +EOF + else + cat <<EOF +Usage: $PROGNAME [options] [<package-name>] + +Attach gdb to a running Android $PROGRAM_NAME process. + +If provided, <package-name> must be the package name of the Android +application to debug. You can also use --package-name=<name> to +specify it. +EOF + fi + + cat <<EOF + +This script requires a working Android NDK installation; it is located +automatically, but you can use --ndk-dir=<path> to specify an alternative NDK installation +directory. + +The script tries to find the most recent debug (unstripped) versions of the +shared libraries under one of the following directories: + + \$CHROMIUM_SRC/<out>/lib/ (used by GYP builds) + \$CHROMIUM_SRC/<out>/lib.unstripped/ (used by GN builds) + +Where <out> is determined by CHROMIUM_OUTPUT_DIR, or --output-directory. + +You can set the path manually via --symbol-dir. + +The script tries to extract the target architecture from your target device, +but if this fails, will default to 'arm'. Use --target-arch=<name> to force +its value. + +Otherwise, the script will complain, but you can use the --gdbserver, +--gdb and --symbol-dir options to specify everything manually. + +An alternative to --gdb=<file> is to use --toolchain=<path> to specify +the path to the host target-specific cross-toolchain. + +You will also need the 'adb' tool in your path. Otherwise, use the --adb +option. The script will complain if there is more than one device connected +and a device is not specified with either --device or ANDROID_SERIAL. + +The first time you use it on a device, the script will pull many system +libraries required by the process into a temporary directory.
This +is done to greatly improve the debugging experience, e.g. by allowing +readable thread stacks. The libraries are copied to the following +directory by default: + + $DEFAULT_PULL_LIBS_DIR/ + +But you can use the --pull-libs-dir=<path> option to specify an +alternative. The script can detect when you change the connected device, +and will re-pull the libraries only in this case. You can, however, force it +with the --pull-libs option. + +Any local .gdbinit script will be ignored, but it is possible to pass a +gdb command script with the --script=<file> option. Note that its commands +will be passed to gdb after the remote connection and library symbol +loading have completed. + +Valid options: + --help|-h|-? Print this message. + --verbose Increase verbosity. + + --cgdb[=<file>] Use cgdb (an interface for gdb that shows the code). + --symbol-dir=<path> Specify directory with symbol shared libraries. + --output-directory=<path> Specify the output directory (e.g. "out/Debug"). + --package-name=<name> Specify package name (alternative to 1st argument). + --program-name=<name> Specify program name (cosmetic only). + --pid=<pid> Specify application process pid. + --attach-delay=<num> Seconds to wait for gdbserver to attach to the + remote process before starting gdb. Default 1. + <num> may be a float if your sleep(1) supports it. + --annotate=<num> Enable gdb annotation. + --script=<file> Specify extra GDB init script. + + --gdbserver=<file> Specify target gdbserver binary. + --gdb=<file> Specify host gdb client binary. + --target-arch=<name> Specify NDK target arch. + --adb=<file> Specify host ADB binary. + --device=<serial> ADB device serial to use (-s flag). + --port=<port> Specify the tcp port to use. + --ide Forward gdb port, but do not enter gdb console. + + --su-prefix=<prefix> Prepend <prefix> to 'adb shell' commands that are + run by this script. This can be useful to use + the 'su' program on rooted production devices. + e.g. --su-prefix="su -c" + + --pull-libs Force system libraries extraction. + --no-pull-libs Do not extract any system library. + --pull-libs-dir=<path> Specify system libraries extraction directory. + +EOF + exit 0 +fi + +if [ -z "$PACKAGE_NAME" ]; then + panic "Please specify a package name on the command line. See --help." +fi + +if [[ -z "$SYMBOL_DIR" && -z "$CHROMIUM_OUTPUT_DIR" ]]; then + if [[ -e "build.ninja" ]]; then + CHROMIUM_OUTPUT_DIR=$PWD + else + panic "Please specify an output directory by using one of: + --output-directory=out/Debug + CHROMIUM_OUTPUT_DIR=out/Debug + Setting working directory to an output directory. + See --help." + fi +fi + +if ls *.so >/dev/null 2>&1; then + panic ".so files found in your working directory. These will conflict with" \ + "library lookup logic. Change your working directory and try again." +fi + +# Detect the build type and symbol directory. This is done by finding +# the most recent sub-directory containing debug shared libraries under +# $CHROMIUM_OUTPUT_DIR. +# +# Out: nothing, but this sets SYMBOL_DIR +# +detect_symbol_dir () { + # GYP places unstripped libraries under out/lib + # GN places them under out/lib.unstripped + local PARENT_DIR="$CHROMIUM_OUTPUT_DIR" + if [[ ! -e "$PARENT_DIR" ]]; then + PARENT_DIR="$CHROMIUM_SRC/$PARENT_DIR" + fi + SYMBOL_DIR="$PARENT_DIR/lib.unstripped" + if [[ -z "$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null)" ]]; then + SYMBOL_DIR="$PARENT_DIR/lib" + if [[ -z "$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null)" ]]; then + panic "Could not find any symbols under \ +$PARENT_DIR/lib{.unstripped}. Please build the program first!"
+ fi + fi + log "Auto-config: --symbol-dir=$SYMBOL_DIR" +} + +if [ -z "$SYMBOL_DIR" ]; then + detect_symbol_dir +elif [[ -z "$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null)" ]]; then + panic "Could not find any symbols under $SYMBOL_DIR" +fi + +if [ -z "$NDK_DIR" ]; then + ANDROID_NDK_ROOT=$(PYTHONPATH=$CHROMIUM_SRC/build/android python3 -c \ + 'from pylib.constants import ANDROID_NDK_ROOT; print(ANDROID_NDK_ROOT)') +else + if [ ! -d "$NDK_DIR" ]; then + panic "Invalid directory: $NDK_DIR" + fi + if [ ! -f "$NDK_DIR/ndk-build" ]; then + panic "Not a valid NDK directory: $NDK_DIR" + fi + ANDROID_NDK_ROOT=$NDK_DIR +fi + +if [ "$GDBINIT" -a ! -f "$GDBINIT" ]; then + panic "Unknown --script file: $GDBINIT" +fi + +# Check that ADB is in our path +if [ -z "$ADB" ]; then + ADB=$(which adb 2>/dev/null) + if [ -z "$ADB" ]; then + panic "Can't find 'adb' tool in your path. Install it or use \ +--adb=<file>" + fi + log "Auto-config: --adb=$ADB" +fi + +# Check that it works minimally +ADB_VERSION=$($ADB version 2>/dev/null) +echo "$ADB_VERSION" | fgrep -q -e "Android Debug Bridge" +if [ $? != 0 ]; then + panic "Your 'adb' tool seems invalid, use --adb=<file> to specify a \ +different one: $ADB" +fi + +# If more than one device is connected and ANDROID_SERIAL is not +# defined, print an error message. +NUM_DEVICES_PLUS2=$($ADB devices 2>/dev/null | wc -l) +if [ "$NUM_DEVICES_PLUS2" -gt 3 -a -z "$ANDROID_SERIAL" ]; then + echo "ERROR: There is more than one Android device connected to ADB." + echo "Please define ANDROID_SERIAL to specify which one to use." + exit 1 +fi + +# Run a command through adb shell, strip the extra \r from the output +# and return the correct status code to detect failures. This assumes +# that the adb shell command prints a final \n to stdout. +# $1+: command to run +# Out: command's stdout +# Return: command's status +# Note: the command's stderr is lost +adb_shell () { + local TMPOUT="$(mktemp)" + local LASTLINE RET + local ADB=${ADB:-adb} + + # The weird sed rule is to strip the final \r on each output line + # Since 'adb shell' never returns the command's proper exit/status code, + # we force it to print it as '%%<status>' in the temporary output file, + # which we will later strip from it. + $ADB shell $@ ";" echo "%%\$?" 2>/dev/null | \ + sed -e 's![[:cntrl:]]!!g' > $TMPOUT + # Get last line in log, which contains the exit code from the command + LASTLINE=$(sed -e '$!d' $TMPOUT) + # Extract the status code from the end of the line, which must + # be '%%<status>'. + RET=$(echo "$LASTLINE" | \ + awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,RSTART+2); } }') + # Remove the status code from the last line. Note that this may result + # in an empty line. + LASTLINE=$(echo "$LASTLINE" | \ + awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,1,RSTART-1); } }') + # The output itself: all lines except the status code. + sed -e '$d' $TMPOUT && printf "%s" "$LASTLINE" + # Remove temp file. + rm -f $TMPOUT + # Exit with the appropriate status. + return $RET +} + +# Find the target architecture from a local shared library. +# This returns an NDK-compatible architecture name. +# out: NDK Architecture name, or empty string. +get_gyp_target_arch () { + # ls prints a broken pipe error when there are a lot of libs.
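+ # Illustrative only (not from the original script): a typical 'file' + # output line matched by the patterns below looks like: + # libchrome.so: ELF 32-bit LSB shared object, ARM, EABI5 version 1 (SYSV), ...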
+ local RANDOM_LIB=$(ls "$SYMBOL_DIR"/lib*.so 2>/dev/null| head -n1) + local SO_DESC=$(file "$RANDOM_LIB") + case $SO_DESC in + *32-bit*ARM,*) echo "arm";; + *64-bit*ARM,*) echo "arm64";; + *32-bit*Intel,*) echo "x86";; + *x86-64,*) echo "x86_64";; + *32-bit*MIPS,*) echo "mips";; + *) echo "";; + esac +} + +if [ -z "$TARGET_ARCH" ]; then + TARGET_ARCH=$(get_gyp_target_arch) + if [ -z "$TARGET_ARCH" ]; then + TARGET_ARCH=arm + fi +else + # Nit: accept Chromium's 'ia32' as a valid target architecture. This + # script prefers the NDK 'x86' name because it is used to locate + # NDK-specific files (e.g. the host gdb). + if [ "$TARGET_ARCH" = "ia32" ]; then + TARGET_ARCH=x86 + log "Auto-config: --target-arch=$TARGET_ARCH (equivalent to ia32)" + fi +fi + +# Detect the NDK system name, i.e. the name used to identify the host. +# out: NDK system name (e.g. 'linux' or 'darwin') +get_ndk_host_system () { + local HOST_OS + if [ -z "$NDK_HOST_SYSTEM" ]; then + HOST_OS=$(uname -s) + case $HOST_OS in + Linux) NDK_HOST_SYSTEM=linux;; + Darwin) NDK_HOST_SYSTEM=darwin;; + *) panic "You can't run this script on this system: $HOST_OS";; + esac + fi + echo "$NDK_HOST_SYSTEM" +} + +# Detect the NDK host architecture name. +# out: NDK arch name (e.g. 'x86' or 'x86_64') +get_ndk_host_arch () { + local HOST_ARCH HOST_OS + if [ -z "$NDK_HOST_ARCH" ]; then + HOST_OS=$(get_ndk_host_system) + HOST_ARCH=$(uname -p) + if [ "$HOST_ARCH" = "unknown" ]; then + # In the case where "-p" returns "unknown", just use "-m" (machine + # hardware name). According to this patch from Fedora, "-p" is + # equivalent to "-m" anyway: https://goo.gl/Pd47x3 + HOST_ARCH=$(uname -m) + fi + case $HOST_ARCH in + i?86) NDK_HOST_ARCH=x86;; + x86_64|amd64) NDK_HOST_ARCH=x86_64;; + *) panic "You can't run this script on this host architecture: $HOST_ARCH";; + esac + # Darwin trick: "uname -p" always returns i386 on 64-bit installations. + if [ "$HOST_OS" = darwin -a "$NDK_HOST_ARCH" = "x86" ]; then + # Use '/usr/bin/file', not just 'file' to avoid buggy MacPorts + # implementations of the tool. See http://b.android.com/53769 + HOST_64BITS=$(/usr/bin/file -L "$SHELL" | grep -e "x86[_-]64") + if [ "$HOST_64BITS" ]; then + NDK_HOST_ARCH=x86_64 + fi + fi + fi + echo "$NDK_HOST_ARCH" +} + +# Convert an NDK architecture name into a GNU configure triplet. +# $1: NDK architecture name (e.g. 'arm') +# Out: Android GNU configure triplet (e.g. 'arm-linux-androideabi') +get_arch_gnu_config () { + case $1 in + arm) + echo "arm-linux-androideabi" + ;; + arm64) + echo "aarch64-linux-android" + ;; + x86) + echo "i686-linux-android" + ;; + x86_64) + echo "x86_64-linux-android" + ;; + mips) + echo "mipsel-linux-android" + ;; + *) + echo "$1-linux-android" + ;; + esac +} + +# Convert an NDK architecture name into a toolchain name prefix +# $1: NDK architecture name (e.g. 'arm') +# Out: NDK toolchain name prefix (e.g. 'arm-linux-androideabi') +get_arch_toolchain_prefix () { + # Return the configure triplet, except for x86 and x86_64! + if [ "$1" = "x86" -o "$1" = "x86_64" ]; then + echo "$1" + else + get_arch_gnu_config $1 + fi +} + +# Find an NDK toolchain prebuilt file or sub-directory. +# This will probe the various arch-specific toolchain directories +# in the NDK for the needed file. +# $1: NDK install path +# $2: NDK architecture name +# $3: prebuilt sub-path to look for. +# Out: file path, or empty if none is found.
+get_ndk_toolchain_prebuilt () { + local NDK_DIR="${1%/}" + local ARCH="$2" + local SUBPATH="$3" + local NAME="$(get_arch_toolchain_prefix $ARCH)" + local FILE TARGET + FILE=$NDK_DIR/toolchains/$NAME-4.9/prebuilt/$SUBPATH + if [ ! -f "$FILE" ]; then + FILE=$NDK_DIR/toolchains/$NAME-4.8/prebuilt/$SUBPATH + if [ ! -f "$FILE" ]; then + FILE= + fi + fi + echo "$FILE" +} + +# $1: NDK install path +get_ndk_host_gdb_client() { + local NDK_DIR="$1" + local HOST_OS HOST_ARCH + + HOST_OS=$(get_ndk_host_system) + HOST_ARCH=$(get_ndk_host_arch) + echo "$NDK_DIR/prebuilt/$HOST_OS-$HOST_ARCH/bin/gdb" +} + +# $1: NDK install path +# $2: target architecture. +get_ndk_gdbserver () { + local NDK_DIR="$1" + local ARCH=$2 + local BINARY + + # The location moved after NDK r8. + BINARY=$NDK_DIR/prebuilt/android-$ARCH/gdbserver/gdbserver + if [ ! -f "$BINARY" ]; then + BINARY=$(get_ndk_toolchain_prebuilt "$NDK_DIR" "$ARCH" gdbserver) + fi + echo "$BINARY" +} + +# Find host GDB client binary +if [ -z "$GDB" ]; then + GDB=$(get_ndk_host_gdb_client "$ANDROID_NDK_ROOT") + if [ -z "$GDB" ]; then + panic "Can't find the Android gdb client in your path; check your \ +--toolchain or --gdb path." + fi + log "Host gdb client: $GDB" +fi + +# Find the gdbserver binary; we will later push it to /data/local/tmp. +# This ensures that both gdbserver and $GDB talk the same binary protocol; +# otherwise, weird problems will appear. +# +if [ -z "$GDBSERVER" ]; then + GDBSERVER=$(get_ndk_gdbserver "$ANDROID_NDK_ROOT" "$TARGET_ARCH") + if [ -z "$GDBSERVER" ]; then + panic "Can't find the NDK gdbserver binary; use --gdbserver to specify \ +a valid one!" + fi + log "Auto-config: --gdbserver=$GDBSERVER" +fi + +# A unique ID for this script's session. This needs to be the same in all +# sub-shell commands we're going to launch, so take the PID of the launcher +# process. +TMP_ID=$$ + +# Temporary directory, will get cleaned up on exit. +TMPDIR=/tmp/$USER-adb-gdb-tmp-$TMP_ID +mkdir -p "$TMPDIR" && rm -rf "$TMPDIR"/* + +GDBSERVER_PIDFILE="$TMPDIR"/gdbserver-$TMP_ID.pid + +# Return the timestamp of a given file, as number of seconds since epoch. +# $1: file path +# Out: file timestamp +get_file_timestamp () { + stat -c %Y "$1" 2>/dev/null +} + +# Allow several concurrent debugging sessions +APP_DATA_DIR=$(adb_shell run-as $PACKAGE_NAME /system/bin/sh -c pwd) +fail_panic "Failed to run-as $PACKAGE_NAME, is the app debuggable?" +TARGET_GDBSERVER="$APP_DATA_DIR/gdbserver-adb-gdb-$TMP_ID" +TMP_TARGET_GDBSERVER=/data/local/tmp/gdbserver-adb-gdb-$TMP_ID + +# Select correct app_process for architecture. +case $TARGET_ARCH in + arm|x86|mips) GDBEXEC=app_process32;; + arm64|x86_64) GDBEXEC=app_process64; SUFFIX_64_BIT=64;; + *) panic "Unknown app_process for architecture!";; +esac + +# Default to app_process if bit-width specific process isn't found. +adb_shell ls /system/bin/$GDBEXEC > /dev/null +if [ $? != 0 ]; then + GDBEXEC=app_process +fi + +# Detect AddressSanitizer setup on the device. In that case app_process is a +# script, and the real executable is app_process.real. +GDBEXEC_ASAN=app_process.real +adb_shell ls /system/bin/$GDBEXEC_ASAN > /dev/null +if [ $?
== 0 ]; then + GDBEXEC=$GDBEXEC_ASAN +fi + +ORG_PULL_LIBS_DIR=$PULL_LIBS_DIR +if [[ -n "$ANDROID_SERIAL" ]]; then + DEFAULT_PULL_LIBS_DIR="$DEFAULT_PULL_LIBS_DIR/$ANDROID_SERIAL-$SUFFIX_64_BIT" +fi +PULL_LIBS_DIR=${PULL_LIBS_DIR:-$DEFAULT_PULL_LIBS_DIR} + +HOST_FINGERPRINT= +DEVICE_FINGERPRINT=$(adb_shell getprop ro.build.fingerprint) +[[ "$DEVICE_FINGERPRINT" ]] || panic "Failed to get the device fingerprint" +log "Device build fingerprint: $DEVICE_FINGERPRINT" + +if [ ! -f "$PULL_LIBS_DIR/build.fingerprint" ]; then + log "Auto-config: --pull-libs (no cached libraries)" + PULL_LIBS=true +else + HOST_FINGERPRINT=$(< "$PULL_LIBS_DIR/build.fingerprint") + log "Host build fingerprint: $HOST_FINGERPRINT" + if [ "$HOST_FINGERPRINT" == "$DEVICE_FINGERPRINT" ]; then + log "Auto-config: --no-pull-libs (fingerprint match)" + NO_PULL_LIBS=true + else + log "Auto-config: --pull-libs (fingerprint mismatch)" + PULL_LIBS=true + fi +fi + +# If requested, work with Emacs's M-x gdb. The gdb indirections make it +# difficult to pass --annotate=3 to the gdb binary itself. +if [ "$ANNOTATE" ]; then + GDB_ARGS=$GDB_ARGS" --annotate=$ANNOTATE" +fi + +# Use the PID given by --pid, or else find the PID of the +# browser process. +if [ -z "$PID" ]; then + PROCESSNAME=$PACKAGE_NAME + if [ -z "$PID" ]; then + PID=$(adb_shell ps | \ + awk '$9 == "'$PROCESSNAME'" { print $2; }' | head -1) + fi + if [ -z "$PID" ]; then + panic "Can't find application process PID." + fi + log "Found process PID: $PID" +fi + +# Determine if 'adb shell' runs as root or not. +# If so, we can launch gdbserver directly; otherwise, we have to +# use run-as $PACKAGE_NAME ..., which requires the package to be debuggable. +# +if [ "$SU_PREFIX" ]; then + # Need to check that this works properly. + SU_PREFIX_TEST_LOG=$TMPDIR/su-prefix.log + adb_shell $SU_PREFIX \"echo "foo"\" > $SU_PREFIX_TEST_LOG 2>&1 + if [ $? != 0 -o "$(cat $SU_PREFIX_TEST_LOG)" != "foo" ]; then + echo "ERROR: Cannot use '$SU_PREFIX' as a valid su prefix:" + echo "$ adb shell $SU_PREFIX \"echo foo\"" + cat $SU_PREFIX_TEST_LOG + exit 1 + fi + COMMAND_PREFIX="$SU_PREFIX \"" + COMMAND_SUFFIX="\"" +else + SHELL_UID=$("$ADB" shell cat /proc/self/status | \ + awk '$1 == "Uid:" { print $2; }') + log "Shell UID: $SHELL_UID" + if [ "$SHELL_UID" != 0 -o -n "$NO_ROOT" ]; then + COMMAND_PREFIX="run-as $PACKAGE_NAME" + COMMAND_SUFFIX= + else + COMMAND_PREFIX= + COMMAND_SUFFIX= + fi +fi +log "Command prefix: '$COMMAND_PREFIX'" +log "Command suffix: '$COMMAND_SUFFIX'" + +mkdir -p "$PULL_LIBS_DIR" +fail_panic "Can't create --pull-libs-dir directory: $PULL_LIBS_DIR" + +# Pull device's system libraries that are mapped by our process. +# Pulling all system libraries takes too long, so determine which ones +# we need by looking at /proc/$PID/maps instead. +if [ "$PULL_LIBS" -a -z "$NO_PULL_LIBS" ]; then + echo "Extracting system libraries into: $PULL_LIBS_DIR" + MAPPINGS=$(adb_shell $COMMAND_PREFIX cat /proc/$PID/maps $COMMAND_SUFFIX) + if [ $? != 0 ]; then + echo "ERROR: Could not list process's memory mappings." + if [ "$SU_PREFIX" ]; then + panic "Are you sure your --su-prefix is correct?" + else + panic "Use --su-prefix if the application is not debuggable." + fi + fi + # Remove the fingerprint file in case pulling one of the libs fails.
+ rm -f "$PULL_LIBS_DIR/build.fingerprint" + SYSTEM_LIBS=$(echo "$MAPPINGS" | \ + awk '$6 ~ /\/(system|apex|vendor)\/.*\.so$/ { print $6; }' | sort -u) + for SYSLIB in /system/bin/linker$SUFFIX_64_BIT $SYSTEM_LIBS; do + echo "Pulling from device: $SYSLIB" + DST_FILE=$PULL_LIBS_DIR$SYSLIB + DST_DIR=$(dirname "$DST_FILE") + mkdir -p "$DST_DIR" && "$ADB" pull $SYSLIB "$DST_FILE" 2>/dev/null + fail_panic "Could not pull $SYSLIB from device !?" + done + echo "Writing the device fingerprint" + echo "$DEVICE_FINGERPRINT" > "$PULL_LIBS_DIR/build.fingerprint" +fi + +# Pull the app_process binary from the device. +log "Pulling $GDBEXEC from device" +"$ADB" pull /system/bin/$GDBEXEC "$TMPDIR"/$GDBEXEC &>/dev/null +fail_panic "Could not retrieve $GDBEXEC from the device!" + +# Find all the sub-directories of $PULL_LIBS_DIR, up to depth 4 +# so we can add them to solib-search-path later. +SOLIB_DIRS=$(find $PULL_LIBS_DIR -mindepth 1 -maxdepth 4 -type d | \ + grep -v "^$" | tr '\n' ':') +SOLIB_DIRS=${SOLIB_DIRS%:} # Strip trailing : + +# Applications with minSdkVersion >= 24 will have their data directories +# created with rwx------ permissions, preventing adbd from forwarding to +# the gdbserver socket. +adb_shell $COMMAND_PREFIX chmod a+x $APP_DATA_DIR $COMMAND_SUFFIX + +# Push gdbserver to the device +log "Pushing gdbserver $GDBSERVER to $TARGET_GDBSERVER" +"$ADB" push $GDBSERVER $TMP_TARGET_GDBSERVER >/dev/null && \ + adb_shell $COMMAND_PREFIX cp $TMP_TARGET_GDBSERVER $TARGET_GDBSERVER $COMMAND_SUFFIX && \ + adb_shell rm $TMP_TARGET_GDBSERVER +fail_panic "Could not copy gdbserver to the device!" + +if [ -z "$PORT" ]; then + # Random port to allow multiple concurrent sessions. + PORT=$(( $RANDOM % 1000 + 5039 )) +fi +HOST_PORT=$PORT +TARGET_DOMAIN_SOCKET=$APP_DATA_DIR/gdb-socket-$HOST_PORT + +# Setup network redirection +log "Setting network redirection (host:$HOST_PORT -> device:$TARGET_DOMAIN_SOCKET)" +"$ADB" forward tcp:$HOST_PORT localfilesystem:$TARGET_DOMAIN_SOCKET +fail_panic "Could not setup network redirection from \ +host:localhost:$HOST_PORT to device:$TARGET_DOMAIN_SOCKET" + +# Start gdbserver in the background +# Note that using run-as requires the package to be debuggable. +# +# If not, this will fail horribly. The alternative is to run the +# program as root, which requires of course root privileges. +# Maybe we should add a --root option to enable this? +# + +for i in 1 2; do + log "Starting gdbserver in the background:" + GDBSERVER_LOG=$TMPDIR/gdbserver-$TMP_ID.log + log "adb shell $COMMAND_PREFIX $TARGET_GDBSERVER \ + --once +$TARGET_DOMAIN_SOCKET \ + --attach $PID $COMMAND_SUFFIX" + "$ADB" shell $COMMAND_PREFIX $TARGET_GDBSERVER \ + --once +$TARGET_DOMAIN_SOCKET \ + --attach $PID $COMMAND_SUFFIX > $GDBSERVER_LOG 2>&1 & + GDBSERVER_PID=$! + echo "$GDBSERVER_PID" > $GDBSERVER_PIDFILE + log "background job pid: $GDBSERVER_PID" + + # Sleep to allow gdbserver to attach to the remote process and be + # ready to connect to. + log "Sleeping ${ATTACH_DELAY}s to ensure gdbserver is alive" + sleep "$ATTACH_DELAY" + log "Job control: $(jobs -l)" + STATE=$(jobs -l | awk '$2 == "'$GDBSERVER_PID'" { print $3; }') + if [ "$STATE" != "Running" ]; then + pid_msg=$(grep "is already traced by process" $GDBSERVER_LOG 2>/dev/null) + if [[ -n "$pid_msg" ]]; then + old_pid=${pid_msg##* } + old_pid=${old_pid//[$'\r\n']} # Trim trailing \r. 
+ echo "Killing previous gdb server process (pid=$old_pid)" + adb_shell $COMMAND_PREFIX kill -9 $old_pid $COMMAND_SUFFIX + continue + fi + echo "ERROR: GDBServer either failed to run or attach to PID $PID!" + echo "Here is the output from gdbserver (also try --verbose for more):" + echo "===== gdbserver.log start =====" + cat $GDBSERVER_LOG + echo ="===== gdbserver.log end ======" + exit 1 + fi + break +done + +# Generate a file containing useful GDB initialization commands +readonly COMMANDS=$TMPDIR/gdb.init +log "Generating GDB initialization commands file: $COMMANDS" +cat > "$COMMANDS" < timeout_seconds: + print("Error: unable to connect to device.") + print(e) + return False + time.sleep(min(0.25, time_left)) + +print("Connecting to :$HOST_PORT...") +if target_remote_with_retry(':$HOST_PORT', 5): + print("Attached! Reading symbols (takes ~30 seconds).") +end +EOF + +if [ "$GDBINIT" ]; then + cat "$GDBINIT" >> "$COMMANDS" +fi + +if [ "$VERBOSE" -gt 0 ]; then + echo "### START $COMMANDS" + cat "$COMMANDS" + echo "### END $COMMANDS" +fi + +if [ "$IDE" ]; then + mkdir -p "$IDE_DIR" + SYM_GDB="$IDE_DIR/gdb" + SYM_EXE="$IDE_DIR/app_process" + SYM_INIT="$IDE_DIR/gdbinit" + ln -sf "$TMPDIR/$GDBEXEC" "$SYM_EXE" + ln -sf "$COMMANDS" "$SYM_INIT" + # gdb doesn't work when symlinked, so create a wrapper. + echo + cat > $SYM_GDB < [] + +This script will repeatedly poll adb for new devices and save logcats +inside the directory, which it attempts to create. The +script will run until killed by an external signal. To test, run the +script in a shell and -C it after a while. It should be +resilient across phone disconnects and reconnects and start the logcat +early enough to not miss anything. +""" + + +import logging +import os +import re +import shutil +import signal +import subprocess +import sys +import time + +# Map from device_id -> (process, logcat_num) +devices = {} + + +class TimeoutException(Exception): + """Exception used to signal a timeout.""" + + +class SigtermError(Exception): + """Exception used to catch a sigterm.""" + + +def StartLogcatIfNecessary(device_id, adb_cmd, base_dir): + """Spawns a adb logcat process if one is not currently running.""" + process, logcat_num = devices[device_id] + if process: + if process.poll() is None: + # Logcat process is still happily running + return + logging.info('Logcat for device %s has died', device_id) + error_filter = re.compile('- waiting for device -') + for line in process.stderr: + if not error_filter.match(line): + logging.error(device_id + ': ' + line) + + logging.info('Starting logcat %d for device %s', logcat_num, + device_id) + logcat_filename = 'logcat_%s_%03d' % (device_id, logcat_num) + logcat_file = open(os.path.join(base_dir, logcat_filename), 'w') + process = subprocess.Popen([adb_cmd, '-s', device_id, + 'logcat', '-v', 'threadtime'], + stdout=logcat_file, + stderr=subprocess.PIPE) + devices[device_id] = (process, logcat_num + 1) + + +def GetAttachedDevices(adb_cmd): + """Gets the device list from adb. + + We use an alarm in this function to avoid deadlocking from an external + dependency. 
+ + Args: + adb_cmd: path to the adb binary + + Returns: + list of devices or an empty list on timeout + """ + signal.alarm(2) + try: + out, err = subprocess.Popen([adb_cmd, 'devices'], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE).communicate() + if err: + logging.warning('adb device error %s', err.strip()) + return re.findall('^(\\S+)\tdevice$', out.decode('latin1'), re.MULTILINE) + except TimeoutException: + logging.warning('"adb devices" command timed out') + return [] + except (IOError, OSError): + logging.exception('Exception from "adb devices"') + return [] + finally: + signal.alarm(0) + + +def main(base_dir, adb_cmd='adb'): + """Monitor adb forever. Expects a SIGINT (Ctrl-C) to kill.""" + # We create the directory to ensure 'run once' semantics + if os.path.exists(base_dir): + print('adb_logcat_monitor: %s already exists? Cleaning' % base_dir) + shutil.rmtree(base_dir, ignore_errors=True) + + os.makedirs(base_dir) + logging.basicConfig(filename=os.path.join(base_dir, 'eventlog'), + level=logging.INFO, + format='%(asctime)-2s %(levelname)-8s %(message)s') + + # Set up the alarm for calling 'adb devices'. This is to ensure + # our script doesn't get stuck waiting for a process response + def TimeoutHandler(_signum, _unused_frame): + raise TimeoutException() + signal.signal(signal.SIGALRM, TimeoutHandler) + + # Handle SIGTERMs to ensure clean shutdown + def SigtermHandler(_signum, _unused_frame): + raise SigtermError() + signal.signal(signal.SIGTERM, SigtermHandler) + + logging.info('Started with pid %d', os.getpid()) + pid_file_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID') + + try: + with open(pid_file_path, 'w') as f: + f.write(str(os.getpid())) + while True: + for device_id in GetAttachedDevices(adb_cmd): + if device_id not in devices: + subprocess.call([adb_cmd, '-s', device_id, 'logcat', '-c']) + devices[device_id] = (None, 0) + + for device in devices: + # This will spawn logcat watchers for any device ever detected + StartLogcatIfNecessary(device, adb_cmd, base_dir) + + time.sleep(5) + except SigtermError: + logging.info('Received SIGTERM, shutting down') + except: # pylint: disable=bare-except + logging.exception('Unexpected exception in main.') + finally: + for process, _ in devices.values(): + if process: + try: + process.terminate() + except OSError: + pass + os.remove(pid_file_path) + + +if __name__ == '__main__': + logging.basicConfig(level=logging.INFO) + if 2 <= len(sys.argv) <= 3: + print('adb_logcat_monitor: Initializing') + if len(sys.argv) == 2: + sys.exit(main(sys.argv[1])) + sys.exit(main(sys.argv[1], sys.argv[2])) + + print('Usage: %s <base_dir> [<adb_binary_path>]' % sys.argv[0]) diff --git a/android/adb_logcat_printer.py b/android/adb_logcat_printer.py new file mode 100755 index 000000000000..7f3c52aa74ea --- /dev/null +++ b/android/adb_logcat_printer.py @@ -0,0 +1,222 @@ +#!/usr/bin/env python3 +# +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Shut down adb_logcat_monitor and print accumulated logs. + +To test, call './adb_logcat_printer.py <base_dir>' where +<base_dir> contains 'adb logcat -v threadtime' files named as +logcat_<device_id>_<file_num> + +The script will print the files to stdout (or to --output-path, if given), +and will combine multiple logcats from a single device if there is overlap. + +Additionally, if a <base_dir>/LOGCAT_MONITOR_PID exists, the script +will attempt to terminate the contained PID by sending a SIGTERM and +monitoring for the deletion of the aforementioned file.
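+ +Example (hypothetical paths): + ./adb_logcat_printer.py --output-path /tmp/combined_logcat.txt /tmp/logcat_dir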
+""" +# pylint: disable=W0702 + +import argparse +import io +import logging +import os +import re +import signal +import sys +import time + + +# Set this to debug for more verbose output +LOG_LEVEL = logging.INFO + + +def CombineLogFiles(list_of_lists, logger): + """Splices together multiple logcats from the same device. + + Args: + list_of_lists: list of pairs (filename, list of timestamped lines) + logger: handler to log events + + Returns: + list of lines with duplicates removed + """ + cur_device_log = [''] + for cur_file, cur_file_lines in list_of_lists: + # Ignore files with just the logcat header + if len(cur_file_lines) < 2: + continue + common_index = 0 + # Skip this step if list just has empty string + if len(cur_device_log) > 1: + try: + line = cur_device_log[-1] + # Used to make sure we only splice on a timestamped line + if re.match(r'^\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3} ', line): + common_index = cur_file_lines.index(line) + else: + logger.warning('splice error - no timestamp in "%s"?', line.strip()) + except ValueError: + # The last line was valid but wasn't found in the next file + cur_device_log += ['***** POSSIBLE INCOMPLETE LOGCAT *****'] + logger.info('Unable to splice %s. Incomplete logcat?', cur_file) + + cur_device_log += ['*'*30 + ' %s' % cur_file] + cur_device_log.extend(cur_file_lines[common_index:]) + + return cur_device_log + + +def FindLogFiles(base_dir): + """Search a directory for logcat files. + + Args: + base_dir: directory to search + + Returns: + Mapping of device_id to a sorted list of file paths for a given device + """ + logcat_filter = re.compile(r'^logcat_(\S+)_(\d+)$') + # list of tuples (, , ) + filtered_list = [] + for cur_file in os.listdir(base_dir): + matcher = logcat_filter.match(cur_file) + if matcher: + filtered_list += [(matcher.group(1), int(matcher.group(2)), + os.path.join(base_dir, cur_file))] + filtered_list.sort() + file_map = {} + for device_id, _, cur_file in filtered_list: + if device_id not in file_map: + file_map[device_id] = [] + + file_map[device_id] += [cur_file] + return file_map + + +def GetDeviceLogs(log_filenames, logger): + """Read log files, combine and format. + + Args: + log_filenames: mapping of device_id to sorted list of file paths + logger: logger handle for logging events + + Returns: + list of formatted device logs, one for each device. + """ + device_logs = [] + + for device, device_files in log_filenames.items(): + logger.debug('%s: %s', device, str(device_files)) + device_file_lines = [] + for cur_file in device_files: + with open(cur_file) as f: + device_file_lines += [(cur_file, f.read().splitlines())] + combined_lines = CombineLogFiles(device_file_lines, logger) + # Prepend each line with a short unique ID so it's easy to see + # when the device changes. We don't use the start of the device + # ID because it can be the same among devices. 
Example lines: + # AB324: foo + # AB324: blah + device_logs += [('\n' + device[-5:] + ': ').join(combined_lines)] + return device_logs + + +def ShutdownLogcatMonitor(base_dir, logger): + """Attempts to shut down adb_logcat_monitor and blocks while waiting.""" + try: + monitor_pid_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID') + with open(monitor_pid_path) as f: + monitor_pid = int(f.readline()) + + logger.info('Sending SIGTERM to %d', monitor_pid) + os.kill(monitor_pid, signal.SIGTERM) + i = 0 + while True: + time.sleep(.2) + if not os.path.exists(monitor_pid_path): + return + if not os.path.exists('/proc/%d' % monitor_pid): + logger.warning('Monitor (pid %d) terminated uncleanly?', monitor_pid) + return + logger.info('Waiting for logcat process to terminate.') + i += 1 + if i >= 10: + logger.warning('Monitor pid did not terminate. Continuing anyway.') + return + + except (ValueError, IOError, OSError): + logger.exception('Error signaling logcat monitor - continuing') + + +def main(argv): + parser = argparse.ArgumentParser() + parser.add_argument( + '--output-path', + help='Output file path (if unspecified, prints to stdout)') + parser.add_argument('log_dir') + args = parser.parse_args(argv) + base_dir = args.log_dir + + log_stringio = io.StringIO() + logger = logging.getLogger('LogcatPrinter') + logger.setLevel(LOG_LEVEL) + sh = logging.StreamHandler(log_stringio) + sh.setFormatter(logging.Formatter('%(asctime)-2s %(levelname)-8s' + ' %(message)s')) + logger.addHandler(sh) + + if args.output_path: + if not os.path.exists(os.path.dirname(args.output_path)): + logger.warning('Output dir %s doesn\'t exist. Creating it.', + os.path.dirname(args.output_path)) + os.makedirs(os.path.dirname(args.output_path)) + output_file = open(args.output_path, 'w') + logger.info( + 'Dumping logcat to local file %s. If running in a build, ' + 'this file will likely be uploaded to Google Storage ' + 'in a later step. It can be downloaded from there.', args.output_path) + else: + output_file = sys.stdout + + try: + # Wait at least 5 seconds after base_dir is created before printing. + # + # The idea is that 'adb logcat > file' output consists of 2 phases: + # 1. Dump all the saved logs to the file + # 2. Stream log messages as they are generated + # + # We want to give enough time for phase 1 to complete. There's no + # good method to tell how long to wait, but it usually only takes a + # second. On most bots, this code path won't occur at all, since + # the adb_logcat_monitor.py command will have been spawned more than + # 5 seconds prior to this script being called. + try: + sleep_time = 5 - (time.time() - os.path.getctime(base_dir)) + except OSError: + sleep_time = 5 + if sleep_time > 0: + logger.warning('Monitor just started?
Sleeping %.1fs', sleep_time) + time.sleep(sleep_time) + + assert os.path.exists(base_dir), '%s does not exist' % base_dir + ShutdownLogcatMonitor(base_dir, logger) + separator = '\n' + '*' * 80 + '\n\n' + for log in GetDeviceLogs(FindLogFiles(base_dir), logger): + output_file.write(log) + output_file.write(separator) + with open(os.path.join(base_dir, 'eventlog')) as f: + output_file.write('\nLogcat Monitor Event Log\n') + output_file.write(f.read()) + except: + logger.exception('Unexpected exception') + + logger.info('Done.') + sh.flush() + output_file.write('\nLogcat Printer Event Log\n') + output_file.write(log_stringio.getvalue()) + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/android/adb_profile_chrome b/android/adb_profile_chrome new file mode 100755 index 000000000000..27ecb6d7cf36 --- /dev/null +++ b/android/adb_profile_chrome @@ -0,0 +1,9 @@ +#!/bin/bash +# +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Start / stop profiling in chrome. +CATAPULT_DIR="$(dirname "$0")"/../../third_party/catapult +exec "${CATAPULT_DIR}"/systrace/bin/adb_profile_chrome "$@" diff --git a/android/adb_profile_chrome_startup b/android/adb_profile_chrome_startup new file mode 100755 index 000000000000..bb639b9d39eb --- /dev/null +++ b/android/adb_profile_chrome_startup @@ -0,0 +1,9 @@ +#!/bin/bash +# +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Start / stop profiling for chrome startup. +CATAPULT_DIR="$(dirname "$0")"/../../third_party/catapult +exec "${CATAPULT_DIR}"/systrace/bin/adb_profile_chrome_startup "$@" diff --git a/android/adb_reverse_forwarder.py b/android/adb_reverse_forwarder.py new file mode 100755 index 000000000000..c78f44d2ec2b --- /dev/null +++ b/android/adb_reverse_forwarder.py @@ -0,0 +1,87 @@ +#!/usr/bin/env vpython3 +# +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Command line tool for forwarding ports from a device to the host. + +Allows an Android device to connect to services running on the host machine, +i.e., "adb forward" in reverse. Requires |host_forwarder| and |device_forwarder| +to be built. 
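+ +Example (hypothetical ports; each pair is device_port host_port): + adb_reverse_forwarder.py 8080 3000 5000 5000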
+""" + +import argparse +import sys +import time + +import devil_chromium + +from devil.android import device_denylist +from devil.android import device_utils +from devil.android import forwarder +from devil.utils import run_tests_helper + +from pylib import constants + + +def main(argv): + parser = argparse.ArgumentParser( + usage='Usage: %(prog)s [options] device_port ' + 'host_port [device_port_2 host_port_2] ...', + description=__doc__) + parser.add_argument( + '-v', '--verbose', + dest='verbose_count', + default=0, + action='count', + help='Verbose level (multiple times for more)') + parser.add_argument( + '--device', + help='Serial number of device we should use.') + parser.add_argument('--denylist-file', help='Device denylist JSON file.') + parser.add_argument( + '--debug', + action='store_const', + const='Debug', + dest='build_type', + default='Release', + help='DEPRECATED: use --output-directory instead.') + parser.add_argument( + '--output-directory', + help='Path to the root build directory.') + parser.add_argument( + 'ports', + nargs='+', + type=int, + help='Port pair to reverse forward.') + + args = parser.parse_args(argv) + run_tests_helper.SetLogLevel(args.verbose_count) + + if len(args.ports) < 2 or len(args.ports) % 2: + parser.error('Need even number of port pairs') + + port_pairs = list(zip(args.ports[::2], args.ports[1::2])) + + if args.build_type: + constants.SetBuildType(args.build_type) + if args.output_directory: + constants.SetOutputDirectory(args.output_directory) + devil_chromium.Initialize(output_directory=constants.GetOutDirectory()) + + denylist = (device_denylist.Denylist(args.denylist_file) + if args.denylist_file else None) + device = device_utils.DeviceUtils.HealthyDevices(denylist=denylist, + device_arg=args.device)[0] + try: + forwarder.Forwarder.Map(port_pairs, device) + while True: + time.sleep(60) + except KeyboardInterrupt: + sys.exit(0) + finally: + forwarder.Forwarder.UnmapAllDevicePorts(device) + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/android/adb_system_webengine_command_line b/android/adb_system_webengine_command_line new file mode 100755 index 000000000000..2dce6d25fd18 --- /dev/null +++ b/android/adb_system_webengine_command_line @@ -0,0 +1,16 @@ +#!/bin/bash +# +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# If no flags are given, prints the current content shell flags. +# +# Otherwise, the given flags are used to REPLACE (not modify) the content shell +# flags. For example: +# adb_system_webengine_command_line --enable-webgl +# +# To remove all content shell flags, pass an empty string for the flags: +# adb_system_webengine_command_line "" + +exec $(dirname $0)/adb_command_line.py --name weblayer-command-line "$@" diff --git a/android/adb_system_webview_command_line b/android/adb_system_webview_command_line new file mode 100755 index 000000000000..6b9fb4ee70c6 --- /dev/null +++ b/android/adb_system_webview_command_line @@ -0,0 +1,16 @@ +#!/bin/bash +# +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# If no flags are given, prints the current content shell flags. +# +# Otherwise, the given flags are used to REPLACE (not modify) the content shell +# flags. 
For example: +# adb_system_webview_command_line --enable-webgl +# +# To remove all WebView flags, pass an empty string for the flags: +# adb_system_webview_command_line "" + +exec "$(dirname "$0")"/adb_command_line.py --name webview-command-line "$@" diff --git a/android/android_only_explicit_jni_exports.lst b/android/android_only_explicit_jni_exports.lst new file mode 100644 index 000000000000..eb7b1f2bed68 --- /dev/null +++ b/android/android_only_explicit_jni_exports.lst @@ -0,0 +1,13 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Linker script that exports only JNI_OnLoad. +# Should be used for libraries that do explicit JNI registration. + +{ + global: + JNI_OnLoad; + local: + *; +}; diff --git a/android/android_only_jni_exports.lst b/android/android_only_jni_exports.lst new file mode 100644 index 000000000000..c44cb9b9232f --- /dev/null +++ b/android/android_only_jni_exports.lst @@ -0,0 +1,13 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Linker script that exports only symbols required for JNI to work. + +{ + global: + JNI_OnLoad; + Java_*; + local: + *; +}; diff --git a/android/apk_operations.py b/android/apk_operations.py new file mode 100755 index 000000000000..2838240e7d46 --- /dev/null +++ b/android/apk_operations.py @@ -0,0 +1,2147 @@ +#!/usr/bin/env vpython3 +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Using colorama.Fore/Back/Style members +# pylint: disable=no-member + + +import argparse +import collections +import json +import logging +import os +import pipes +import posixpath +import random +import re +import shlex +import shutil +import subprocess +import sys +import tempfile +import textwrap +import zipfile + +import adb_command_line +import devil_chromium +from devil import devil_env +from devil.android import apk_helper +from devil.android import device_errors +from devil.android import device_utils +from devil.android import flag_changer +from devil.android.sdk import adb_wrapper +from devil.android.sdk import build_tools +from devil.android.sdk import intent +from devil.android.sdk import version_codes +from devil.utils import run_tests_helper + +_DIR_SOURCE_ROOT = os.path.normpath( + os.path.join(os.path.dirname(__file__), '..', '..')) +_JAVA_HOME = os.path.join(_DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current') + +with devil_env.SysPath( + os.path.join(_DIR_SOURCE_ROOT, 'third_party', 'colorama', 'src')): + import colorama + +from incremental_install import installer +from pylib import constants +from pylib.symbols import deobfuscator +from pylib.utils import simpleperf +from pylib.utils import app_bundle_utils + +with devil_env.SysPath( + os.path.join(_DIR_SOURCE_ROOT, 'build', 'android', 'gyp')): + import bundletool + +BASE_MODULE = 'base' + + +def _Colorize(text, style=''): + return (style + + text + + colorama.Style.RESET_ALL) + + +def _InstallApk(devices, apk, install_dict): + def install(device): + if install_dict: + installer.Install(device, install_dict, apk=apk, permissions=[]) + else: + device.Install(apk, permissions=[], allow_downgrade=True, reinstall=True) + + logging.info('Installing %sincremental apk.', '' if install_dict else 'non-') + device_utils.DeviceUtils.parallel(devices).pMap(install) + + +# A named tuple containing the information
needed to convert a bundle into +# an installable .apks archive. +# Fields: +# bundle_path: Path to input bundle file. +# bundle_apks_path: Path to output bundle .apks archive file. +# aapt2_path: Path to aapt2 tool. +# keystore_path: Path to keystore file. +# keystore_password: Password for the keystore file. +# keystore_alias: Signing key name alias within the keystore file. +# system_image_locales: List of Chromium locales to include in system .apks. +BundleGenerationInfo = collections.namedtuple( + 'BundleGenerationInfo', + 'bundle_path,bundle_apks_path,aapt2_path,keystore_path,keystore_password,' + 'keystore_alias,system_image_locales') + + +def _GenerateBundleApks(info, + output_path=None, + minimal=False, + minimal_sdk_version=None, + mode=None, + optimize_for=None): + """Generate an .apks archive from a bundle on demand. + + Args: + info: A BundleGenerationInfo instance. + output_path: Path of output .apks archive. + minimal: Create the minimal set of apks possible (English-only). + minimal_sdk_version: When minimal=True, use this sdkVersion. + mode: Build mode, either None, or one of app_bundle_utils.BUILD_APKS_MODES. + optimize_for: Override split config, either None, or one of + app_bundle_utils.OPTIMIZE_FOR_OPTIONS. + """ + logging.info('Generating .apks file') + app_bundle_utils.GenerateBundleApks( + info.bundle_path, + # Store .apks file beside the .aab file by default so that it gets cached. + output_path or info.bundle_apks_path, + info.aapt2_path, + info.keystore_path, + info.keystore_password, + info.keystore_alias, + system_image_locales=info.system_image_locales, + mode=mode, + minimal=minimal, + minimal_sdk_version=minimal_sdk_version, + optimize_for=optimize_for) + + +def _InstallBundle(devices, apk_helper_instance, modules, fake_modules): + + def Install(device): + device.Install(apk_helper_instance, + permissions=[], + modules=modules, + fake_modules=fake_modules, + allow_downgrade=True, + reinstall=True) + + # Basic checks for |modules| and |fake_modules|. + # * |fake_modules| cannot include 'base'. + # * If |fake_modules| is given, ensure |modules| includes 'base'. + # * They must be disjoint (checked by device.Install).
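+ # For example, with hypothetical module names: + # -m base -m feature_a -f feature_b -> allowed + # -f base -> rejected below + # -f feature_b (without -m base) -> rejected below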
+ modules_set = set(modules) if modules else set() + fake_modules_set = set(fake_modules) if fake_modules else set() + if BASE_MODULE in fake_modules_set: + raise Exception('\'-f {}\' is disallowed.'.format(BASE_MODULE)) + if fake_modules_set and BASE_MODULE not in modules_set: + raise Exception( + '\'-f FAKE\' must be accompanied by \'-m {}\''.format(BASE_MODULE)) + + logging.info('Installing bundle.') + device_utils.DeviceUtils.parallel(devices).pMap(Install) + + +def _UninstallApk(devices, install_dict, package_name): + def uninstall(device): + if install_dict: + installer.Uninstall(device, package_name) + else: + device.Uninstall(package_name) + device_utils.DeviceUtils.parallel(devices).pMap(uninstall) + + +def _IsWebViewProvider(apk_helper_instance): + meta_data = apk_helper_instance.GetAllMetadata() + meta_data_keys = [pair[0] for pair in meta_data] + return 'com.android.webview.WebViewLibrary' in meta_data_keys + + +def _SetWebViewProvider(devices, package_name): + + def switch_provider(device): + if device.build_version_sdk < version_codes.NOUGAT: + logging.error('No need to switch provider on pre-Nougat devices (%s)', + device.serial) + else: + device.SetWebViewImplementation(package_name) + + device_utils.DeviceUtils.parallel(devices).pMap(switch_provider) + + +def _NormalizeProcessName(debug_process_name, package_name): + if not debug_process_name: + debug_process_name = package_name + elif debug_process_name.startswith(':'): + debug_process_name = package_name + debug_process_name + elif '.' not in debug_process_name: + debug_process_name = package_name + ':' + debug_process_name + return debug_process_name + + +def _ResolveActivity(device, package_name, category, action): + # E.g.: + # Activity Resolver Table: + # Schemes: + # http: + # 67e97c0 org.chromium.pkg/.MainActivity filter c91d43e + # Action: "android.intent.action.VIEW" + # Category: "android.intent.category.DEFAULT" + # Category: "android.intent.category.BROWSABLE" + # Scheme: "http" + # Scheme: "https" + # + # Non-Data Actions: + # android.intent.action.MAIN: + # 67e97c0 org.chromium.pkg/.MainActivity filter 4a34cf9 + # Action: "android.intent.action.MAIN" + # Category: "android.intent.category.LAUNCHER" + lines = device.RunShellCommand(['dumpsys', 'package', package_name], + check_return=True) + + # Extract the Activity Resolver Table: section. + start_idx = next((i for i, l in enumerate(lines) + if l.startswith('Activity Resolver Table:')), None) + if start_idx is None: + if not device.IsApplicationInstalled(package_name): + raise Exception('Package not installed: ' + package_name) + raise Exception('No Activity Resolver Table in:\n' + '\n'.join(lines)) + line_count = next(i for i, l in enumerate(lines[start_idx + 1:]) + if l and not l[0].isspace()) + data = '\n'.join(lines[start_idx:start_idx + line_count]) + + # Split on each Activity entry. + entries = re.split(r'^ [0-9a-f]+ ', data, flags=re.MULTILINE) + + def activity_name_from_entry(entry): + assert entry.startswith(package_name), 'Got: ' + entry + activity_name = entry[len(package_name) + 1:].split(' ', 1)[0] + if activity_name[0] == '.': + activity_name = package_name + activity_name + return activity_name + + # Find the one with the text we want.
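+ # E.g. when resolving a launcher activity (see _LaunchUrl below), this + # looks for an entry containing both of: + # Category: "android.intent.category.LAUNCHER" + # Action: "android.intent.action.MAIN"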
+ category_text = f'Category: "{category}"' + action_text = f'Action: "{action}"' + matched_entries = [ + e for e in entries[1:] if category_text in e and action_text in e + ] + + if not matched_entries: + raise Exception(f'Did not find {category_text}, {action_text} in\n{data}') + if len(matched_entries) > 1: + # When there are multiple matches, look for the one marked as default. + # Necessary for Monochrome, which also has MonochromeLauncherActivity. + default_entries = [ + e for e in matched_entries if 'android.intent.category.DEFAULT' in e + ] + matched_entries = default_entries or matched_entries + + # See if all matches point to the same activity. + activity_names = {activity_name_from_entry(e) for e in matched_entries} + + if len(activity_names) > 1: + raise Exception('Found multiple launcher activities:\n * ' + + '\n * '.join(sorted(activity_names))) + return next(iter(activity_names)) + + +def _LaunchUrl(devices, + package_name, + argv=None, + command_line_flags_file=None, + url=None, + wait_for_java_debugger=False, + debug_process_name=None, + nokill=None): + if argv and command_line_flags_file is None: + raise Exception('This apk does not support any flags.') + + debug_process_name = _NormalizeProcessName(debug_process_name, package_name) + + if url is None: + category = 'android.intent.category.LAUNCHER' + action = 'android.intent.action.MAIN' + else: + category = 'android.intent.category.BROWSABLE' + action = 'android.intent.action.VIEW' + + def launch(device): + activity = _ResolveActivity(device, package_name, category, action) + # --persistent is required to have Settings.Global.DEBUG_APP be set, which + # we currently use to allow reading of flags. https://crbug.com/784947 + if not nokill: + cmd = ['am', 'set-debug-app', '--persistent', debug_process_name] + if wait_for_java_debugger: + cmd[-1:-1] = ['-w'] + # Ignore error since it will fail if apk is not debuggable. + device.RunShellCommand(cmd, check_return=False) + + # The flags are first updated with input args. 
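+ # E.g. (hypothetical flags) argv='--enable-logging --v=1' becomes + # ['--enable-logging', '--v=1'] after the shlex.split() call below.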
+ if command_line_flags_file: + changer = flag_changer.FlagChanger(device, command_line_flags_file) + flags = [] + if argv: + adb_command_line.CheckBuildTypeSupportsFlags(device, + command_line_flags_file) + flags = shlex.split(argv) + try: + changer.ReplaceFlags(flags) + except device_errors.AdbShellCommandFailedError: + logging.exception('Failed to set flags') + + launch_intent = intent.Intent(action=action, + activity=activity, + data=url, + package=package_name) + logging.info('Sending launch intent for %s', activity) + device.StartActivity(launch_intent) + + device_utils.DeviceUtils.parallel(devices).pMap(launch) + if wait_for_java_debugger: + print('Waiting for debugger to attach to process: ' + + _Colorize(debug_process_name, colorama.Fore.YELLOW)) + + +def _ChangeFlags(devices, argv, command_line_flags_file): + if argv is None: + _DisplayArgs(devices, command_line_flags_file) + else: + flags = shlex.split(argv) + def update(device): + adb_command_line.CheckBuildTypeSupportsFlags(device, + command_line_flags_file) + changer = flag_changer.FlagChanger(device, command_line_flags_file) + changer.ReplaceFlags(flags) + device_utils.DeviceUtils.parallel(devices).pMap(update) + + +def _TargetCpuToTargetArch(target_cpu): + if target_cpu == 'x64': + return 'x86_64' + if target_cpu == 'mipsel': + return 'mips' + return target_cpu + + +def _RunGdb(device, package_name, debug_process_name, pid, output_directory, + target_cpu, port, ide, verbose): + if not pid: + debug_process_name = _NormalizeProcessName(debug_process_name, package_name) + pid = device.GetApplicationPids(debug_process_name, at_most_one=True) + if not pid: + # Attaching gdb makes the app run so slowly that it takes *minutes* to + # start up (as of 2018). Better to just fail than to start & attach. + raise Exception('App not running.') + + gdb_script_path = os.path.dirname(__file__) + '/adb_gdb' + cmd = [ + gdb_script_path, + '--package-name=%s' % package_name, + '--output-directory=%s' % output_directory, + '--adb=%s' % adb_wrapper.AdbWrapper.GetAdbPath(), + '--device=%s' % device.serial, + '--pid=%s' % pid, + '--port=%d' % port, + ] + if ide: + cmd.append('--ide') + # Enable verbose output of adb_gdb if it's set for this script.
+ if verbose: + cmd.append('--verbose') + if target_cpu: + cmd.append('--target-arch=%s' % _TargetCpuToTargetArch(target_cpu)) + logging.warning('Running: %s', ' '.join(pipes.quote(x) for x in cmd)) + print(_Colorize('All subsequent output is from adb_gdb script.', + colorama.Fore.YELLOW)) + os.execv(gdb_script_path, cmd) + + +def _PrintPerDeviceOutput(devices, results, single_line=False): + for d, result in zip(devices, results): + if not single_line and d is not devices[0]: + sys.stdout.write('\n') + sys.stdout.write( + _Colorize('{} ({}):'.format(d, d.build_description), + colorama.Fore.YELLOW)) + sys.stdout.write(' ' if single_line else '\n') + yield result + + +def _RunMemUsage(devices, package_name, query_app=False): + cmd_args = ['dumpsys', 'meminfo'] + if not query_app: + cmd_args.append('--local') + + def mem_usage_helper(d): + ret = [] + for process in sorted(_GetPackageProcesses(d, package_name)): + meminfo = d.RunShellCommand(cmd_args + [str(process.pid)]) + ret.append((process.name, '\n'.join(meminfo))) + return ret + + parallel_devices = device_utils.DeviceUtils.parallel(devices) + all_results = parallel_devices.pMap(mem_usage_helper).pGet(None) + for result in _PrintPerDeviceOutput(devices, all_results): + if not result: + print('No processes found.') + else: + for name, usage in sorted(result): + print(_Colorize('==== Output of "dumpsys meminfo %s" ====' % name, + colorama.Fore.GREEN)) + print(usage) + + +def _DuHelper(device, path_spec, run_as=None): + """Runs "du -s -k |path_spec|" on |device| and returns parsed result. + + Args: + device: A DeviceUtils instance. + path_spec: The list of paths to run du on. May contain shell expansions + (will not be escaped). + run_as: Package name to run as, or None to run as shell user. If not None + and the app is not android:debuggable (run-as fails), then the command + will be run as root. + + Returns: + A dict of path->size in KiB containing all paths in |path_spec| that exist + on device. Paths that do not exist are silently ignored. + """ + # Example output for: du -s -k /data/data/org.chromium.chrome/{*,.*} + # 144 /data/data/org.chromium.chrome/cache + # 8 /data/data/org.chromium.chrome/files + # + # du: .*: No such file or directory + + # The -d flag works differently across Android versions, so use -s instead. + # Without the explicit 2>&1, stderr and stdout get combined at random :(. + cmd_str = 'du -s -k ' + path_spec + ' 2>&1' + lines = device.RunShellCommand(cmd_str, run_as=run_as, shell=True, + check_return=False) + output = '\n'.join(lines) + # run-as: Package 'com.android.chrome' is not debuggable + if output.startswith('run-as:'): + # check_return=False is needed since some paths in path_spec may not exist.
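+ # run-as is rejected for non-debuggable packages, so retry the same + # command as root (this fallback only works on rooted devices).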
+ lines = device.RunShellCommand(cmd_str, as_root=True, shell=True, + check_return=False) + ret = {} + try: + for line in lines: + # du: .*: No such file or directory + if line.startswith('du:'): + continue + size, subpath = line.split(None, 1) + ret[subpath] = int(size) + return ret + except ValueError: + logging.error('du command was: %s', cmd_str) + logging.error('Failed to parse du output:\n%s', output) + raise + + +def _RunDiskUsage(devices, package_name): + # Measuring dex size is a bit complicated: + # https://source.android.com/devices/tech/dalvik/jit-compiler + # + # For KitKat and below: + # dumpsys package contains: + # dataDir=/data/data/org.chromium.chrome + # codePath=/data/app/org.chromium.chrome-1.apk + # resourcePath=/data/app/org.chromium.chrome-1.apk + # nativeLibraryPath=/data/app-lib/org.chromium.chrome-1 + # To measure odex: + # ls -l /data/dalvik-cache/data@app@org.chromium.chrome-1.apk@classes.dex + # + # For Android L and M (and maybe for N+ system apps): + # dumpsys package contains: + # codePath=/data/app/org.chromium.chrome-1 + # resourcePath=/data/app/org.chromium.chrome-1 + # legacyNativeLibraryDir=/data/app/org.chromium.chrome-1/lib + # To measure odex: + # # Option 1: + # /data/dalvik-cache/arm/data@app@org.chromium.chrome-1@base.apk@classes.dex + # /data/dalvik-cache/arm/data@app@org.chromium.chrome-1@base.apk@classes.vdex + # ls -l /data/dalvik-cache/profiles/org.chromium.chrome + # (these profiles all appear to be 0 bytes) + # # Option 2: + # ls -l /data/app/org.chromium.chrome-1/oat/arm/base.odex + # + # For Android N+: + # dumpsys package contains: + # dataDir=/data/user/0/org.chromium.chrome + # codePath=/data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w== + # resourcePath=/data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w== + # legacyNativeLibraryDir=/data/app/org.chromium.chrome-GUID/lib + # Instruction Set: arm + # path: /data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==/base.apk + # status: /data/.../oat/arm/base.odex[status=kOatUpToDate, compilation_f + # ilter=quicken] + # Instruction Set: arm64 + # path: /data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==/base.apk + # status: /data/.../oat/arm64/base.odex[status=..., compilation_filter=q + # uicken] + # To measure odex: + # ls -l /data/app/.../oat/arm/base.odex + # ls -l /data/app/.../oat/arm/base.vdex (optional) + # To measure the correct odex size: + # cmd package compile -m speed org.chromium.chrome # For webview + # cmd package compile -m speed-profile org.chromium.chrome # For others + def disk_usage_helper(d): + package_output = '\n'.join(d.RunShellCommand( + ['dumpsys', 'package', package_name], check_return=True)) + # Does not return error when apk is not installed. + if not package_output or 'Unable to find package:' in package_output: + return None + + # Ignore system apks that have updates installed. 
+ package_output = re.sub(r'Hidden system packages:.*?^\b', '',
+ package_output, flags=re.S | re.M)
+
+ try:
+ data_dir = re.search(r'dataDir=(.*)', package_output).group(1)
+ code_path = re.search(r'codePath=(.*)', package_output).group(1)
+ lib_path = re.search(r'(?:legacyN|n)ativeLibrary(?:Dir|Path)=(.*)',
+ package_output).group(1)
+ except AttributeError as e:
+ raise Exception('Error parsing dumpsys output: ' + package_output) from e
+
+ if code_path.startswith('/system'):
+ logging.warning('Measurement of system image apks can be inaccurate')
+
+ compilation_filters = set()
+ # Match "compilation_filter=value", where a line break can occur at any spot
+ # (refer to examples above).
+ awful_wrapping = r'\s*'.join('compilation_filter=')
+ for m in re.finditer(awful_wrapping + r'([\s\S]+?)[\],]', package_output):
+ compilation_filters.add(re.sub(r'\s+', '', m.group(1)))
+ # Starting Android Q, output looks like:
+ # arm: [status=speed-profile] [reason=install]
+ for m in re.finditer(r'\[status=(.+?)\]', package_output):
+ compilation_filters.add(m.group(1))
+ compilation_filter = ','.join(sorted(compilation_filters))
+
+ data_dir_sizes = _DuHelper(d, '%s/{*,.*}' % data_dir, run_as=package_name)
+ # Measure code_cache separately since it can be large.
+ code_cache_sizes = {}
+ code_cache_dir = next(
+ (k for k in data_dir_sizes if k.endswith('/code_cache')), None)
+ if code_cache_dir:
+ data_dir_sizes.pop(code_cache_dir)
+ code_cache_sizes = _DuHelper(d, '%s/{*,.*}' % code_cache_dir,
+ run_as=package_name)
+
+ apk_path_spec = code_path
+ if not apk_path_spec.endswith('.apk'):
+ apk_path_spec += '/*.apk'
+ apk_sizes = _DuHelper(d, apk_path_spec)
+ if lib_path.endswith('/lib'):
+ # Shows architecture subdirectory.
+ lib_sizes = _DuHelper(d, '%s/{*,.*}' % lib_path)
+ else:
+ lib_sizes = _DuHelper(d, lib_path)
+
+ # Look at all possible locations for odex files.
+ odex_paths = []
+ for apk_path in apk_sizes:
+ mangled_apk_path = apk_path[1:].replace('/', '@')
+ apk_basename = posixpath.basename(apk_path)[:-4]
+ for ext in ('dex', 'odex', 'vdex', 'art'):
+ # Easier to check all architectures than to determine active ones.
+ for arch in ('arm', 'arm64', 'x86', 'x86_64', 'mips', 'mips64'):
+ odex_paths.append(
+ '%s/oat/%s/%s.%s' % (code_path, arch, apk_basename, ext))
+ # No app could possibly have more than 6 dex files.
+ for suffix in ('', '2', '3', '4', '5'):
+ odex_paths.append('/data/dalvik-cache/%s/%s@classes%s.%s' % (
+ arch, mangled_apk_path, suffix, ext))
+ # This path does not have |arch|, so don't repeat it for every arch.
+ if arch == 'arm': + odex_paths.append('/data/dalvik-cache/%s@classes%s.dex' % ( + mangled_apk_path, suffix)) + + odex_sizes = _DuHelper(d, ' '.join(pipes.quote(p) for p in odex_paths)) + + return (data_dir_sizes, code_cache_sizes, apk_sizes, lib_sizes, odex_sizes, + compilation_filter) + + def print_sizes(desc, sizes): + print('%s: %d KiB' % (desc, sum(sizes.values()))) + for path, size in sorted(sizes.items()): + print(' %s: %s KiB' % (path, size)) + + parallel_devices = device_utils.DeviceUtils.parallel(devices) + all_results = parallel_devices.pMap(disk_usage_helper).pGet(None) + for result in _PrintPerDeviceOutput(devices, all_results): + if not result: + print('APK is not installed.') + continue + + (data_dir_sizes, code_cache_sizes, apk_sizes, lib_sizes, odex_sizes, + compilation_filter) = result + total = sum(sum(sizes.values()) for sizes in result[:-1]) + + print_sizes('Apk', apk_sizes) + print_sizes('App Data (non-code cache)', data_dir_sizes) + print_sizes('App Data (code cache)', code_cache_sizes) + print_sizes('Native Libs', lib_sizes) + show_warning = compilation_filter and 'speed' not in compilation_filter + compilation_filter = compilation_filter or 'n/a' + print_sizes('odex (compilation_filter=%s)' % compilation_filter, odex_sizes) + if show_warning: + logging.warning('For a more realistic odex size, run:') + logging.warning(' %s compile-dex [speed|speed-profile]', sys.argv[0]) + print('Total: %s KiB (%.1f MiB)' % (total, total / 1024.0)) + + +class _LogcatProcessor: + ParsedLine = collections.namedtuple( + 'ParsedLine', + ['date', 'invokation_time', 'pid', 'tid', 'priority', 'tag', 'message']) + + class NativeStackSymbolizer: + """Buffers lines from native stacks and symbolizes them when done.""" + # E.g.: #06 pc 0x0000d519 /apex/com.android.runtime/lib/libart.so + # E.g.: #01 pc 00180c8d /data/data/.../lib/libbase.cr.so + _STACK_PATTERN = re.compile(r'\s*#\d+\s+(?:pc )?(0x)?[0-9a-f]{8,16}\s') + + def __init__(self, stack_script_context, print_func): + # To symbolize native stacks, we need to pass all lines at once. + self._stack_script_context = stack_script_context + self._print_func = print_func + self._crash_lines_buffer = None + + def _FlushLines(self): + """Prints queued lines after sending them through stack.py.""" + if self._crash_lines_buffer is None: + return + + crash_lines = self._crash_lines_buffer + self._crash_lines_buffer = None + with tempfile.NamedTemporaryFile(mode='w') as f: + f.writelines(x[0].message + '\n' for x in crash_lines) + f.flush() + proc = self._stack_script_context.Popen( + input_file=f.name, stdout=subprocess.PIPE) + lines = proc.communicate()[0].splitlines() + + for i, line in enumerate(lines): + parsed_line, dim = crash_lines[min(i, len(crash_lines) - 1)] + d = parsed_line._asdict() + d['message'] = line + parsed_line = _LogcatProcessor.ParsedLine(**d) + self._print_func(parsed_line, dim) + + def AddLine(self, parsed_line, dim): + # Assume all lines from DEBUG are stacks. + # Also look for "stack-looking" lines to catch manual stack prints. + # It's important to not buffer non-stack lines because stack.py does not + # pass them through. 
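
The _STACK_PATTERN above can be sanity-checked against the sample frames
quoted in its comment:

import re

stack_pattern = re.compile(r'\s*#\d+\s+(?:pc )?(0x)?[0-9a-f]{8,16}\s')
print(bool(stack_pattern.match(
    '    #06 pc 0x0000d519 /apex/com.android.runtime/lib/libart.so')))  # True
print(bool(stack_pattern.match(
    '    #01 pc 00180c8d /data/data/.../lib/libbase.cr.so')))  # True
print(bool(stack_pattern.match('#1 sending 8 of 16 bytes')))  # False
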
+ is_crash_line = parsed_line.tag == 'DEBUG' or (self._STACK_PATTERN.match( + parsed_line.message)) + + if is_crash_line: + if self._crash_lines_buffer is None: + self._crash_lines_buffer = [] + self._crash_lines_buffer.append((parsed_line, dim)) + return + + self._FlushLines() + + self._print_func(parsed_line, dim) + + + # Logcat tags for messages that are generally relevant but are not from PIDs + # associated with the apk. + _ALLOWLISTED_TAGS = { + 'ActivityManager', # Shows activity lifecycle messages. + 'ActivityTaskManager', # More activity lifecycle messages. + 'AndroidRuntime', # Java crash dumps + 'AppZygoteInit', # Android's native application zygote support. + 'DEBUG', # Native crash dump. + } + + # Matches messages only on pre-L (Dalvik) that are spammy and unimportant. + _DALVIK_IGNORE_PATTERN = re.compile('|'.join([ + r'^Added shared lib', + r'^Could not find ', + r'^DexOpt:', + r'^GC_', + r'^Late-enabling CheckJNI', + r'^Link of class', + r'^No JNI_OnLoad found in', + r'^Trying to load lib', + r'^Unable to resolve superclass', + r'^VFY:', + r'^WAIT_', + ])) + + def __init__(self, + device, + package_name, + stack_script_context, + deobfuscate=None, + verbose=False, + exit_on_match=None, + extra_package_names=None): + self._device = device + self._package_name = package_name + self._extra_package_names = extra_package_names or [] + self._verbose = verbose + self._deobfuscator = deobfuscate + if exit_on_match is not None: + self._exit_on_match = re.compile(exit_on_match) + else: + self._exit_on_match = None + self._found_exit_match = False + self._native_stack_symbolizer = _LogcatProcessor.NativeStackSymbolizer( + stack_script_context, self._PrintParsedLine) + # Process ID for the app's main process (with no :name suffix). + self._primary_pid = None + # Set of all Process IDs that belong to the app. + self._my_pids = set() + # Set of all Process IDs that we've parsed at some point. + self._seen_pids = set() + # Start proc 22953:com.google.chromeremotedesktop/ + self._pid_pattern = re.compile(r'Start proc (\d+):{}/'.format(package_name)) + # START u0 {act=android.intent.action.MAIN \ + # cat=[android.intent.category.LAUNCHER] \ + # flg=0x10000000 pkg=com.google.chromeremotedesktop} from uid 2000 + self._start_pattern = re.compile(r'START .*(?:cmp|pkg)=' + package_name) + + self.nonce = 'Chromium apk_operations.py nonce={}'.format(random.random()) + # Holds lines buffered on start-up, before we find our nonce message. + self._initial_buffered_lines = [] + self._UpdateMyPids() + # Give preference to PID reported by "ps" over those found from + # _start_pattern. There can be multiple "Start proc" messages from prior + # runs of the app. + self._found_initial_pid = self._primary_pid is not None + # Retrieve any additional patterns that are relevant for the User. + self._user_defined_highlight = None + user_regex = os.environ.get('CHROMIUM_LOGCAT_HIGHLIGHT') + if user_regex: + self._user_defined_highlight = re.compile(user_regex) + if not self._user_defined_highlight: + print(_Colorize( + 'Rejecting invalid regular expression: {}'.format(user_regex), + colorama.Fore.RED + colorama.Style.BRIGHT)) + + def _UpdateMyPids(self): + # We intentionally do not clear self._my_pids to make sure that the + # ProcessLine method below also includes lines from processes which may + # have already exited. 
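
The "Start proc" pattern above, exercised against the sample message from its
comment (the '/u0a123 for service ...' remainder is an illustrative guess at
the rest of such a line):

import re

package_name = 'com.google.chromeremotedesktop'
pid_pattern = re.compile(r'Start proc (\d+):{}/'.format(package_name))
m = pid_pattern.match(
    'Start proc 22953:com.google.chromeremotedesktop/u0a123 for service ...')
print(m.group(1))  # -> 22953
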
+ self._primary_pid = None + for package_name in [self._package_name] + self._extra_package_names: + for process in _GetPackageProcesses(self._device, package_name): + # We take only the first "main" process found in order to account for + # possibly forked() processes. + if ':' not in process.name and self._primary_pid is None: + self._primary_pid = process.pid + self._my_pids.add(process.pid) + + def _GetPidStyle(self, pid, dim=False): + if pid == self._primary_pid: + return colorama.Fore.WHITE + if pid in self._my_pids: + # TODO(wnwen): Use one separate persistent color per process, pop LRU + return colorama.Fore.YELLOW + if dim: + return colorama.Style.DIM + return '' + + def _GetPriorityStyle(self, priority, dim=False): + # pylint:disable=no-self-use + if dim: + return '' + style = colorama.Fore.BLACK + if priority in ('E', 'F'): + style += colorama.Back.RED + elif priority == 'W': + style += colorama.Back.YELLOW + elif priority == 'I': + style += colorama.Back.GREEN + elif priority == 'D': + style += colorama.Back.BLUE + return style + + def _ParseLine(self, line): + tokens = line.split(None, 6) + + def consume_token_or_default(default): + return tokens.pop(0) if len(tokens) > 0 else default + + def consume_integer_token_or_default(default): + if len(tokens) == 0: + return default + + try: + return int(tokens.pop(0)) + except ValueError: + return default + + date = consume_token_or_default('') + invokation_time = consume_token_or_default('') + pid = consume_integer_token_or_default(-1) + tid = consume_integer_token_or_default(-1) + priority = consume_token_or_default('') + tag = consume_token_or_default('') + original_message = consume_token_or_default('') + + # Example: + # 09-19 06:35:51.113 9060 9154 W GCoreFlp: No location... + # 09-19 06:01:26.174 9060 10617 I Auth : [ReflectiveChannelBinder]... + # Parsing "GCoreFlp:" vs "Auth :", we only want tag to contain the word, + # and we don't want to keep the colon for the message. + if tag and tag[-1] == ':': + tag = tag[:-1] + elif len(original_message) > 2: + original_message = original_message[2:] + return self.ParsedLine( + date, invokation_time, pid, tid, priority, tag, original_message) + + def _PrintParsedLine(self, parsed_line, dim=False): + if self._exit_on_match and self._exit_on_match.search(parsed_line.message): + self._found_exit_match = True + + tid_style = colorama.Style.NORMAL + user_match = self._user_defined_highlight and ( + re.search(self._user_defined_highlight, parsed_line.tag) + or re.search(self._user_defined_highlight, parsed_line.message)) + + # Make the main thread bright. + if not dim and parsed_line.pid == parsed_line.tid: + tid_style = colorama.Style.BRIGHT + pid_style = self._GetPidStyle(parsed_line.pid, dim) + msg_style = pid_style if not user_match else (colorama.Fore.GREEN + + colorama.Style.BRIGHT) + # We have to pad before adding color as that changes the width of the tag. 
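
_ParseLine above leans on logcat's "threadtime" format splitting into at most
seven fields. Applied to the first example line from its comment:

line = '09-19 06:35:51.113  9060  9154 W GCoreFlp: No location to return.'
date, when, pid, tid, priority, tag, message = line.split(None, 6)
# 'GCoreFlp:' keeps its colon here; for padded tags like 'Auth   :' the colon
# instead lands at the front of the message, which is why _ParseLine handles
# both cases.
print((date, when, int(pid), int(tid), priority, tag.rstrip(':'), message))
# -> ('09-19', '06:35:51.113', 9060, 9154, 'W', 'GCoreFlp',
#     'No location to return.')
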
+ pid_str = _Colorize('{:5}'.format(parsed_line.pid), pid_style) + tid_str = _Colorize('{:5}'.format(parsed_line.tid), tid_style) + tag = _Colorize('{:8}'.format(parsed_line.tag), + pid_style + ('' if dim else colorama.Style.BRIGHT)) + priority = _Colorize(parsed_line.priority, + self._GetPriorityStyle(parsed_line.priority)) + messages = [parsed_line.message] + if self._deobfuscator: + messages = self._deobfuscator.TransformLines(messages) + for message in messages: + message = _Colorize(message, msg_style) + sys.stdout.write('{} {} {} {} {} {}: {}\n'.format( + parsed_line.date, parsed_line.invokation_time, pid_str, tid_str, + priority, tag, message)) + + def _TriggerNonceFound(self): + # Once the nonce is hit, we have confidence that we know which lines + # belong to the current run of the app. Process all of the buffered lines. + if self._primary_pid: + for args in self._initial_buffered_lines: + self._native_stack_symbolizer.AddLine(*args) + self._initial_buffered_lines = None + self.nonce = None + + def FoundExitMatch(self): + return self._found_exit_match + + def ProcessLine(self, line): + if not line or line.startswith('------'): + return + + if self.nonce and self.nonce in line: + self._TriggerNonceFound() + + nonce_found = self.nonce is None + + log = self._ParseLine(line) + if log.pid not in self._seen_pids: + self._seen_pids.add(log.pid) + if nonce_found: + # Update list of owned PIDs each time a new PID is encountered. + self._UpdateMyPids() + + # Search for "Start proc $pid:$package_name/" message. + if not nonce_found: + # Capture logs before the nonce. Start with the most recent "am start". + if self._start_pattern.match(log.message): + self._initial_buffered_lines = [] + + # If we didn't find the PID via "ps", then extract it from log messages. + # This will happen if the app crashes too quickly. + if not self._found_initial_pid: + m = self._pid_pattern.match(log.message) + if m: + # Find the most recent "Start proc" line before the nonce. + # Track only the primary pid in this mode. + # The main use-case is to find app logs when no current PIDs exist. + # E.g.: When the app crashes on launch. + self._primary_pid = m.group(1) + self._my_pids.clear() + self._my_pids.add(m.group(1)) + + owned_pid = log.pid in self._my_pids + if owned_pid and not self._verbose and log.tag == 'dalvikvm': + if self._DALVIK_IGNORE_PATTERN.match(log.message): + return + + if owned_pid or self._verbose or (log.priority == 'F' or # Java crash dump + log.tag in self._ALLOWLISTED_TAGS): + if nonce_found: + self._native_stack_symbolizer.AddLine(log, not owned_pid) + else: + self._initial_buffered_lines.append((log, not owned_pid)) + + +def _RunLogcat(device, + package_name, + stack_script_context, + deobfuscate, + verbose, + exit_on_match=None, + extra_package_names=None): + logcat_processor = _LogcatProcessor(device, + package_name, + stack_script_context, + deobfuscate, + verbose, + exit_on_match=exit_on_match, + extra_package_names=extra_package_names) + device.RunShellCommand(['log', logcat_processor.nonce]) + for line in device.adb.Logcat(logcat_format='threadtime'): + try: + logcat_processor.ProcessLine(line) + if logcat_processor.FoundExitMatch(): + return + except: + sys.stderr.write('Failed to process line: ' + line + '\n') + # Skip stack trace for the common case of the adb server being + # restarted. 
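
The nonce handling in ProcessLine above boils down to: write a unique marker
into the log stream (via the "log" shell command), then treat everything seen
before that marker as output from a previous run. A self-contained sketch:

import random

nonce = 'Chromium apk_operations.py nonce={}'.format(random.random())
# On a device this is emitted with: device.RunShellCommand(['log', nonce])

def lines_after_marker(logcat_lines, marker):
  seen = False
  for line in logcat_lines:
    if not seen:
      seen = marker in line
      continue
    yield line

sample = ['stale line from an earlier run', 'I log : ' + nonce, 'fresh line']
print(list(lines_after_marker(sample, nonce)))  # -> ['fresh line']
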
+ if 'unexpected EOF' in line: + sys.exit(1) + raise + + +def _GetPackageProcesses(device, package_name): + my_names = (package_name, package_name + '_zygote') + return [ + p for p in device.ListProcesses(package_name) + if p.name in my_names or p.name.startswith(package_name + ':') + ] + + +def _RunPs(devices, package_name): + parallel_devices = device_utils.DeviceUtils.parallel(devices) + all_processes = parallel_devices.pMap( + lambda d: _GetPackageProcesses(d, package_name)).pGet(None) + for processes in _PrintPerDeviceOutput(devices, all_processes): + if not processes: + print('No processes found.') + else: + proc_map = collections.defaultdict(list) + for p in processes: + proc_map[p.name].append(str(p.pid)) + for name, pids in sorted(proc_map.items()): + print(name, ','.join(pids)) + + +def _RunShell(devices, package_name, cmd): + if cmd: + parallel_devices = device_utils.DeviceUtils.parallel(devices) + outputs = parallel_devices.RunShellCommand( + cmd, run_as=package_name).pGet(None) + for output in _PrintPerDeviceOutput(devices, outputs): + for line in output: + print(line) + else: + adb_path = adb_wrapper.AdbWrapper.GetAdbPath() + cmd = [adb_path, '-s', devices[0].serial, 'shell'] + # Pre-N devices do not support -t flag. + if devices[0].build_version_sdk >= version_codes.NOUGAT: + cmd += ['-t', 'run-as', package_name] + else: + print('Upon entering the shell, run:') + print('run-as', package_name) + print() + os.execv(adb_path, cmd) + + +def _RunCompileDex(devices, package_name, compilation_filter): + cmd = ['cmd', 'package', 'compile', '-f', '-m', compilation_filter, + package_name] + parallel_devices = device_utils.DeviceUtils.parallel(devices) + outputs = parallel_devices.RunShellCommand(cmd, timeout=120).pGet(None) + for output in _PrintPerDeviceOutput(devices, outputs): + for line in output: + print(line) + + +def _RunProfile(device, package_name, host_build_directory, pprof_out_path, + process_specifier, thread_specifier, events, extra_args): + simpleperf.PrepareDevice(device) + device_simpleperf_path = simpleperf.InstallSimpleperf(device, package_name) + with tempfile.NamedTemporaryFile() as fh: + host_simpleperf_out_path = fh.name + + with simpleperf.RunSimpleperf(device, device_simpleperf_path, package_name, + process_specifier, thread_specifier, + events, extra_args, host_simpleperf_out_path): + sys.stdout.write('Profiler is running; press Enter to stop...\n') + sys.stdin.read(1) + sys.stdout.write('Post-processing data...\n') + + simpleperf.ConvertSimpleperfToPprof(host_simpleperf_out_path, + host_build_directory, pprof_out_path) + print(textwrap.dedent(""" + Profile data written to %(s)s. + + To view profile as a call graph in browser: + pprof -web %(s)s + + To print the hottest methods: + pprof -top %(s)s + + pprof has many useful customization options; `pprof --help` for details. + """ % {'s': pprof_out_path})) + + +class _StackScriptContext: + """Maintains temporary files needed by stack.py.""" + + def __init__(self, + output_directory, + apk_path, + bundle_generation_info, + quiet=False): + self._output_directory = output_directory + self._apk_path = apk_path + self._bundle_generation_info = bundle_generation_info + self._staging_dir = None + self._quiet = quiet + + def _CreateStaging(self): + # In many cases, stack decoding requires APKs to map trace lines to native + # libraries. Create a temporary directory, and either unpack a bundle's + # APKS into it, or simply symlink the standalone APK into it. 
This + # provides an unambiguous set of APK files for the stack decoding process + # to inspect. + logging.debug('Creating stack staging directory') + self._staging_dir = tempfile.mkdtemp() + bundle_generation_info = self._bundle_generation_info + + if bundle_generation_info: + # TODO(wnwen): Use apk_helper instead. + _GenerateBundleApks(bundle_generation_info) + logging.debug('Extracting .apks file') + with zipfile.ZipFile(bundle_generation_info.bundle_apks_path, 'r') as z: + files_to_extract = [ + f for f in z.namelist() if f.endswith('-master.apk') + ] + z.extractall(self._staging_dir, files_to_extract) + elif self._apk_path: + # Otherwise an incremental APK and an empty apks directory is correct. + output = os.path.join(self._staging_dir, os.path.basename(self._apk_path)) + os.symlink(self._apk_path, output) + + def Close(self): + if self._staging_dir: + logging.debug('Clearing stack staging directory') + shutil.rmtree(self._staging_dir) + self._staging_dir = None + + def Popen(self, input_file=None, **kwargs): + if self._staging_dir is None: + self._CreateStaging() + stack_script = os.path.join( + constants.host_paths.ANDROID_PLATFORM_DEVELOPMENT_SCRIPTS_PATH, + 'stack.py') + cmd = [ + stack_script, '--output-directory', self._output_directory, + '--apks-directory', self._staging_dir + ] + if self._quiet: + cmd.append('--quiet') + if input_file: + cmd.append(input_file) + logging.info('Running stack.py') + return subprocess.Popen(cmd, universal_newlines=True, **kwargs) + + +def _GenerateAvailableDevicesMessage(devices): + devices_obj = device_utils.DeviceUtils.parallel(devices) + descriptions = devices_obj.pMap(lambda d: d.build_description).pGet(None) + msg = 'Available devices:\n' + for d, desc in zip(devices, descriptions): + msg += ' %s (%s)\n' % (d, desc) + return msg + + +# TODO(agrieve):add "--all" in the MultipleDevicesError message and use it here. +def _GenerateMissingAllFlagMessage(devices): + return ('More than one device available. Use --all to select all devices, ' + + 'or use --device to select a device by serial.\n\n' + + _GenerateAvailableDevicesMessage(devices)) + + +def _DisplayArgs(devices, command_line_flags_file): + def flags_helper(d): + changer = flag_changer.FlagChanger(d, command_line_flags_file) + return changer.GetCurrentFlags() + + parallel_devices = device_utils.DeviceUtils.parallel(devices) + outputs = parallel_devices.pMap(flags_helper).pGet(None) + print('Existing flags per-device (via /data/local/tmp/{}):'.format( + command_line_flags_file)) + for flags in _PrintPerDeviceOutput(devices, outputs, single_line=True): + quoted_flags = ' '.join(pipes.quote(f) for f in flags) + print(quoted_flags or 'No flags set.') + + +def _DeviceCachePath(device, output_directory): + file_name = 'device_cache_%s.json' % device.serial + return os.path.join(output_directory, file_name) + + +def _LoadDeviceCaches(devices, output_directory): + if not output_directory: + return + for d in devices: + cache_path = _DeviceCachePath(d, output_directory) + if os.path.exists(cache_path): + logging.debug('Using device cache: %s', cache_path) + with open(cache_path) as f: + d.LoadCacheData(f.read()) + # Delete the cached file so that any exceptions cause it to be cleared. 
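
The delete-on-load step the comment above describes pairs with
_SaveDeviceCaches below: the cache file is consumed on load and only
re-written on a clean exit, so a crash cannot leave a stale cache behind.
Schematically, with JSON standing in for devil's opaque cache string:

import json
import os

def load_cache(cache_path):
  if not os.path.exists(cache_path):
    return None
  with open(cache_path) as f:
    data = json.load(f)
  os.unlink(cache_path)  # Gone until the command completes successfully.
  return data

def save_cache(cache_path, data):
  with open(cache_path, 'w') as f:
    json.dump(data, f)
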
+ os.unlink(cache_path)
+ else:
+ logging.debug('No cache present for device: %s', d)
+
+
+def _SaveDeviceCaches(devices, output_directory):
+ if not output_directory:
+ return
+ for d in devices:
+ cache_path = _DeviceCachePath(d, output_directory)
+ with open(cache_path, 'w') as f:
+ f.write(d.DumpCacheData())
+ logging.info('Wrote device cache: %s', cache_path)
+
+
+class _Command:
+ name = None
+ description = None
+ long_description = None
+ needs_package_name = False
+ needs_output_directory = False
+ needs_apk_helper = False
+ supports_incremental = False
+ accepts_command_line_flags = False
+ accepts_args = False
+ need_device_args = True
+ all_devices_by_default = False
+ calls_exec = False
+ supports_multiple_devices = True
+
+ def __init__(self, from_wrapper_script, is_bundle, is_test_apk):
+ self._parser = None
+ self._from_wrapper_script = from_wrapper_script
+ self.args = None
+ self.apk_helper = None
+ self.additional_apk_helpers = None
+ self.install_dict = None
+ self.devices = None
+ self.is_bundle = is_bundle
+ self.is_test_apk = is_test_apk
+ self.bundle_generation_info = None
+ # Only support incremental install from APK wrapper scripts.
+ if is_bundle or not from_wrapper_script:
+ self.supports_incremental = False
+
+ def RegisterBundleGenerationInfo(self, bundle_generation_info):
+ self.bundle_generation_info = bundle_generation_info
+
+ def _RegisterExtraArgs(self, group):
+ pass
+
+ def RegisterArgs(self, parser):
+ subp = parser.add_parser(
+ self.name, help=self.description,
+ description=self.long_description or self.description,
+ formatter_class=argparse.RawDescriptionHelpFormatter)
+ self._parser = subp
+ subp.set_defaults(command=self)
+ if self.need_device_args:
+ subp.add_argument('--all',
+ action='store_true',
+ default=self.all_devices_by_default,
+ help='Operate on all connected devices.',)
+ subp.add_argument('-d',
+ '--device',
+ action='append',
+ default=[],
+ dest='devices',
+ help='Target device for script to work on. Enter '
+ 'multiple times for multiple devices.')
+ subp.add_argument('-v',
+ '--verbose',
+ action='count',
+ default=0,
+ dest='verbose_count',
+ help='Verbose level (multiple times for more)')
+ group = subp.add_argument_group('%s arguments' % self.name)
+
+ if self.needs_package_name:
+ # Three cases to consider here, since later code assumes
+ # self.args.package_name always exists, even if None:
+ #
+ # - Called from a bundle wrapper script, the package_name is already
+ # set through parser.set_defaults(), so don't call add_argument()
+ # to avoid overriding its value.
+ #
+ # - Called from an apk wrapper script. The --package-name argument
+ # should not appear, but self.args.package_name will be gleaned from
+ # the --apk-path file later.
+ #
+ # - Called directly, then --package-name is required on the command-line.
+ #
+ if not self.is_bundle:
+ group.add_argument(
+ '--package-name',
+ help=argparse.SUPPRESS if self._from_wrapper_script else (
+ "App's package name."))
+
+ if self.needs_apk_helper or self.needs_package_name:
+ # Adding this argument to the subparser would override the set_defaults()
+ # value set on the parent parser (even if None).
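
The comment above reflects real argparse behavior: declaring an argument on a
subparser lets that argument's default clobber a value injected with
set_defaults() on the parent parser. A runnable illustration with a
hypothetical package name:

import argparse

parser = argparse.ArgumentParser()
parser.set_defaults(package_name='com.example.app')  # as a wrapper script does
subparsers = parser.add_subparsers()
subparsers.add_parser('launch')
args = parser.parse_args(['launch'])
print(args.package_name)  # -> com.example.app
# Had the 'launch' subparser declared --package-name, its default of None
# would have overwritten the injected value.
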
+ if not self._from_wrapper_script and not self.is_bundle: + group.add_argument( + '--apk-path', required=self.needs_apk_helper, help='Path to .apk') + + if self.supports_incremental: + group.add_argument('--incremental', + action='store_true', + default=False, + help='Always install an incremental apk.') + group.add_argument('--non-incremental', + action='store_true', + default=False, + help='Always install a non-incremental apk.') + + # accepts_command_line_flags and accepts_args are mutually exclusive. + # argparse will throw if they are both set. + if self.accepts_command_line_flags: + group.add_argument( + '--args', help='Command-line flags. Use = to assign args.') + + if self.accepts_args: + group.add_argument( + '--args', help='Extra arguments. Use = to assign args') + + if not self._from_wrapper_script and self.accepts_command_line_flags: + # Provided by wrapper scripts. + group.add_argument( + '--command-line-flags-file', + help='Name of the command-line flags file') + + self._RegisterExtraArgs(group) + + def _CreateApkHelpers(self, args, incremental_apk_path, install_dict): + """Returns true iff self.apk_helper was created and assigned.""" + if self.apk_helper is None: + if args.apk_path: + self.apk_helper = apk_helper.ToHelper(args.apk_path) + elif incremental_apk_path: + self.install_dict = install_dict + self.apk_helper = apk_helper.ToHelper(incremental_apk_path) + elif self.is_bundle: + _GenerateBundleApks(self.bundle_generation_info) + self.apk_helper = apk_helper.ToHelper( + self.bundle_generation_info.bundle_apks_path) + if args.additional_apk_paths and self.additional_apk_helpers is None: + self.additional_apk_helpers = [ + apk_helper.ToHelper(apk_path) + for apk_path in args.additional_apk_paths + ] + return self.apk_helper is not None + + def ProcessArgs(self, args): + self.args = args + # Ensure these keys always exist. They are set by wrapper scripts, but not + # always added when not using wrapper scripts. + args.__dict__.setdefault('apk_path', None) + args.__dict__.setdefault('incremental_json', None) + + incremental_apk_path = None + install_dict = None + if args.incremental_json and not (self.supports_incremental and + args.non_incremental): + with open(args.incremental_json) as f: + install_dict = json.load(f) + incremental_apk_path = os.path.join(args.output_directory, + install_dict['apk_path']) + if not os.path.exists(incremental_apk_path): + incremental_apk_path = None + + if self.supports_incremental: + if args.incremental and args.non_incremental: + self._parser.error('Must use only one of --incremental and ' + '--non-incremental') + elif args.non_incremental: + if not args.apk_path: + self._parser.error('Apk has not been built.') + elif args.incremental: + if not incremental_apk_path: + self._parser.error('Incremental apk has not been built.') + args.apk_path = None + + if args.apk_path and incremental_apk_path: + self._parser.error('Both incremental and non-incremental apks exist. ' + 'Select using --incremental or --non-incremental') + + + # Gate apk_helper creation with _CreateApkHelpers since for bundles it takes + # a while to unpack the apks file from the aab file, so avoid this slowdown + # for simple commands that don't need apk_helper. 
+ if self.needs_apk_helper: + if not self._CreateApkHelpers(args, incremental_apk_path, install_dict): + self._parser.error('App is not built.') + + if self.needs_package_name and not args.package_name: + if self._CreateApkHelpers(args, incremental_apk_path, install_dict): + args.package_name = self.apk_helper.GetPackageName() + elif self._from_wrapper_script: + self._parser.error('App is not built.') + else: + self._parser.error('One of --package-name or --apk-path is required.') + + self.devices = [] + if self.need_device_args: + abis = None + if self._CreateApkHelpers(args, incremental_apk_path, install_dict): + abis = self.apk_helper.GetAbis() + self.devices = device_utils.DeviceUtils.HealthyDevices( + device_arg=args.devices, + enable_device_files_cache=bool(args.output_directory), + default_retries=0, + abis=abis) + # TODO(agrieve): Device cache should not depend on output directory. + # Maybe put into /tmp? + _LoadDeviceCaches(self.devices, args.output_directory) + + try: + if len(self.devices) > 1: + if not self.supports_multiple_devices: + self._parser.error(device_errors.MultipleDevicesError(self.devices)) + if not args.all and not args.devices: + self._parser.error(_GenerateMissingAllFlagMessage(self.devices)) + # Save cache now if command will not get a chance to afterwards. + if self.calls_exec: + _SaveDeviceCaches(self.devices, args.output_directory) + except: + _SaveDeviceCaches(self.devices, args.output_directory) + raise + + +class _DevicesCommand(_Command): + name = 'devices' + description = 'Describe attached devices.' + all_devices_by_default = True + + def Run(self): + print(_GenerateAvailableDevicesMessage(self.devices)) + + +class _PackageInfoCommand(_Command): + name = 'package-info' + description = 'Show various attributes of this app.' + need_device_args = False + needs_package_name = True + needs_apk_helper = True + + def Run(self): + # Format all (even ints) as strings, to handle cases where APIs return None + print('Package name: "%s"' % self.args.package_name) + print('versionCode: %s' % self.apk_helper.GetVersionCode()) + print('versionName: "%s"' % self.apk_helper.GetVersionName()) + print('minSdkVersion: %s' % self.apk_helper.GetMinSdkVersion()) + print('targetSdkVersion: %s' % self.apk_helper.GetTargetSdkVersion()) + print('Supported ABIs: %r' % self.apk_helper.GetAbis()) + + +class _InstallCommand(_Command): + name = 'install' + description = 'Installs the APK or bundle to one or more devices.' + needs_apk_helper = True + supports_incremental = True + default_modules = [] + + def _RegisterExtraArgs(self, group): + if self.is_bundle: + group.add_argument( + '-m', + '--module', + action='append', + default=self.default_modules, + help='Module to install. Can be specified multiple times.') + group.add_argument( + '-f', + '--fake', + action='append', + default=[], + help='Fake bundle module install. Can be specified multiple times. ' + 'Requires \'-m {0}\' to be given, and \'-f {0}\' is illegal.'.format( + BASE_MODULE)) + # Add even if |self.default_modules| is empty, for consistency. 
+ group.add_argument('--no-module',
+ action='append',
+ choices=self.default_modules,
+ default=[],
+ help='Module to exclude from default install.')
+
+ def Run(self):
+ if self.additional_apk_helpers:
+ for additional_apk_helper in self.additional_apk_helpers:
+ _InstallApk(self.devices, additional_apk_helper, None)
+ if self.is_bundle:
+ modules = list(
+ set(self.args.module) - set(self.args.no_module) -
+ set(self.args.fake))
+ _InstallBundle(self.devices, self.apk_helper, modules, self.args.fake)
+ else:
+ _InstallApk(self.devices, self.apk_helper, self.install_dict)
+
+
+class _UninstallCommand(_Command):
+ name = 'uninstall'
+ description = 'Removes the APK or bundle from one or more devices.'
+ needs_package_name = True
+
+ def Run(self):
+ _UninstallApk(self.devices, self.install_dict, self.args.package_name)
+
+
+class _SetWebViewProviderCommand(_Command):
+ name = 'set-webview-provider'
+ description = ("Sets the device's WebView provider to this APK's "
+ "package name.")
+ needs_package_name = True
+ needs_apk_helper = True
+
+ def Run(self):
+ if not _IsWebViewProvider(self.apk_helper):
+ raise Exception('This package does not have a WebViewLibrary meta-data '
+ 'tag. Are you sure it contains a WebView implementation?')
+ _SetWebViewProvider(self.devices, self.args.package_name)
+
+
+class _LaunchCommand(_Command):
+ name = 'launch'
+ description = ('Sends a launch intent for the APK or bundle after first '
+ 'writing the command-line flags file.')
+ needs_package_name = True
+ accepts_command_line_flags = True
+ all_devices_by_default = True
+
+ def _RegisterExtraArgs(self, group):
+ group.add_argument('-w', '--wait-for-java-debugger', action='store_true',
+ help='Pause execution until debugger attaches. Applies '
+ 'only to the main process. To have renderers wait, '
+ 'use --args="--renderer-wait-for-java-debugger"')
+ group.add_argument('--debug-process-name',
+ help='Name of the process to debug. '
+ 'E.g. "privileged_process0", or "foo.bar:baz"')
+ group.add_argument('--nokill', action='store_true',
+ help='Do not set the debug-app, nor set command-line '
+ 'flags. Useful to load a URL without having the '
+ 'app restart.')
+ group.add_argument('url', nargs='?', help='A URL to launch with.')
+
+ def Run(self):
+ if self.is_test_apk:
+ raise Exception('Use the bin/run_* scripts to run test apks.')
+ _LaunchUrl(self.devices,
+ self.args.package_name,
+ argv=self.args.args,
+ command_line_flags_file=self.args.command_line_flags_file,
+ url=self.args.url,
+ wait_for_java_debugger=self.args.wait_for_java_debugger,
+ debug_process_name=self.args.debug_process_name,
+ nokill=self.args.nokill)
+
+
+class _StopCommand(_Command):
+ name = 'stop'
+ description = 'Force-stops the app.'
+ needs_package_name = True
+ all_devices_by_default = True
+
+ def Run(self):
+ device_utils.DeviceUtils.parallel(self.devices).ForceStop(
+ self.args.package_name)
+
+
+class _ClearDataCommand(_Command):
+ name = 'clear-data'
+ description = 'Clears all app data.'
+ needs_package_name = True
+ all_devices_by_default = True
+
+ def Run(self):
+ device_utils.DeviceUtils.parallel(self.devices).ClearApplicationState(
+ self.args.package_name)
+
+
+class _ArgvCommand(_Command):
+ name = 'argv'
+ description = 'Display and optionally update command-line flags file.'
+ needs_package_name = True
+ accepts_command_line_flags = True
+ all_devices_by_default = True
+
+ def Run(self):
+ _ChangeFlags(self.devices, self.args.args,
+ self.args.command_line_flags_file)
+
+
+class _GdbCommand(_Command):
+ name = 'gdb'
+ description = 'Runs //build/android/adb_gdb with apk-specific args.'
+ long_description = description + """

+To attach to a process other than the APK's main process, use --pid=1234.
+To list all PIDs, use the "ps" command.
+
+If no apk process is currently running, sends a launch intent.
+"""
+ needs_package_name = True
+ needs_output_directory = True
+ calls_exec = True
+ supports_multiple_devices = False
+
+ def Run(self):
+ _RunGdb(self.devices[0], self.args.package_name,
+ self.args.debug_process_name, self.args.pid,
+ self.args.output_directory, self.args.target_cpu, self.args.port,
+ self.args.ide, bool(self.args.verbose_count))
+
+ def _RegisterExtraArgs(self, group):
+ pid_group = group.add_mutually_exclusive_group()
+ pid_group.add_argument('--debug-process-name',
+ help='Name of the process to attach to. '
+ 'E.g. "privileged_process0", or "foo.bar:baz"')
+ pid_group.add_argument('--pid',
+ help='The process ID to attach to. Defaults to '
+ 'the main process for the package.')
+ group.add_argument('--ide', action='store_true',
+ help='Rather than enter a gdb prompt, set up the '
+ 'gdb connection and wait for an IDE to '
+ 'connect.')
+ # Same default port that ndk-gdb.py uses.
+ group.add_argument('--port', type=int, default=5039,
+ help='Use the given port for the GDB connection')
+
+
+class _LogcatCommand(_Command):
+ name = 'logcat'
+ description = 'Runs "adb logcat" with filters relevant to the current APK.'
+ long_description = description + """
+
+"Relevant filters" means:
+ * Log messages from processes belonging to the apk,
+ * Plus log messages from log tags: ActivityManager|DEBUG,
+ * Plus fatal logs from any process,
+ * Minus spammy dalvikvm logs (for pre-L devices).
+
+Colors:
+ * Primary process is white
+ * Other processes (gpu, renderer) are yellow
+ * Non-apk processes are grey
+ * UI thread has a bolded Thread-ID
+
+Java stack traces are detected and deobfuscated (for release builds).
+
+To disable filtering (but keep coloring), use --verbose.
+""" + needs_package_name = True + supports_multiple_devices = False + + def Run(self): + deobfuscate = None + if self.args.proguard_mapping_path and not self.args.no_deobfuscate: + deobfuscate = deobfuscator.Deobfuscator(self.args.proguard_mapping_path) + + stack_script_context = _StackScriptContext( + self.args.output_directory, + self.args.apk_path, + self.bundle_generation_info, + quiet=True) + + extra_package_names = [] + if self.is_test_apk and self.additional_apk_helpers: + for additional_apk_helper in self.additional_apk_helpers: + extra_package_names.append(additional_apk_helper.GetPackageName()) + + try: + _RunLogcat(self.devices[0], + self.args.package_name, + stack_script_context, + deobfuscate, + bool(self.args.verbose_count), + self.args.exit_on_match, + extra_package_names=extra_package_names) + except KeyboardInterrupt: + pass # Don't show stack trace upon Ctrl-C + finally: + stack_script_context.Close() + if deobfuscate: + deobfuscate.Close() + + def _RegisterExtraArgs(self, group): + if self._from_wrapper_script: + group.add_argument('--no-deobfuscate', action='store_true', + help='Disables ProGuard deobfuscation of logcat.') + else: + group.set_defaults(no_deobfuscate=False) + group.add_argument('--proguard-mapping-path', + help='Path to ProGuard map (enables deobfuscation)') + group.add_argument('--exit-on-match', + help='Exits logcat when a message matches this regex.') + + +class _PsCommand(_Command): + name = 'ps' + description = 'Show PIDs of any APK processes currently running.' + needs_package_name = True + all_devices_by_default = True + + def Run(self): + _RunPs(self.devices, self.args.package_name) + + +class _DiskUsageCommand(_Command): + name = 'disk-usage' + description = 'Show how much device storage is being consumed by the app.' + needs_package_name = True + all_devices_by_default = True + + def Run(self): + _RunDiskUsage(self.devices, self.args.package_name) + + +class _MemUsageCommand(_Command): + name = 'mem-usage' + description = 'Show memory usage of currently running APK processes.' + needs_package_name = True + all_devices_by_default = True + + def _RegisterExtraArgs(self, group): + group.add_argument('--query-app', action='store_true', + help='Do not add --local to "dumpsys meminfo". This will output ' + 'additional metrics (e.g. Context count), but also cause memory ' + 'to be used in order to gather the metrics.') + + def Run(self): + _RunMemUsage(self.devices, self.args.package_name, + query_app=self.args.query_app) + + +class _ShellCommand(_Command): + name = 'shell' + description = ('Same as "adb shell ", but runs as the apk\'s uid ' + '(via run-as). Useful for inspecting the app\'s data ' + 'directory.') + needs_package_name = True + + @property + def calls_exec(self): + return not self.args.cmd + + @property + def supports_multiple_devices(self): + return not self.args.cmd + + def _RegisterExtraArgs(self, group): + group.add_argument( + 'cmd', nargs=argparse.REMAINDER, help='Command to run.') + + def Run(self): + _RunShell(self.devices, self.args.package_name, self.args.cmd) + + +class _CompileDexCommand(_Command): + name = 'compile-dex' + description = ('Applicable only for Android N+. Forces .odex files to be ' + 'compiled with the given compilation filter. 
To see existing '
+ 'filter, use "disk-usage" command.')
+ needs_package_name = True
+ all_devices_by_default = True
+
+ def _RegisterExtraArgs(self, group):
+ group.add_argument(
+ 'compilation_filter',
+ choices=['verify', 'quicken', 'space-profile', 'space',
+ 'speed-profile', 'speed'],
+ help='For WebView/Monochrome, use "speed". For other apks, use '
+ '"speed-profile".')
+
+ def Run(self):
+ _RunCompileDex(self.devices, self.args.package_name,
+ self.args.compilation_filter)
+
+
+class _PrintCertsCommand(_Command):
+ name = 'print-certs'
+ description = 'Print info about certificates used to sign this APK.'
+ need_device_args = False
+ needs_apk_helper = True
+
+ def _RegisterExtraArgs(self, group):
+ group.add_argument(
+ '--full-cert',
+ action='store_true',
+ help=("Print the certificate's full signature, Base64-encoded. "
+ "Useful when configuring an Android image's "
+ "config_webview_packages.xml."))
+
+ def Run(self):
+ keytool = os.path.join(_JAVA_HOME, 'bin', 'keytool')
+ pem_certificate_pattern = re.compile(
+ r'-+BEGIN CERTIFICATE-+([\r\n0-9A-Za-z+/=]+)-+END CERTIFICATE-+[\r\n]*')
+ if self.is_bundle:
+ # Bundles are not signed until converted to .apks. The wrapper scripts
+ # record which key will be used to sign though.
+ with tempfile.NamedTemporaryFile() as f:
+ logging.warning('Bundles are not signed until turned into .apk files.')
+ logging.warning('Showing signing info based on associated keystore.')
+ cmd = [
+ keytool, '-exportcert', '-keystore',
+ self.bundle_generation_info.keystore_path, '-storepass',
+ self.bundle_generation_info.keystore_password, '-alias',
+ self.bundle_generation_info.keystore_alias, '-file', f.name
+ ]
+ subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+ cmd = [keytool, '-printcert', '-file', f.name]
+ logging.warning('Running: %s', ' '.join(cmd))
+ subprocess.check_call(cmd)
+ if self.args.full_cert:
+ # Redirect stderr to hide a keytool warning about using non-standard
+ # keystore format.
+ pem_encoded_certificate = subprocess.check_output(
+ cmd + ['-rfc'], stderr=subprocess.STDOUT).decode()
+ else:
+
+ def run_apksigner(min_sdk_version):
+ cmd = [
+ build_tools.GetPath('apksigner'), 'verify', '--min-sdk-version',
+ str(min_sdk_version), '--print-certs-pem', '--verbose',
+ self.apk_helper.path
+ ]
+ logging.warning('Running: %s', ' '.join(cmd))
+ env = os.environ.copy()
+ env['PATH'] = os.path.pathsep.join(
+ [os.path.join(_JAVA_HOME, 'bin'),
+ env.get('PATH')])
+ # Redirect stderr to hide verification failures (see explanation below).
+ return subprocess.check_output(cmd,
+ env=env,
+ universal_newlines=True,
+ stderr=subprocess.STDOUT)
+
+ # apksigner's default behavior is nonintuitive: it will print "Verified
+ # using ...: false" for any scheme which is obsolete for
+ # the APK's minSdkVersion even if it actually was signed with that scheme
+ # (ex. it prints "Verified using v1 scheme: false" for Monochrome because
+ # v1 was obsolete by N). To work around this, we force apksigner to use
+ # the lowest possible minSdkVersion. We need to fall back to higher
+ # minSdkVersions in case the APK fails to verify for that minSdkVersion
+ # (which means the APK is genuinely not signed with that scheme).
These + # SDK values are the highest SDK version before the next scheme is + # available: + versions = [ + version_codes.MARSHMALLOW, # before v2 launched in N + version_codes.OREO_MR1, # before v3 launched in P + version_codes.Q, # before v4 launched in R + version_codes.R, + ] + stdout = None + for min_sdk_version in versions: + try: + stdout = run_apksigner(min_sdk_version) + break + except subprocess.CalledProcessError: + # Doesn't verify with this min-sdk-version, so try again with a higher + # one + continue + if not stdout: + raise RuntimeError('apksigner was not able to verify APK') + + # Separate what the '--print-certs' flag would output vs. the additional + # signature output included by '--print-certs-pem'. The additional PEM + # output is only printed when self.args.full_cert is specified. + verification_hash_info = pem_certificate_pattern.sub('', stdout) + print(verification_hash_info) + if self.args.full_cert: + m = pem_certificate_pattern.search(stdout) + if not m: + raise Exception('apksigner did not print a certificate') + pem_encoded_certificate = m.group(0) + + + if self.args.full_cert: + m = pem_certificate_pattern.search(pem_encoded_certificate) + if not m: + raise Exception( + 'Unable to parse certificate:\n{}'.format(pem_encoded_certificate)) + signature = re.sub(r'[\r\n]+', '', m.group(1)) + print() + print('Full Signature:') + print(signature) + + +class _ProfileCommand(_Command): + name = 'profile' + description = ('Run the simpleperf sampling CPU profiler on the currently-' + 'running APK. If --args is used, the extra arguments will be ' + 'passed on to simpleperf; otherwise, the following default ' + 'arguments are used: -g -f 1000 -o /data/local/tmp/perf.data') + needs_package_name = True + needs_output_directory = True + supports_multiple_devices = False + accepts_args = True + + def _RegisterExtraArgs(self, group): + group.add_argument( + '--profile-process', default='browser', + help=('Which process to profile. This may be a process name or pid ' + 'such as you would get from running `%s ps`; or ' + 'it can be one of (browser, renderer, gpu).' % sys.argv[0])) + group.add_argument( + '--profile-thread', default=None, + help=('(Optional) Profile only a single thread. This may be either a ' + 'thread ID such as you would get by running `adb shell ps -t` ' + '(pre-Oreo) or `adb shell ps -e -T` (Oreo and later); or it may ' + 'be one of (io, compositor, main, render), in which case ' + '--profile-process is also required. (Note that "render" thread ' + 'refers to a thread in the browser process that manages a ' + 'renderer; to profile the main thread of the renderer process, ' + 'use --profile-thread=main).')) + group.add_argument('--profile-output', default='profile.pb', + help='Output file for profiling data') + group.add_argument('--profile-events', default='cpu-cycles', + help=('A comma separated list of perf events to capture ' + '(e.g. \'cpu-cycles,branch-misses\'). Run ' + '`simpleperf list` on your device to see available ' + 'events.')) + + def Run(self): + extra_args = shlex.split(self.args.args or '') + _RunProfile(self.devices[0], self.args.package_name, + self.args.output_directory, self.args.profile_output, + self.args.profile_process, self.args.profile_thread, + self.args.profile_events, extra_args) + + +class _RunCommand(_InstallCommand, _LaunchCommand, _LogcatCommand): + name = 'run' + description = 'Install, launch, and show logcat (when targeting one device).' 
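
The pem_certificate_pattern handling above separates apksigner's
human-readable summary from its PEM blocks. Exercised against a tiny
synthetic sample (not real apksigner output):

import re

pem_pattern = re.compile(
    r'-+BEGIN CERTIFICATE-+([\r\n0-9A-Za-z+/=]+)-+END CERTIFICATE-+[\r\n]*')
sample = ('Signer #1 certificate DN: CN=example\n'
          '-----BEGIN CERTIFICATE-----\n'
          'MIIBIjANBgkq\n'
          'aGVsbG8=\n'
          '-----END CERTIFICATE-----\n')
print(pem_pattern.sub('', sample))  # -> the summary line only
signature = re.sub(r'[\r\n]+', '', pem_pattern.search(sample).group(1))
print(signature)                    # -> MIIBIjANBgkqaGVsbG8=
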
+ all_devices_by_default = False + supports_multiple_devices = True + + def _RegisterExtraArgs(self, group): + _InstallCommand._RegisterExtraArgs(self, group) + _LaunchCommand._RegisterExtraArgs(self, group) + _LogcatCommand._RegisterExtraArgs(self, group) + group.add_argument('--no-logcat', action='store_true', + help='Install and launch, but do not enter logcat.') + + def Run(self): + if self.is_test_apk: + raise Exception('Use the bin/run_* scripts to run test apks.') + logging.warning('Installing...') + _InstallCommand.Run(self) + logging.warning('Sending launch intent...') + _LaunchCommand.Run(self) + if len(self.devices) == 1 and not self.args.no_logcat: + logging.warning('Entering logcat...') + _LogcatCommand.Run(self) + + +class _BuildBundleApks(_Command): + name = 'build-bundle-apks' + description = ('Build the .apks archive from an Android app bundle, and ' + 'optionally copy it to a specific destination.') + need_device_args = False + + def _RegisterExtraArgs(self, group): + group.add_argument( + '--output-apks', required=True, help='Destination path for .apks file.') + group.add_argument( + '--minimal', + action='store_true', + help='Build .apks archive that targets the bundle\'s minSdkVersion and ' + 'contains only english splits. It still contains optional splits.') + group.add_argument( + '--sdk-version', help='The sdkVersion to build the .apks for.') + group.add_argument( + '--build-mode', + choices=app_bundle_utils.BUILD_APKS_MODES, + help='Specify which type of APKs archive to build. "default" ' + 'generates regular splits, "universal" generates an archive with a ' + 'single universal APK, "system" generates an archive with a system ' + 'image APK, while "system_compressed" generates a compressed system ' + 'APK, with an additional stub APK for the system image.') + group.add_argument( + '--optimize-for', + choices=app_bundle_utils.OPTIMIZE_FOR_OPTIONS, + help='Override split configuration.') + + def Run(self): + _GenerateBundleApks( + self.bundle_generation_info, + output_path=self.args.output_apks, + minimal=self.args.minimal, + minimal_sdk_version=self.args.sdk_version, + mode=self.args.build_mode, + optimize_for=self.args.optimize_for) + + +class _ManifestCommand(_Command): + name = 'dump-manifest' + description = 'Dump the android manifest as XML, to stdout.' + need_device_args = False + needs_apk_helper = True + + def Run(self): + if self.is_bundle: + sys.stdout.write( + bundletool.RunBundleTool([ + 'dump', 'manifest', '--bundle', + self.bundle_generation_info.bundle_path + ])) + else: + apkanalyzer = os.path.join(_DIR_SOURCE_ROOT, 'third_party', 'android_sdk', + 'public', 'cmdline-tools', 'latest', 'bin', + 'apkanalyzer') + subprocess.check_call( + [apkanalyzer, 'manifest', 'print', self.apk_helper.path]) + + +class _StackCommand(_Command): + name = 'stack' + description = 'Decodes an Android stack.' + need_device_args = False + + def _RegisterExtraArgs(self, group): + group.add_argument( + 'file', + nargs='?', + help='File to decode. If not specified, stdin is processed.') + + def Run(self): + context = _StackScriptContext(self.args.output_directory, + self.args.apk_path, + self.bundle_generation_info) + try: + proc = context.Popen(input_file=self.args.file) + if proc.wait(): + raise Exception('stack script returned {}'.format(proc.returncode)) + finally: + context.Close() + + +# Shared commands for regular APKs and app bundles. 
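
Every command class above plugs into the same registry-and-dispatch pattern:
RegisterArgs() creates a subparser that records the command instance via
set_defaults(command=self), and the entry points simply call
args.command.Run(). Reduced to a skeleton with a hypothetical command:

import argparse

class _Command:
  name = None
  description = None

  def RegisterArgs(self, parser):
    subp = parser.add_parser(self.name, help=self.description)
    subp.set_defaults(command=self)

class _HelloCommand(_Command):
  name = 'hello'
  description = 'Prints a greeting.'

  def Run(self):
    print('hello')

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
for clazz in [_HelloCommand]:
  clazz().RegisterArgs(subparsers)
args = parser.parse_args(['hello'])
args.command.Run()  # -> hello
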
+_COMMANDS = [
+ _DevicesCommand,
+ _PackageInfoCommand,
+ _InstallCommand,
+ _UninstallCommand,
+ _SetWebViewProviderCommand,
+ _LaunchCommand,
+ _StopCommand,
+ _ClearDataCommand,
+ _ArgvCommand,
+ _GdbCommand,
+ _LogcatCommand,
+ _PsCommand,
+ _DiskUsageCommand,
+ _MemUsageCommand,
+ _ShellCommand,
+ _CompileDexCommand,
+ _PrintCertsCommand,
+ _ProfileCommand,
+ _RunCommand,
+ _StackCommand,
+ _ManifestCommand,
+]
+
+# Commands specific to app bundles.
+_BUNDLE_COMMANDS = [
+ _BuildBundleApks,
+]
+
+
+def _ParseArgs(parser, from_wrapper_script, is_bundle, is_test_apk):
+ subparsers = parser.add_subparsers()
+ command_list = _COMMANDS + (_BUNDLE_COMMANDS if is_bundle else [])
+ commands = [
+ clazz(from_wrapper_script, is_bundle, is_test_apk)
+ for clazz in command_list
+ ]
+
+ for command in commands:
+ if from_wrapper_script or not command.needs_output_directory:
+ command.RegisterArgs(subparsers)
+
+ # Show extended help when no command is passed.
+ argv = sys.argv[1:]
+ if not argv:
+ argv = ['--help']
+
+ return parser.parse_args(argv)
+
+
+def _RunInternal(parser,
+ output_directory=None,
+ additional_apk_paths=None,
+ bundle_generation_info=None,
+ is_test_apk=False):
+ colorama.init()
+ parser.set_defaults(
+ additional_apk_paths=additional_apk_paths,
+ output_directory=output_directory)
+ from_wrapper_script = bool(output_directory)
+ args = _ParseArgs(parser,
+ from_wrapper_script,
+ is_bundle=bool(bundle_generation_info),
+ is_test_apk=is_test_apk)
+ run_tests_helper.SetLogLevel(args.verbose_count)
+ if bundle_generation_info:
+ args.command.RegisterBundleGenerationInfo(bundle_generation_info)
+ if args.additional_apk_paths:
+ for path in additional_apk_paths:
+ if not path or not os.path.exists(path):
+ raise Exception('Invalid additional APK path "{}"'.format(path))
+ args.command.ProcessArgs(args)
+ args.command.Run()
+ # Incremental install depends on the cache being cleared when uninstalling.
+ if args.command.name != 'uninstall':
+ _SaveDeviceCaches(args.command.devices, output_directory)
+
+
+def Run(output_directory, apk_path, additional_apk_paths, incremental_json,
+ command_line_flags_file, target_cpu, proguard_mapping_path):
+ """Entry point for generated wrapper scripts."""
+ constants.SetOutputDirectory(output_directory)
+ devil_chromium.Initialize(output_directory=output_directory)
+ parser = argparse.ArgumentParser()
+ exists_or_none = lambda p: p if p and os.path.exists(p) else None
+
+ parser.set_defaults(
+ command_line_flags_file=command_line_flags_file,
+ target_cpu=target_cpu,
+ apk_path=exists_or_none(apk_path),
+ incremental_json=exists_or_none(incremental_json),
+ proguard_mapping_path=proguard_mapping_path)
+ _RunInternal(
+ parser,
+ output_directory=output_directory,
+ additional_apk_paths=additional_apk_paths)
+
+
+def RunForBundle(output_directory, bundle_path, bundle_apks_path,
+ additional_apk_paths, aapt2_path, keystore_path,
+ keystore_password, keystore_alias, package_name,
+ command_line_flags_file, proguard_mapping_path, target_cpu,
+ system_image_locales, default_modules):
+ """Entry point for generated app bundle wrapper scripts.
+
+ Args:
+ output_directory: Chromium output directory path.
+ bundle_path: Input bundle path.
+ bundle_apks_path: Output bundle .apks archive path.
+ additional_apk_paths: Additional APKs to install prior to bundle install.
+ aapt2_path: Aapt2 tool path.
+ keystore_path: Keystore file path.
+ keystore_password: Keystore password.
+ keystore_alias: Signing key name alias in keystore file.
+    package_name: Application's package name.
+    command_line_flags_file: Optional. Name of an on-device file that will be
+      used to store command-line flags for this bundle.
+    proguard_mapping_path: Input path to the Proguard mapping file, used to
+      deobfuscate Java stack traces.
+    target_cpu: Chromium target CPU name, used by the 'gdb' command.
+    system_image_locales: List of Chromium locales that should be included in
+      system image APKs.
+    default_modules: List of modules that are installed in addition to those
+      given by the '-m' switch.
+  """
+  constants.SetOutputDirectory(output_directory)
+  devil_chromium.Initialize(output_directory=output_directory)
+  bundle_generation_info = BundleGenerationInfo(
+      bundle_path=bundle_path,
+      bundle_apks_path=bundle_apks_path,
+      aapt2_path=aapt2_path,
+      keystore_path=keystore_path,
+      keystore_password=keystore_password,
+      keystore_alias=keystore_alias,
+      system_image_locales=system_image_locales)
+  _InstallCommand.default_modules = default_modules
+
+  parser = argparse.ArgumentParser()
+  parser.set_defaults(
+      package_name=package_name,
+      command_line_flags_file=command_line_flags_file,
+      proguard_mapping_path=proguard_mapping_path,
+      target_cpu=target_cpu)
+  _RunInternal(
+      parser,
+      output_directory=output_directory,
+      additional_apk_paths=additional_apk_paths,
+      bundle_generation_info=bundle_generation_info)
+
+
+def RunForTestApk(*, output_directory, package_name, test_apk_path,
+                  test_apk_json, proguard_mapping_path, additional_apk_paths):
+  """Entry point for generated test apk wrapper scripts.
+
+  This is intended to make commands like logcat (with proguard deobfuscation)
+  available. The run_* scripts should be used to actually run tests.
+
+  Args:
+    output_directory: Chromium output directory path.
+    package_name: The package name for the test apk.
+    test_apk_path: The test apk to install.
+    test_apk_json: The incremental json dict for the test apk.
+    proguard_mapping_path: Input path to the Proguard mapping file, used to
+      deobfuscate Java stack traces.
+    additional_apk_paths: Additional APKs to install.
+ """ + constants.SetOutputDirectory(output_directory) + devil_chromium.Initialize(output_directory=output_directory) + + parser = argparse.ArgumentParser() + exists_or_none = lambda p: p if p and os.path.exists(p) else None + + parser.set_defaults(apk_path=exists_or_none(test_apk_path), + incremental_json=exists_or_none(test_apk_json), + package_name=package_name, + proguard_mapping_path=proguard_mapping_path) + + _RunInternal(parser, + output_directory=output_directory, + additional_apk_paths=additional_apk_paths, + is_test_apk=True) + + +def main(): + devil_chromium.Initialize() + _RunInternal(argparse.ArgumentParser()) + + +if __name__ == '__main__': + main() diff --git a/android/apk_operations.pydeps b/android/apk_operations.pydeps new file mode 100644 index 000000000000..d20bcf24581e --- /dev/null +++ b/android/apk_operations.pydeps @@ -0,0 +1,112 @@ +# Generated by running: +# build/print_python_deps.py --root build/android --output build/android/apk_operations.pydeps build/android/apk_operations.py +../../third_party/catapult/common/py_utils/py_utils/__init__.py +../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py +../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py +../../third_party/catapult/common/py_utils/py_utils/lock.py +../../third_party/catapult/common/py_utils/py_utils/tempfile_ext.py +../../third_party/catapult/dependency_manager/dependency_manager/__init__.py +../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py +../../third_party/catapult/dependency_manager/dependency_manager/base_config.py +../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py +../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py +../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py +../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py +../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py +../../third_party/catapult/dependency_manager/dependency_manager/manager.py +../../third_party/catapult/dependency_manager/dependency_manager/uploader.py +../../third_party/catapult/devil/devil/__init__.py +../../third_party/catapult/devil/devil/android/__init__.py +../../third_party/catapult/devil/devil/android/apk_helper.py +../../third_party/catapult/devil/devil/android/constants/__init__.py +../../third_party/catapult/devil/devil/android/constants/chrome.py +../../third_party/catapult/devil/devil/android/constants/file_system.py +../../third_party/catapult/devil/devil/android/decorators.py +../../third_party/catapult/devil/devil/android/device_denylist.py +../../third_party/catapult/devil/devil/android/device_errors.py +../../third_party/catapult/devil/devil/android/device_signal.py +../../third_party/catapult/devil/devil/android/device_temp_file.py +../../third_party/catapult/devil/devil/android/device_utils.py +../../third_party/catapult/devil/devil/android/flag_changer.py +../../third_party/catapult/devil/devil/android/install_commands.py +../../third_party/catapult/devil/devil/android/logcat_monitor.py +../../third_party/catapult/devil/devil/android/md5sum.py +../../third_party/catapult/devil/devil/android/ndk/__init__.py +../../third_party/catapult/devil/devil/android/ndk/abis.py +../../third_party/catapult/devil/devil/android/sdk/__init__.py +../../third_party/catapult/devil/devil/android/sdk/aapt.py +../../third_party/catapult/devil/devil/android/sdk/adb_wrapper.py 
+../../third_party/catapult/devil/devil/android/sdk/build_tools.py +../../third_party/catapult/devil/devil/android/sdk/bundletool.py +../../third_party/catapult/devil/devil/android/sdk/intent.py +../../third_party/catapult/devil/devil/android/sdk/keyevent.py +../../third_party/catapult/devil/devil/android/sdk/split_select.py +../../third_party/catapult/devil/devil/android/sdk/version_codes.py +../../third_party/catapult/devil/devil/android/tools/__init__.py +../../third_party/catapult/devil/devil/android/tools/script_common.py +../../third_party/catapult/devil/devil/base_error.py +../../third_party/catapult/devil/devil/constants/__init__.py +../../third_party/catapult/devil/devil/constants/exit_codes.py +../../third_party/catapult/devil/devil/devil_env.py +../../third_party/catapult/devil/devil/utils/__init__.py +../../third_party/catapult/devil/devil/utils/cmd_helper.py +../../third_party/catapult/devil/devil/utils/host_utils.py +../../third_party/catapult/devil/devil/utils/lazy/__init__.py +../../third_party/catapult/devil/devil/utils/lazy/weak_constant.py +../../third_party/catapult/devil/devil/utils/logging_common.py +../../third_party/catapult/devil/devil/utils/lsusb.py +../../third_party/catapult/devil/devil/utils/parallelizer.py +../../third_party/catapult/devil/devil/utils/reraiser_thread.py +../../third_party/catapult/devil/devil/utils/reset_usb.py +../../third_party/catapult/devil/devil/utils/run_tests_helper.py +../../third_party/catapult/devil/devil/utils/timeout_retry.py +../../third_party/catapult/devil/devil/utils/watchdog_timer.py +../../third_party/catapult/devil/devil/utils/zip_utils.py +../../third_party/catapult/third_party/six/six.py +../../third_party/jinja2/__init__.py +../../third_party/jinja2/_identifier.py +../../third_party/jinja2/async_utils.py +../../third_party/jinja2/bccache.py +../../third_party/jinja2/compiler.py +../../third_party/jinja2/defaults.py +../../third_party/jinja2/environment.py +../../third_party/jinja2/exceptions.py +../../third_party/jinja2/filters.py +../../third_party/jinja2/idtracking.py +../../third_party/jinja2/lexer.py +../../third_party/jinja2/loaders.py +../../third_party/jinja2/nodes.py +../../third_party/jinja2/optimizer.py +../../third_party/jinja2/parser.py +../../third_party/jinja2/runtime.py +../../third_party/jinja2/tests.py +../../third_party/jinja2/utils.py +../../third_party/jinja2/visitor.py +../../third_party/markupsafe/__init__.py +../../third_party/markupsafe/_compat.py +../../third_party/markupsafe/_native.py +../action_helpers.py +../gn_helpers.py +../print_python_deps.py +../zip_helpers.py +adb_command_line.py +apk_operations.py +devil_chromium.py +gyp/bundletool.py +gyp/dex.py +gyp/util/__init__.py +gyp/util/build_utils.py +gyp/util/md5_check.py +gyp/util/resource_utils.py +incremental_install/__init__.py +incremental_install/installer.py +pylib/__init__.py +pylib/constants/__init__.py +pylib/constants/host_paths.py +pylib/symbols/__init__.py +pylib/symbols/deobfuscator.py +pylib/symbols/expensive_line_transformer.py +pylib/utils/__init__.py +pylib/utils/app_bundle_utils.py +pylib/utils/simpleperf.py +pylib/utils/time_profile.py diff --git a/android/apply_shared_preference_file.py b/android/apply_shared_preference_file.py new file mode 100755 index 000000000000..a4aa4994cf79 --- /dev/null +++ b/android/apply_shared_preference_file.py @@ -0,0 +1,50 @@ +#!/usr/bin/env vpython3 +# +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE 
file. + +"""Manually applies a shared preference JSON file. + +If needed during automation, use the --shared-prefs-file in test_runner.py +instead. +""" + +import argparse +import sys + +# pylint: disable=ungrouped-imports +from pylib.constants import host_paths +if host_paths.DEVIL_PATH not in sys.path: + sys.path.append(host_paths.DEVIL_PATH) + +from devil.android import device_utils +from devil.android.sdk import shared_prefs +from pylib.utils import shared_preference_utils + + +def main(): + parser = argparse.ArgumentParser( + description='Manually apply shared preference JSON files.') + parser.add_argument('filepaths', nargs='*', + help='Any number of paths to shared preference JSON ' + 'files to apply.') + args = parser.parse_args() + + all_devices = device_utils.DeviceUtils.HealthyDevices() + if not all_devices: + raise RuntimeError('No healthy devices attached') + + for filepath in args.filepaths: + all_settings = shared_preference_utils.ExtractSettingsFromJson(filepath) + for setting in all_settings: + for device in all_devices: + shared_pref = shared_prefs.SharedPrefs( + device, setting['package'], setting['filename'], + use_encrypted_path=setting.get('supports_encrypted_path', False)) + shared_preference_utils.ApplySharedPreferenceSetting( + shared_pref, setting) + + +if __name__ == '__main__': + main() diff --git a/android/asan_symbolize.py b/android/asan_symbolize.py new file mode 100755 index 000000000000..3274b95042f7 --- /dev/null +++ b/android/asan_symbolize.py @@ -0,0 +1,153 @@ +#!/usr/bin/env python3 +# +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +import argparse +import collections +import os +import re +import sys + +from pylib import constants +from pylib.constants import host_paths + +# pylint: disable=wrong-import-order +# Uses symbol.py from third_party/android_platform, not python's. +with host_paths.SysPath( + host_paths.ANDROID_PLATFORM_DEVELOPMENT_SCRIPTS_PATH, + position=0): + import symbol + + +_RE_ASAN = re.compile( + r""" + (?P.*?) + (?P\#\S*?) # position of the call in stack. + # escape the char "#" due to the VERBOSE flag. + \s+(\S*?)\s+ + \( # match the char "(". + (?P.*?) # library path. + \+0[xX](?P.*?) # address of the symbol in hex. + # the prefix "0x" is skipped. + \) # match the char ")". + """, re.VERBOSE) + +# This named tuple models a parsed Asan log line. +AsanParsedLine = collections.namedtuple('AsanParsedLine', + 'prefix,library,pos,rel_address') + +# This named tuple models an Asan log line. 'raw' is the raw content +# while 'parsed' is None or an AsanParsedLine instance. 
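+# As an illustration (values are made up), a logcat line such as
+#   ...ERROR #3 0xdead (/system/lib/libfoo.so+0x1234)
+# parses to AsanParsedLine(prefix='...ERROR ', pos='#3',
+# library='/system/lib/libfoo.so', rel_address=0x1234).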
+AsanLogLine = collections.namedtuple('AsanLogLine', 'raw,parsed')
+
+def _ParseAsanLogLine(line):
+  """Parse line into corresponding AsanParsedLine value, if any, or None."""
+  m = re.match(_RE_ASAN, line)
+  if not m:
+    return None
+  return AsanParsedLine(prefix=m.group('prefix'),
+                        library=m.group('lib'),
+                        pos=m.group('pos'),
+                        rel_address=int(m.group('addr'), 16))
+
+
+def _FindASanLibraries():
+  asan_lib_dir = os.path.join(host_paths.DIR_SOURCE_ROOT,
+                              'third_party', 'llvm-build',
+                              'Release+Asserts', 'lib')
+  asan_libs = []
+  for src_dir, _, files in os.walk(asan_lib_dir):
+    asan_libs += [os.path.relpath(os.path.join(src_dir, f))
+                  for f in files
+                  if f.endswith('.so')]
+  return asan_libs
+
+
+def _TranslateLibPath(library, asan_libs):
+  for asan_lib in asan_libs:
+    if os.path.basename(library) == os.path.basename(asan_lib):
+      return '/' + asan_lib
+  # pylint: disable=no-member
+  return symbol.TranslateLibPath(library)
+
+
+def _PrintSymbolized(asan_input, arch):
+  """Print symbolized logcat output for Asan symbols.
+
+  Args:
+    asan_input: list of input lines.
+    arch: Target CPU architecture.
+  """
+  asan_libs = _FindASanLibraries()
+
+  # Maps library -> [ AsanParsedLine... ]
+  libraries = collections.defaultdict(list)
+
+  asan_log_lines = []
+  for line in asan_input:
+    line = line.rstrip()
+    parsed = _ParseAsanLogLine(line)
+    if parsed:
+      libraries[parsed.library].append(parsed)
+    asan_log_lines.append(AsanLogLine(raw=line, parsed=parsed))
+
+  # Maps library -> { address -> [(symbol, location, obj_sym_with_offset)...] }
+  all_symbols = collections.defaultdict(dict)
+
+  for library, items in libraries.items():
+    libname = _TranslateLibPath(library, asan_libs)
+    lib_relative_addrs = set(i.rel_address for i in items)
+    # pylint: disable=no-member
+    symbols_by_library = symbol.SymbolInformationForSet(libname,
+                                                        lib_relative_addrs,
+                                                        True,
+                                                        cpu_arch=arch)
+    if symbols_by_library:
+      all_symbols[library] = symbols_by_library
+
+  for log_line in asan_log_lines:
+    m = log_line.parsed
+    if (m and m.library in all_symbols and
+        m.rel_address in all_symbols[m.library]):
+      # NOTE: all_symbols[lib][address] is a never-empty list of tuples.
+      # NOTE: The documentation for SymbolInformationForSet() indicates
+      # that usually one wants to display the last list item, not the first.
+      # The code below takes the first; is this the best choice here?
+      s = all_symbols[m.library][m.rel_address][0]
+      symbol_name = s[0]
+      symbol_location = s[1]
+      print('%s%s %s %s @ \'%s\'' %
+            (m.prefix, m.pos, hex(m.rel_address), symbol_name, symbol_location))
+    else:
+      print(log_line.raw)
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('-l',
+                      '--logcat',
+                      help='File containing adb logcat output with ASan '
+                      'stacks. Use stdin if not specified.')
+  parser.add_argument('--output-directory',
+                      help='Path to the root build directory.')
+  parser.add_argument('--arch', default='arm', help='CPU architecture name')
+  args = parser.parse_args()
+
+  if args.output_directory:
+    constants.SetOutputDirectory(args.output_directory)
+  # Do an up-front test that the output directory is known.
+ constants.CheckOutputDirectory() + + if args.logcat: + asan_input = open(args.logcat, 'r') + else: + asan_input = sys.stdin + + _PrintSymbolized(asan_input.readlines(), args.arch) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/android/bytecode/BUILD.gn b/android/bytecode/BUILD.gn new file mode 100644 index 000000000000..9478807d78e4 --- /dev/null +++ b/android/bytecode/BUILD.gn @@ -0,0 +1,86 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/rules.gni") + +java_binary("bytecode_processor") { + main_class = "org.chromium.bytecode.ByteCodeProcessor" + wrapper_script_name = "helper/bytecode_processor" + deps = [ ":bytecode_processor_java" ] +} + +java_library("bytecode_processor_java") { + sources = [ + "java/org/chromium/bytecode/ByteCodeProcessor.java", + "java/org/chromium/bytecode/ClassPathValidator.java", + "java/org/chromium/bytecode/TypeUtils.java", + ] + deps = [ + "//third_party/android_deps:org_ow2_asm_asm_java", + "//third_party/android_deps:org_ow2_asm_asm_util_java", + ] + enable_bytecode_checks = false +} + +# A bytecode rewriter that replaces all calls to +# `FragmentActivity Fragment.getActivity()` with +# `Activity Fragment.getActivity()`. +java_binary("fragment_activity_replacer") { + main_class = "org.chromium.bytecode.FragmentActivityReplacer" + deps = [ ":fragment_activity_replacer_java" ] + wrapper_script_name = "helper/fragment_activity_replacer" +} + +# A bytecode rewriter that replaces all calls to +# `FragmentActivity Fragment.getActivity()` with +# `Activity Fragment.getActivity()` followed by a cast to FragmentActivity. +# Prefer :fragment_activity_replacer. This rewriter should only be used for +# libraries that rely on getActivity() returning a FragmentActivity *and* are +# not going to be used in an app that contains multiple copies of the AndroidX +# Fragment library (i.e. WebLayer). 
+java_binary("fragment_activity_replacer_single_androidx") { + main_class = "org.chromium.bytecode.FragmentActivityReplacer" + deps = [ ":fragment_activity_replacer_java" ] + wrapper_script_name = "helper/fragment_activity_replacer_single_androidx" + wrapper_script_args = [ "--single-androidx" ] +} + +java_library("fragment_activity_replacer_java") { + visibility = [ ":*" ] + sources = [ + "java/org/chromium/bytecode/ByteCodeRewriter.java", + "java/org/chromium/bytecode/FragmentActivityReplacer.java", + ] + deps = [ + "//third_party/android_deps:org_ow2_asm_asm_commons_java", + "//third_party/android_deps:org_ow2_asm_asm_java", + "//third_party/android_deps:org_ow2_asm_asm_util_java", + ] +} + +java_binary("trace_event_adder") { + main_class = "org.chromium.bytecode.TraceEventAdder" + deps = [ ":trace_event_adder_java" ] + wrapper_script_name = "helper/trace_event_adder" +} + +java_library("trace_event_adder_java") { + visibility = [ ":*" ] + sources = [ + "java/org/chromium/bytecode/ByteCodeRewriter.java", + "java/org/chromium/bytecode/EmptyOverrideGeneratorClassAdapter.java", + "java/org/chromium/bytecode/MethodCheckerClassAdapter.java", + "java/org/chromium/bytecode/MethodDescription.java", + "java/org/chromium/bytecode/ParentMethodCheckerClassAdapter.java", + "java/org/chromium/bytecode/TraceEventAdder.java", + "java/org/chromium/bytecode/TraceEventAdderClassAdapter.java", + "java/org/chromium/bytecode/TraceEventAdderMethodAdapter.java", + ] + deps = [ + ":bytecode_processor_java", + "//third_party/android_deps:org_ow2_asm_asm_commons_java", + "//third_party/android_deps:org_ow2_asm_asm_java", + "//third_party/android_deps:org_ow2_asm_asm_util_java", + ] +} diff --git a/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java b/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java new file mode 100644 index 000000000000..48624914717d --- /dev/null +++ b/android/bytecode/java/org/chromium/bytecode/ByteCodeProcessor.java @@ -0,0 +1,167 @@ +// Copyright 2017 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.bytecode; + +import org.objectweb.asm.ClassReader; + +import java.io.BufferedInputStream; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.MalformedURLException; +import java.net.URL; +import java.net.URLClassLoader; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; +import java.util.zip.ZipEntry; +import java.util.zip.ZipInputStream; + +/** + * Java application that takes in an input jar, performs a series of bytecode + * transformations, and generates an output jar. 
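+ *
+ * <p>Arguments are positional (see main() below): the GN target name, the input jar path, the
+ * verbose and prebuilt flags, and then several length-prefixed string lists decoded by
+ * parseListArgument().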
+ */
+class ByteCodeProcessor {
+    private static final String CLASS_FILE_SUFFIX = ".class";
+    private static final int BUFFER_SIZE = 16384;
+    private static boolean sVerbose;
+    private static boolean sIsPrebuilt;
+    private static ClassLoader sDirectClassPathClassLoader;
+    private static ClassLoader sFullClassPathClassLoader;
+    private static Set<String> sFullClassPathJarPaths;
+    private static Set<String> sMissingClassesAllowlist;
+    private static Map<String, String> sJarToGnTarget;
+    private static ClassPathValidator sValidator;
+
+    private static Void processEntry(ZipEntry entry, byte[] data) {
+        ClassReader reader = new ClassReader(data);
+        if (sIsPrebuilt) {
+            sValidator.validateFullClassPath(
+                    reader, sFullClassPathClassLoader, sMissingClassesAllowlist);
+        } else {
+            sValidator.validateDirectClassPath(reader, sDirectClassPathClassLoader,
+                    sFullClassPathClassLoader, sFullClassPathJarPaths, sMissingClassesAllowlist,
+                    sVerbose);
+        }
+        return null;
+    }
+
+    private static void process(String gnTarget, String inputJarPath)
+            throws ExecutionException, InterruptedException {
+        ExecutorService executorService =
+                Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
+        try (ZipInputStream inputStream = new ZipInputStream(
+                     new BufferedInputStream(new FileInputStream(inputJarPath)))) {
+            while (true) {
+                ZipEntry entry = inputStream.getNextEntry();
+                if (entry == null) {
+                    break;
+                }
+                byte[] data = readAllBytes(inputStream);
+                executorService.submit(() -> processEntry(entry, data));
+            }
+            executorService.shutdown(); // This is essential in order to avoid waiting infinitely.
+            executorService.awaitTermination(1, TimeUnit.HOURS);
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+
+        if (sValidator.hasErrors()) {
+            sValidator.printAll(gnTarget, sJarToGnTarget);
+            System.exit(1);
+        }
+    }
+
+    private static byte[] readAllBytes(InputStream inputStream) throws IOException {
+        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+        int numRead = 0;
+        byte[] data = new byte[BUFFER_SIZE];
+        while ((numRead = inputStream.read(data, 0, data.length)) != -1) {
+            buffer.write(data, 0, numRead);
+        }
+        return buffer.toByteArray();
+    }
+
+    /**
+     * Loads a list of jars and returns a ClassLoader capable of loading all classes found in the
+     * given jars.
+     */
+    static ClassLoader loadJars(Collection<String> paths) {
+        URL[] jarUrls = new URL[paths.size()];
+        int i = 0;
+        for (String path : paths) {
+            try {
+                jarUrls[i++] = new File(path).toURI().toURL();
+            } catch (MalformedURLException e) {
+                throw new RuntimeException(e);
+            }
+        }
+        return new URLClassLoader(jarUrls);
+    }
+
+    /**
+     * Extracts a length-encoded list of strings from the arguments, and adds them to |out|. Returns
+     * the new "next index" to be processed.
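+     *
+     * <p>For example (hypothetical input): with {@code args = {"2", "a", "b", "c"}} and
+     * {@code index = 0}, the length prefix "2" is consumed, "a" and "b" are added to |out|,
+     * and the method returns 3.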
+     */
+    private static int parseListArgument(String[] args, int index, Collection<String> out) {
+        int argLength = Integer.parseInt(args[index++]);
+        out.addAll(Arrays.asList(Arrays.copyOfRange(args, index, index + argLength)));
+        return index + argLength;
+    }
+
+    public static void main(String[] args) throws ClassPathValidator.ClassNotLoadedException,
+            ExecutionException, InterruptedException {
+        // Invoke this script using //build/android/gyp/bytecode_processor.py
+        int currIndex = 0;
+        String gnTarget = args[currIndex++];
+        String inputJarPath = args[currIndex++];
+        sVerbose = args[currIndex++].equals("--verbose");
+        sIsPrebuilt = args[currIndex++].equals("--is-prebuilt");
+
+        sMissingClassesAllowlist = new HashSet<>();
+        currIndex = parseListArgument(args, currIndex, sMissingClassesAllowlist);
+
+        ArrayList<String> sdkJarPaths = new ArrayList<>();
+        currIndex = parseListArgument(args, currIndex, sdkJarPaths);
+
+        ArrayList<String> directClassPathJarPaths = new ArrayList<>();
+        directClassPathJarPaths.add(inputJarPath);
+        directClassPathJarPaths.addAll(sdkJarPaths);
+        currIndex = parseListArgument(args, currIndex, directClassPathJarPaths);
+        sDirectClassPathClassLoader = loadJars(directClassPathJarPaths);
+
+        ArrayList<String> fullClassPathJarPaths = new ArrayList<>();
+        currIndex = parseListArgument(args, currIndex, fullClassPathJarPaths);
+        ArrayList<String> gnTargets = new ArrayList<>();
+        parseListArgument(args, currIndex, gnTargets);
+        sJarToGnTarget = new HashMap<>();
+        assert fullClassPathJarPaths.size() == gnTargets.size();
+        for (int i = 0; i < fullClassPathJarPaths.size(); ++i) {
+            sJarToGnTarget.put(fullClassPathJarPaths.get(i), gnTargets.get(i));
+        }
+
+        // Load all jars that are on the classpath for the input jar for analyzing class
+        // hierarchy.
+        sFullClassPathJarPaths = new HashSet<>();
+        sFullClassPathJarPaths.add(inputJarPath);
+        sFullClassPathJarPaths.addAll(sdkJarPaths);
+        sFullClassPathJarPaths.addAll(fullClassPathJarPaths);
+        sFullClassPathClassLoader = loadJars(sFullClassPathJarPaths);
+        sFullClassPathJarPaths.removeAll(directClassPathJarPaths);
+
+        sValidator = new ClassPathValidator();
+        process(gnTarget, inputJarPath);
+    }
+}
diff --git a/android/bytecode/java/org/chromium/bytecode/ByteCodeRewriter.java b/android/bytecode/java/org/chromium/bytecode/ByteCodeRewriter.java
new file mode 100644
index 000000000000..b97f87dada38
--- /dev/null
+++ b/android/bytecode/java/org/chromium/bytecode/ByteCodeRewriter.java
@@ -0,0 +1,116 @@
+// Copyright 2020 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.ClassReader;
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.ClassWriter;
+
+import java.io.BufferedInputStream;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+import java.util.zip.ZipOutputStream;
+
+/**
+ * Base class for scripts that perform bytecode modifications on a jar file.
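+ *
+ * <p>Subclasses (FragmentActivityReplacer in this directory is one example) implement
+ * shouldRewriteClass() and getClassVisitorForClass() to select and transform individual class
+ * files inside the jar.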
+ */ +public abstract class ByteCodeRewriter { + private static final String CLASS_FILE_SUFFIX = ".class"; + + public void rewrite(File inputJar, File outputJar) throws IOException { + if (!inputJar.exists()) { + throw new FileNotFoundException("Input jar not found: " + inputJar.getPath()); + } + + try (InputStream inputStream = new BufferedInputStream(new FileInputStream(inputJar)); + OutputStream outputStream = new FileOutputStream(outputJar)) { + processZip(inputStream, outputStream); + } + } + + /** Returns true if the class at the given path in the archive should be rewritten. */ + protected abstract boolean shouldRewriteClass(String classPath); + + /** + * Returns true if the class at the given {@link ClassReader} should be rewritten. + */ + protected boolean shouldRewriteClass(ClassReader classReader) { + return true; + } + + /** + * Returns the ClassVisitor that should be used to modify the bytecode of class at the given + * path in the archive. + */ + protected abstract ClassVisitor getClassVisitorForClass( + String classPath, ClassVisitor delegate); + + private void processZip(InputStream inputStream, OutputStream outputStream) { + try (ZipOutputStream zipOutputStream = new ZipOutputStream(outputStream); + ZipInputStream zipInputStream = new ZipInputStream(inputStream)) { + ZipEntry entry; + while ((entry = zipInputStream.getNextEntry()) != null) { + // Get the uncompressed contents of the current zip entry and wrap in an input + // stream. This is done because ZipInputStreams can't be reset so they can only be + // read once, and classes that don't need rewriting need to be read twice, first to + // parse and then to copy. + byte[] currentEntryBytes = zipInputStream.readAllBytes(); + ByteArrayInputStream currentEntryInputStream = + new ByteArrayInputStream(currentEntryBytes); + ByteArrayOutputStream outputBuffer = new ByteArrayOutputStream(); + boolean handled = processClassEntry(entry, currentEntryInputStream, outputBuffer); + + ZipEntry newEntry = new ZipEntry(entry.getName()); + newEntry.setTime(entry.getTime()); + zipOutputStream.putNextEntry(newEntry); + if (handled) { + zipOutputStream.write(outputBuffer.toByteArray(), 0, outputBuffer.size()); + } else { + // processClassEntry may have advanced currentEntryInputStream, so reset it to + // copy zip entry contents unmodified. 
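+                    // (ByteArrayInputStream supports mark/reset, and its mark defaults to
+                    // position 0, so reset() rewinds to the start of the entry's bytes.)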
+                    currentEntryInputStream.reset();
+                    currentEntryInputStream.transferTo(zipOutputStream);
+                }
+                zipOutputStream.closeEntry();
+            }
+
+            zipOutputStream.finish();
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    private boolean processClassEntry(
+            ZipEntry entry, InputStream inputStream, OutputStream outputStream) {
+        if (!entry.getName().endsWith(CLASS_FILE_SUFFIX) || !shouldRewriteClass(entry.getName())) {
+            return false;
+        }
+        try {
+            ClassReader reader = new ClassReader(inputStream);
+            if (!shouldRewriteClass(reader)) {
+                return false;
+            }
+            ClassWriter writer = new ClassWriter(reader, ClassWriter.COMPUTE_FRAMES);
+            ClassVisitor classVisitor = getClassVisitorForClass(entry.getName(), writer);
+            reader.accept(classVisitor, ClassReader.EXPAND_FRAMES);
+
+            writer.visitEnd();
+            byte[] classData = writer.toByteArray();
+            outputStream.write(classData, 0, classData.length);
+            return true;
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+}
diff --git a/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java b/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
new file mode 100644
index 000000000000..a997bf05a3d3
--- /dev/null
+++ b/android/bytecode/java/org/chromium/bytecode/ClassPathValidator.java
@@ -0,0 +1,238 @@
+// Copyright 2018 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.ClassReader;
+
+import java.io.PrintStream;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.TreeSet;
+import java.util.function.Consumer;
+
+/**
+ * Checks classpaths (given as ClassLoaders) by reading the constant pool of the class file and
+ * attempting to load every referenced class. If some referenced classes cannot be found, it
+ * stores a helpful error message if it knows where it might find them, and exits the program if it
+ * can't find the class with any given classpath.
+ */
+public class ClassPathValidator {
+    // Number of warnings to print.
+    private static final int MAX_MISSING_CLASS_WARNINGS = 10;
+    // Number of missing classes to show per missing jar.
+    private static final int MAX_ERRORS_PER_JAR = 2;
+    // Map of missing .jar -> Missing class -> Classes that failed.
+    // TreeMap so that error messages have sorted list of jars.
+    private final Map<String, Map<String, Set<String>>> mDirectErrors =
+            Collections.synchronizedMap(new TreeMap<>());
+    // Missing classes; we only track the first one for each jar.
+    // Map of missingClass -> srcClass.
+    private final Map<String, String> mMissingClasses =
+            Collections.synchronizedMap(new TreeMap<>());
+
+    static class ClassNotLoadedException extends ClassNotFoundException {
+        private final String mClassName;
+
+        ClassNotLoadedException(String className, Throwable ex) {
+            super("Couldn't load " + className, ex);
+            mClassName = className;
+        }
+
+        public String getClassName() {
+            return mClassName;
+        }
+    }
+
+    private static void validateClass(ClassLoader classLoader, String className)
+            throws ClassNotLoadedException {
+        if (className.startsWith("[")) {
+            // Dealing with an array type which isn't encoded nicely in the constant pool.
+            // For example, [[Lorg/chromium/Class$1;
+            className = className.substring(className.lastIndexOf('[') + 1);
+            if (className.charAt(0) == 'L' && className.endsWith(";")) {
+                className = className.substring(1, className.length() - 1);
+            } else {
+                // Bailing out if we have a non-class array type.
+                // This could be something like [B
+                return;
+            }
+        }
+        if (className.matches(".*\\bR(\\$\\w+)?$")) {
+            // Resources in R.java files are not expected to be valid at this stage in the build.
+            return;
+        }
+        if (className.matches("^libcore\\b.*")) {
+            // libcore exists on devices, but is not included in the Android sdk as it is a private
+            // API.
+            return;
+        }
+        if (className.matches("^android\\b.*")) {
+            // OS APIs sometimes pop up in prebuilts. Rather than force prebuilt targets to set a
+            // proper alternative_android_sdk_dep, just ignore android.*
+            return;
+        }
+        try {
+            classLoader.loadClass(className.replace('/', '.'));
+        } catch (ClassNotFoundException e) {
+            throw new ClassNotLoadedException(className, e);
+        } catch (NoClassDefFoundError e) {
+            // We assume that this is caused by another class that is not going to be able to be
+            // loaded, so we will skip this and let that class fail with ClassNotFoundException.
+        }
+    }
+
+    /**
+     * Given a .class file, see if every class referenced in the main class' constant pool can be
+     * loaded by the given ClassLoader.
+     *
+     * @param classReader .class file interface for reading the constant pool.
+     * @param classLoader classpath you wish to validate.
+     * @param errorConsumer Called for each missing class.
+     */
+    private static void validateClassPath(ClassReader classReader, ClassLoader classLoader,
+            Consumer<ClassNotLoadedException> errorConsumer) {
+        char[] charBuffer = new char[classReader.getMaxStringLength()];
+        // According to the Java spec, the constant pool is indexed from 1 to constant_pool_count -
+        // 1. See https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.4
+        for (int i = 1; i < classReader.getItemCount(); i++) {
+            int offset = classReader.getItem(i);
+            // Class entries correspond to 7 in the constant pool
+            // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.4
+            if (offset > 0 && classReader.readByte(offset - 1) == 7) {
+                try {
+                    validateClass(classLoader, classReader.readUTF8(offset, charBuffer));
+                } catch (ClassNotLoadedException e) {
+                    errorConsumer.accept(e);
+                }
+            }
+        }
+    }
+
+    public void validateFullClassPath(ClassReader classReader, ClassLoader fullClassLoader,
+            Set<String> missingClassAllowlist) {
+        // Prebuilts only need transitive dependencies checked, not direct dependencies.
+        validateClassPath(classReader, fullClassLoader, (e) -> {
+            if (!missingClassAllowlist.contains(e.getClassName())) {
+                addMissingError(classReader.getClassName(), e.getClassName());
+            }
+        });
+    }
+
+    public void validateDirectClassPath(ClassReader classReader, ClassLoader directClassLoader,
+            ClassLoader fullClassLoader, Collection<String> jarsOnlyInFullClassPath,
+            Set<String> missingClassAllowlist, boolean verbose) {
+        validateClassPath(classReader, directClassLoader, (e) -> {
+            try {
+                validateClass(fullClassLoader, e.getClassName());
+            } catch (ClassNotLoadedException d) {
+                if (!missingClassAllowlist.contains(e.getClassName())) {
+                    addMissingError(classReader.getClassName(), e.getClassName());
+                }
+                return;
+            }
+            if (verbose) {
+                System.err.println("Class \"" + e.getClassName()
+                        + "\" not found in direct dependencies,"
+                        + " but found in indirect dependencies.");
+            }
+            // Iterating through all jars that are in the full classpath but not the direct
+            // classpath to find which one provides the class we are looking for.
+            for (String jarPath : jarsOnlyInFullClassPath) {
+                try {
+                    ClassLoader smallLoader =
+                            ByteCodeProcessor.loadJars(Collections.singletonList(jarPath));
+                    validateClass(smallLoader, e.getClassName());
+                    addDirectError(jarPath, classReader.getClassName(), e.getClassName());
+                    break;
+                } catch (ClassNotLoadedException f) {
+                }
+            }
+        });
+    }
+
+    private void addMissingError(String srcClass, String missingClass) {
+        mMissingClasses.put(missingClass, srcClass);
+    }
+
+    private void addDirectError(String jarPath, String srcClass, String missingClass) {
+        synchronized (mDirectErrors) {
+            Map<String, Set<String>> failedClassesByMissingClass = mDirectErrors.get(jarPath);
+            if (failedClassesByMissingClass == null) {
+                // TreeMap so that error messages have sorted list of classes.
+                failedClassesByMissingClass = new TreeMap<>();
+                mDirectErrors.put(jarPath, failedClassesByMissingClass);
+            }
+            Set<String> failedClasses = failedClassesByMissingClass.get(missingClass);
+            if (failedClasses == null) {
+                failedClasses = new TreeSet<>();
+                failedClassesByMissingClass.put(missingClass, failedClasses);
+            }
+            failedClasses.add(srcClass);
+        }
+    }
+
+    public boolean hasErrors() {
+        return !mDirectErrors.isEmpty() || !mMissingClasses.isEmpty();
+    }
+
+    private static void printValidationError(
+            PrintStream out, String gnTarget, Map<String, Set<String>> missingClasses) {
+        out.print(" * ");
+        out.println(gnTarget);
+        int i = 0;
+        // The list of missing classes is non-exhaustive because each class that fails to validate
+        // reports only the first missing class.
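+        // As an illustration (names are hypothetical), the printed section looks like:
+        //  * //foo:bar_java
+        //  * com.foo.Missing (needed by com.bar.User)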
+        for (Map.Entry<String, Set<String>> entry : missingClasses.entrySet()) {
+            String missingClass = entry.getKey();
+            Set<String> filesThatNeededIt = entry.getValue();
+            out.print(" * ");
+            if (i == MAX_ERRORS_PER_JAR) {
+                out.print(String.format(
+                        "And %d more...", missingClasses.size() - MAX_ERRORS_PER_JAR));
+                break;
+            }
+            out.print(missingClass.replace('/', '.'));
+            out.print(" (needed by ");
+            out.print(filesThatNeededIt.iterator().next().replace('/', '.'));
+            if (filesThatNeededIt.size() > 1) {
+                out.print(String.format(" and %d more", filesThatNeededIt.size() - 1));
+            }
+            out.println(")");
+            i++;
+        }
+    }
+
+    public void printAll(String gnTarget, Map<String, String> jarToGnTarget) {
+        String streamer = "=============================";
+        System.err.println();
+        System.err.println(streamer + " Dependency Checks Failed " + streamer);
+        System.err.println("Target: " + gnTarget);
+        if (!mMissingClasses.isEmpty()) {
+            int i = 0;
+            for (Map.Entry<String, String> entry : mMissingClasses.entrySet()) {
+                if (++i > MAX_MISSING_CLASS_WARNINGS) {
+                    System.err.println(String.format("... and %d more.",
+                            mMissingClasses.size() - MAX_MISSING_CLASS_WARNINGS));
+                    break;
+                }
+                System.err.println(String.format(
+                        "Class \"%s\" not found on any classpath. Used by class \"%s\"",
+                        entry.getKey(), entry.getValue()));
+            }
+            System.err.println();
+        }
+        if (!mDirectErrors.isEmpty()) {
+            System.err.println("Direct classpath is incomplete. To fix, add deps on:");
+            for (Map.Entry<String, Map<String, Set<String>>> entry : mDirectErrors.entrySet()) {
+                printValidationError(
+                        System.err, jarToGnTarget.get(entry.getKey()), entry.getValue());
+            }
+            System.err.println();
+        }
+    }
+}
diff --git a/android/bytecode/java/org/chromium/bytecode/EmptyOverrideGeneratorClassAdapter.java b/android/bytecode/java/org/chromium/bytecode/EmptyOverrideGeneratorClassAdapter.java
new file mode 100644
index 000000000000..3cf3a83d4d75
--- /dev/null
+++ b/android/bytecode/java/org/chromium/bytecode/EmptyOverrideGeneratorClassAdapter.java
@@ -0,0 +1,104 @@
+// Copyright 2021 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import static org.objectweb.asm.Opcodes.ACC_ABSTRACT;
+import static org.objectweb.asm.Opcodes.ACC_INTERFACE;
+import static org.objectweb.asm.Opcodes.ALOAD;
+import static org.objectweb.asm.Opcodes.ASM7;
+import static org.objectweb.asm.Opcodes.ILOAD;
+import static org.objectweb.asm.Opcodes.INVOKESPECIAL;
+import static org.objectweb.asm.Opcodes.IRETURN;
+
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.MethodVisitor;
+import org.objectweb.asm.Type;
+
+import java.util.ArrayList;
+
+class EmptyOverrideGeneratorClassAdapter extends ClassVisitor {
+    private final ArrayList<MethodDescription> mMethodsToGenerate;
+    private String mSuperClassName;
+    private boolean mIsAbstract;
+    private boolean mIsInterface;
+
+    public EmptyOverrideGeneratorClassAdapter(
+            ClassVisitor cv, ArrayList<MethodDescription> methodsToGenerate) {
+        super(ASM7, cv);
+        mMethodsToGenerate = methodsToGenerate;
+    }
+
+    @Override
+    public void visit(int version, int access, String name, String signature, String superName,
+            String[] interfaces) {
+        super.visit(version, access, name, signature, superName, interfaces);
+
+        mSuperClassName = superName;
+        mIsAbstract = (access & ACC_ABSTRACT) == ACC_ABSTRACT;
+        mIsInterface = (access & ACC_INTERFACE) == ACC_INTERFACE;
+    }
+
+    @Override
+    public void visitEnd() {
+        if (mIsAbstract || mIsInterface || mMethodsToGenerate.isEmpty()) {
+            super.visitEnd();
+            return;
+        }
+
+        for (MethodDescription method : mMethodsToGenerate) {
+            if (!method.shouldCreateOverride) {
+                continue;
+            }
+
+            MethodVisitor mv = super.visitMethod(
+                    method.access, method.methodName, method.description, null, null);
+            writeOverrideCode(mv, method.access, method.methodName, method.description);
+        }
+
+        super.visitEnd();
+    }
+
+    /**
+     * Writes code to a method to call that method's parent implementation.
+     * <pre>
+     * {@code
+     * // Calling writeOverrideCode(mv, ACC_PUBLIC, "doFoo", "(Ljava/lang/String;)I") writes the
+     * // following method body:
+     * public int doFoo(String arg) {
+     *     return super.doFoo(arg);
+     * }
+     * }
+     * </pre>
    + * + * This will be rewritten later by TraceEventAdderClassAdapter to wrap the body in a trace + * event. + */ + private void writeOverrideCode( + MethodVisitor mv, final int access, final String name, final String descriptor) { + assert access != 0; + Type[] argTypes = Type.getArgumentTypes(descriptor); + Type returnType = Type.getReturnType(descriptor); + + mv.visitCode(); + + // Variable 0 contains `this`, load it into the operand stack. + mv.visitVarInsn(ALOAD, 0); + + // Variables 1..n contain all arguments, load them all into the operand stack. + int i = 1; + for (Type arg : argTypes) { + // getOpcode(ILOAD) returns the ILOAD equivalent to the current argument's type. + mv.visitVarInsn(arg.getOpcode(ILOAD), i); + i += arg.getSize(); + } + + // Call the parent class method with the same arguments. + mv.visitMethodInsn(INVOKESPECIAL, mSuperClassName, name, descriptor, false); + + // Return the result. + mv.visitInsn(returnType.getOpcode(IRETURN)); + + mv.visitMaxs(0, 0); + mv.visitEnd(); + } +} diff --git a/android/bytecode/java/org/chromium/bytecode/FragmentActivityReplacer.java b/android/bytecode/java/org/chromium/bytecode/FragmentActivityReplacer.java new file mode 100644 index 000000000000..0966be0b303b --- /dev/null +++ b/android/bytecode/java/org/chromium/bytecode/FragmentActivityReplacer.java @@ -0,0 +1,303 @@ +// Copyright 2020 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.bytecode; + +import org.objectweb.asm.ClassVisitor; +import org.objectweb.asm.MethodVisitor; +import org.objectweb.asm.Opcodes; +import org.objectweb.asm.Type; +import org.objectweb.asm.commons.MethodRemapper; +import org.objectweb.asm.commons.Remapper; + +import java.io.File; +import java.io.IOException; +import java.lang.reflect.Method; + +/** + * Java application that modifies Fragment.getActivity() to return an Activity instead of a + * FragmentActivity, and updates any existing getActivity() calls to reference the updated method. + * + * See crbug.com/1144345 for more context. 
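+ *
+ * <p>In effect (signatures shown for illustration): {@code FragmentActivity getActivity()}
+ * becomes {@code Activity getActivity()} on the androidx Fragment class, and call sites of
+ * getActivity()/requireActivity()/getLifecycleActivity() are rewritten to match.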
+ */
+public class FragmentActivityReplacer extends ByteCodeRewriter {
+    private static final String GET_ACTIVITY_METHOD_NAME = "getActivity";
+    private static final String GET_LIFECYCLE_ACTIVITY_METHOD_NAME = "getLifecycleActivity";
+    private static final String NEW_METHOD_DESCRIPTOR = "()Landroid/app/Activity;";
+    private static final String OLD_METHOD_DESCRIPTOR =
+            "()Landroidx/fragment/app/FragmentActivity;";
+    private static final String REQUIRE_ACTIVITY_METHOD_NAME = "requireActivity";
+    private static final String SUPPORT_LIFECYCLE_FRAGMENT_IMPL_BINARY_NAME =
+            "com.google.android.gms.common.api.internal.SupportLifecycleFragmentImpl";
+
+    public static void main(String[] args) throws IOException {
+        // Invoke this script using //build/android/gyp/bytecode_rewriter.py
+        if (!(args.length == 2 || args.length == 3 && args[0].equals("--single-androidx"))) {
+            System.err.println("Expected arguments: [--single-androidx] <input.jar> <output.jar>");
+            System.exit(1);
+        }
+
+        if (args.length == 2) {
+            FragmentActivityReplacer rewriter = new FragmentActivityReplacer(false);
+            rewriter.rewrite(new File(args[0]), new File(args[1]));
+        } else {
+            FragmentActivityReplacer rewriter = new FragmentActivityReplacer(true);
+            rewriter.rewrite(new File(args[1]), new File(args[2]));
+        }
+    }
+
+    private final boolean mSingleAndroidX;
+
+    public FragmentActivityReplacer(boolean singleAndroidX) {
+        mSingleAndroidX = singleAndroidX;
+    }
+
+    @Override
+    protected boolean shouldRewriteClass(String classPath) {
+        return true;
+    }
+
+    @Override
+    protected ClassVisitor getClassVisitorForClass(String classPath, ClassVisitor delegate) {
+        ClassVisitor invocationVisitor = new InvocationReplacer(delegate, mSingleAndroidX);
+        switch (classPath) {
+            case "androidx/fragment/app/Fragment.class":
+                return new FragmentClassVisitor(invocationVisitor);
+            case "com/google/android/gms/common/api/internal/SupportLifecycleFragmentImpl.class":
+                return new SupportLifecycleFragmentImplClassVisitor(invocationVisitor);
+            default:
+                return invocationVisitor;
+        }
+    }
+
+    /**
+     * Updates any Fragment.getActivity/requireActivity() or getLifecycleActivity() calls to call
+     * the replaced method.
+     */
+    private static class InvocationReplacer extends ClassVisitor {
+        /**
+         * A ClassLoader that will resolve R classes to Object.
+         *
+         * R classes won't be on our classpath, and we don't access any information about them, so
+         * resolving them to a dummy value is fine.
+         */
+        private static class ResourceStubbingClassLoader extends ClassLoader {
+            @Override
+            protected Class<?> findClass(String name) throws ClassNotFoundException {
+                if (name.matches(".*\\.R(\\$.+)?")) {
+                    return Object.class;
+                }
+                return super.findClass(name);
+            }
+        }
+
+        private final boolean mSingleAndroidX;
+        private final ClassLoader mClassLoader;
+
+        private InvocationReplacer(ClassVisitor baseVisitor, boolean singleAndroidX) {
+            super(Opcodes.ASM7, baseVisitor);
+            mSingleAndroidX = singleAndroidX;
+            mClassLoader = new ResourceStubbingClassLoader();
+        }
+
+        @Override
+        public MethodVisitor visitMethod(
+                int access, String name, String descriptor, String signature, String[] exceptions) {
+            MethodVisitor base = super.visitMethod(access, name, descriptor, signature, exceptions);
+            return new MethodVisitor(Opcodes.ASM7, base) {
+                @Override
+                public void visitMethodInsn(int opcode, String owner, String name,
+                        String descriptor, boolean isInterface) {
+                    // Change the return type of the activity getters: getActivity(),
+                    // requireActivity(), and getLifecycleActivity().
+ if (isActivityGetterInvocation(opcode, owner, name, descriptor)) { + super.visitMethodInsn( + opcode, owner, name, NEW_METHOD_DESCRIPTOR, isInterface); + if (mSingleAndroidX) { + super.visitTypeInsn( + Opcodes.CHECKCAST, "androidx/fragment/app/FragmentActivity"); + } + } else if (isDowncastableFragmentActivityMethodInvocation( + opcode, owner, name, descriptor)) { + // Replace FragmentActivity.foo() with Activity.foo() to fix cases where the + // above code changed the getActivity return type. See the + // isDowncastableFragmentActivityMethodInvocation documentation for details. + super.visitMethodInsn( + opcode, "android/app/Activity", name, descriptor, isInterface); + } else { + super.visitMethodInsn(opcode, owner, name, descriptor, isInterface); + } + } + + private boolean isActivityGetterInvocation( + int opcode, String owner, String name, String descriptor) { + boolean isFragmentGetActivity = name.equals(GET_ACTIVITY_METHOD_NAME) + && descriptor.equals(OLD_METHOD_DESCRIPTOR) + && isFragmentSubclass(owner); + boolean isFragmentRequireActivity = name.equals(REQUIRE_ACTIVITY_METHOD_NAME) + && descriptor.equals(OLD_METHOD_DESCRIPTOR) + && isFragmentSubclass(owner); + boolean isSupportLifecycleFragmentImplGetLifecycleActivity = + name.equals(GET_LIFECYCLE_ACTIVITY_METHOD_NAME) + && descriptor.equals(OLD_METHOD_DESCRIPTOR) + && owner.equals(SUPPORT_LIFECYCLE_FRAGMENT_IMPL_BINARY_NAME); + return (opcode == Opcodes.INVOKEVIRTUAL || opcode == Opcodes.INVOKESPECIAL) + && (isFragmentGetActivity || isFragmentRequireActivity + || isSupportLifecycleFragmentImplGetLifecycleActivity); + } + + /** + * Returns true if the given method belongs to FragmentActivity, and also exists on + * Activity. + * + * The Java code `requireActivity().getClassLoader()` will compile to the following + * bytecode: + * aload_0 + * // Method requireActivity:()Landroid/app/Activity; + * invokevirtual #n + * // Method androidx/fragment/app/FragmentActivity.getClassLoader:()LClassLoader; + * invokevirtual #m + * + * The second invokevirtual instruction doesn't typecheck because the + * requireActivity() return type was changed from FragmentActivity to Activity. Note + * that this is only an issue when validating the bytecode on the JVM, not in + * Dalvik, so while the above code works on device, it fails in robolectric tests. + * + * To fix the example above, we'd replace the second invokevirtual call with a call + * to android/app/Activity.getClassLoader:()Ljava/lang/ClassLoader. In general, any + * call to FragmentActivity.foo, where foo also exists on Activity, will be replaced + * with a call to Activity.foo. Activity.foo will still resolve to + * FragmentActivity.foo at runtime, while typechecking in robolectric tests. + */ + private boolean isDowncastableFragmentActivityMethodInvocation( + int opcode, String owner, String name, String descriptor) { + // Return if this isn't an invoke instruction on a FragmentActivity. + if (!(opcode == Opcodes.INVOKEVIRTUAL || opcode == Opcodes.INVOKESPECIAL) + || !owner.equals("androidx/fragment/app/FragmentActivity")) { + return false; + } + try { + // Check if the method exists in Activity. 
+ Class activity = mClassLoader.loadClass("android.app.Activity"); + for (Method activityMethod : activity.getMethods()) { + if (activityMethod.getName().equals(name) + && Type.getMethodDescriptor(activityMethod) + .equals(descriptor)) { + return true; + } + } + return false; + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + } + + private boolean isFragmentSubclass(String internalType) { + // This doesn't use Class#isAssignableFrom to avoid us needing to load + // AndroidX's Fragment class, which may not be on the classpath. + try { + String binaryName = Type.getObjectType(internalType).getClassName(); + Class clazz = mClassLoader.loadClass(binaryName); + while (clazz != null) { + if (clazz.getName().equals("androidx.fragment.app.Fragment")) { + return true; + } + clazz = clazz.getSuperclass(); + } + return false; + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + } + }; + } + } + + /** + * Updates the implementation of Fragment.getActivity() and Fragment.requireActivity(). + */ + private static class FragmentClassVisitor extends ClassVisitor { + private FragmentClassVisitor(ClassVisitor baseVisitor) { + super(Opcodes.ASM7, baseVisitor); + } + + @Override + public MethodVisitor visitMethod( + int access, String name, String descriptor, String signature, String[] exceptions) { + // Update the descriptor of getActivity() and requireActivity(). + MethodVisitor baseVisitor; + if (descriptor.equals(OLD_METHOD_DESCRIPTOR) + && (name.equals(GET_ACTIVITY_METHOD_NAME) + || name.equals(REQUIRE_ACTIVITY_METHOD_NAME))) { + // Some Fragments in a Clank library implement an interface that defines an + // `Activity getActivity()` method. Fragment.getActivity() is considered its + // implementation from a typechecking perspective, but javac still generates a + // getActivity() method in these Fragments that call Fragment.getActivity(). This + // isn't an issue when the methods return different types, but after changing + // Fragment.getActivity() to return an Activity, this generated implementation is + // now overriding Fragment's, which it can't do because Fragment.getActivity() is + // final. We make it non-final here to avoid this issue. + baseVisitor = super.visitMethod( + access & ~Opcodes.ACC_FINAL, name, NEW_METHOD_DESCRIPTOR, null, exceptions); + } else { + baseVisitor = super.visitMethod(access, name, descriptor, signature, exceptions); + } + + // Replace getActivity() with `return ContextUtils.activityFromContext(getContext());` + if (name.equals(GET_ACTIVITY_METHOD_NAME) && descriptor.equals(OLD_METHOD_DESCRIPTOR)) { + baseVisitor.visitVarInsn(Opcodes.ALOAD, 0); + baseVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "androidx/fragment/app/Fragment", + "getContext", "()Landroid/content/Context;", false); + baseVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "org/chromium/utils/ContextUtils", + "activityFromContext", "(Landroid/content/Context;)Landroid/app/Activity;", + false); + baseVisitor.visitInsn(Opcodes.ARETURN); + // Since we set COMPUTE_FRAMES, the arguments of visitMaxs are ignored, but calling + // it forces ClassWriter to actually recompute the correct stack/local values. + // Without this call ClassWriter keeps the original stack=0,locals=1 which is wrong. 
+ baseVisitor.visitMaxs(0, 0); + return null; + } + + return new MethodRemapper(baseVisitor, new Remapper() { + @Override + public String mapType(String internalName) { + if (internalName.equals("androidx/fragment/app/FragmentActivity")) { + return "android/app/Activity"; + } + return internalName; + } + }); + } + } + + /** + * Update SupportLifecycleFragmentImpl.getLifecycleActivity(). + */ + private static class SupportLifecycleFragmentImplClassVisitor extends ClassVisitor { + private SupportLifecycleFragmentImplClassVisitor(ClassVisitor baseVisitor) { + super(Opcodes.ASM7, baseVisitor); + } + + @Override + public MethodVisitor visitMethod( + int access, String name, String descriptor, String signature, String[] exceptions) { + // SupportLifecycleFragmentImpl has two getActivity methods: + // 1. public FragmentActivity getLifecycleActivity(): + // This is what you'll see in the source. This delegates to Fragment.getActivity(). + // 2. public Activity getLifecycleActivity(): + // This is generated because the class implements LifecycleFragment, which + // declares this method, and delegates to #1. + // + // Here we change the return type of #1 and delete #2. + if (name.equals(GET_LIFECYCLE_ACTIVITY_METHOD_NAME)) { + if (descriptor.equals(OLD_METHOD_DESCRIPTOR)) { + return super.visitMethod( + access, name, NEW_METHOD_DESCRIPTOR, signature, exceptions); + } + return null; + } + return super.visitMethod(access, name, descriptor, signature, exceptions); + } + } +} diff --git a/android/bytecode/java/org/chromium/bytecode/MethodCheckerClassAdapter.java b/android/bytecode/java/org/chromium/bytecode/MethodCheckerClassAdapter.java new file mode 100644 index 000000000000..6794a77a6745 --- /dev/null +++ b/android/bytecode/java/org/chromium/bytecode/MethodCheckerClassAdapter.java @@ -0,0 +1,144 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.bytecode; + +import static org.objectweb.asm.ClassReader.EXPAND_FRAMES; +import static org.objectweb.asm.Opcodes.ACC_ABSTRACT; +import static org.objectweb.asm.Opcodes.ACC_INTERFACE; +import static org.objectweb.asm.Opcodes.ASM7; + +import org.objectweb.asm.ClassReader; +import org.objectweb.asm.ClassVisitor; +import org.objectweb.asm.MethodVisitor; + +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; + +/** + * This ClassVisitor verifies that a class and its methods are suitable for rewriting. + * Given a class and a list of methods it performs the following checks: + * 1. Class is subclass of a class that we want to trace. + * 2. Class is not abstract or an interface. + * + * For each method provided in {@code methodsToCheck}: + * If the class overrides the method then we can rewrite it directly. + * If the class doesn't override the method then we can generate an override with {@link + * EmptyOverrideGeneratorClassAdapter}, but first we must check if the parent method is private or + * final using {@link ParentMethodCheckerClassAdapter}. + * + * This adapter modifies the provided method list to indicate which methods should be overridden or + * skipped. 
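+ *
+ * <p>In other words, each MethodDescription ends up with shouldCreateOverride set to false when
+ * the class already defines the method (or an ancestor's implementation cannot be overridden),
+ * true when an empty override should be generated, and null while the method is still unchecked.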
+ */
+class MethodCheckerClassAdapter extends ClassVisitor {
+    private static final String VIEW_CLASS_DESCRIPTOR = "android/view/View";
+    private static final String ANIMATOR_UPDATE_LISTENER_CLASS_DESCRIPTOR =
+            "android/animation/ValueAnimator$AnimatorUpdateListener";
+    private static final String ANIMATOR_LISTENER_CLASS_DESCRIPTOR =
+            "android/animation/Animator$AnimatorListener";
+
+    private final ArrayList<MethodDescription> mMethodsToCheck;
+    private final ClassLoader mJarClassLoader;
+    private String mSuperName;
+
+    public MethodCheckerClassAdapter(
+            ArrayList<MethodDescription> methodsToCheck, ClassLoader jarClassLoader) {
+        super(ASM7);
+        mMethodsToCheck = methodsToCheck;
+        mJarClassLoader = jarClassLoader;
+    }
+
+    @Override
+    public void visit(int version, int access, String name, String signature, String superName,
+            String[] interfaces) {
+        super.visit(version, access, name, signature, superName, interfaces);
+
+        mSuperName = superName;
+
+        boolean isAbstract = (access & ACC_ABSTRACT) == ACC_ABSTRACT;
+        boolean isInterface = (access & ACC_INTERFACE) == ACC_INTERFACE;
+
+        if (isAbstract || isInterface || !shouldTraceClass(name)) {
+            mMethodsToCheck.clear();
+            return;
+        }
+    }
+
+    @Override
+    public MethodVisitor visitMethod(
+            int access, String name, String descriptor, String signature, String[] exceptions) {
+        if (mMethodsToCheck.isEmpty()) {
+            return super.visitMethod(access, name, descriptor, signature, exceptions);
+        }
+
+        for (MethodDescription method : mMethodsToCheck) {
+            if (method.methodName.equals(name) && method.description.equals(descriptor)) {
+                method.shouldCreateOverride = false;
+            }
+        }
+
+        return super.visitMethod(access, name, descriptor, signature, exceptions);
+    }
+
+    @Override
+    public void visitEnd() {
+        if (mMethodsToCheck.isEmpty()) {
+            super.visitEnd();
+            return;
+        }
+
+        boolean areAnyUncheckedMethods = false;
+
+        for (MethodDescription method : mMethodsToCheck) {
+            if (method.shouldCreateOverride == null) {
+                areAnyUncheckedMethods = true;
+                break;
+            }
+        }
+
+        if (areAnyUncheckedMethods) {
+            checkParentClass(mSuperName, mMethodsToCheck, mJarClassLoader);
+        }
+
+        super.visitEnd();
+    }
+
+    private boolean shouldTraceClass(String desc) {
+        Class<?> clazz = getClass(desc);
+        return isClassDerivedFrom(clazz, VIEW_CLASS_DESCRIPTOR)
+                || isClassDerivedFrom(clazz, ANIMATOR_UPDATE_LISTENER_CLASS_DESCRIPTOR)
+                || isClassDerivedFrom(clazz, ANIMATOR_LISTENER_CLASS_DESCRIPTOR);
+    }
+
+    private boolean isClassDerivedFrom(Class<?> clazz, String classDescriptor) {
+        Class<?> superClass = getClass(classDescriptor);
+        if (clazz == null || superClass == null) return false;
+        return superClass.isAssignableFrom(clazz);
+    }
+
+    private Class<?> getClass(String desc) {
+        try {
+            return mJarClassLoader.loadClass(desc.replace('/', '.'));
+        } catch (ClassNotFoundException | NoClassDefFoundError | IllegalAccessError e) {
+            return null;
+        }
+    }
+
+    static void checkParentClass(String superClassName, ArrayList<MethodDescription> methodsToCheck,
+            ClassLoader jarClassLoader) {
+        try {
+            ClassReader cr = new ClassReader(getClassAsStream(jarClassLoader, superClassName));
+            ParentMethodCheckerClassAdapter parentChecker =
+                    new ParentMethodCheckerClassAdapter(methodsToCheck, jarClassLoader);
+            cr.accept(parentChecker, EXPAND_FRAMES);
+        } catch (IOException ex) {
+            // Ignore errors in case class can't be loaded.
+ } + } + + private static InputStream getClassAsStream(ClassLoader jarClassLoader, String desc) { + return jarClassLoader.getResourceAsStream(desc.replace('.', '/') + ".class"); + } +} diff --git a/android/bytecode/java/org/chromium/bytecode/MethodDescription.java b/android/bytecode/java/org/chromium/bytecode/MethodDescription.java new file mode 100644 index 000000000000..26717c0616b2 --- /dev/null +++ b/android/bytecode/java/org/chromium/bytecode/MethodDescription.java @@ -0,0 +1,20 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.bytecode; + +class MethodDescription { + public final String methodName; + public final String description; + public final int access; + public Boolean shouldCreateOverride; + + public MethodDescription(String methodName, String description, int access) { + this.methodName = methodName; + this.description = description; + this.access = access; + // A null value means we haven't checked the method. + this.shouldCreateOverride = null; + } +} diff --git a/android/bytecode/java/org/chromium/bytecode/ParentMethodCheckerClassAdapter.java b/android/bytecode/java/org/chromium/bytecode/ParentMethodCheckerClassAdapter.java new file mode 100644 index 000000000000..4656c34ab5d4 --- /dev/null +++ b/android/bytecode/java/org/chromium/bytecode/ParentMethodCheckerClassAdapter.java @@ -0,0 +1,109 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.bytecode; + +import static org.objectweb.asm.Opcodes.ACC_FINAL; +import static org.objectweb.asm.Opcodes.ACC_PRIVATE; +import static org.objectweb.asm.Opcodes.ACC_PROTECTED; +import static org.objectweb.asm.Opcodes.ACC_PUBLIC; +import static org.objectweb.asm.Opcodes.ASM7; + +import org.objectweb.asm.ClassVisitor; +import org.objectweb.asm.MethodVisitor; + +import java.util.ArrayList; + +/** + * This ClassVisitor checks if the given class overrides methods on {@code methodsToCheck}, and if + * so it determines whether they can be overridden by a child class. If at the end any unchecked + * methods remain then we recurse on the class's superclass. 
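+ * The walk stops at java/lang/Object: for any methods still unchecked at that
+ * point the method does not exist in the hierarchy at all, so
+ * {@code shouldCreateOverride} is set to false and no override is generated.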
+ */
+class ParentMethodCheckerClassAdapter extends ClassVisitor {
+    private static final String OBJECT_CLASS_DESCRIPTOR = "java/lang/Object";
+
+    private final ArrayList<MethodDescription> mMethodsToCheck;
+    private final ClassLoader mJarClassLoader;
+    private String mSuperName;
+    private boolean mIsCheckingObjectClass;
+
+    public ParentMethodCheckerClassAdapter(
+            ArrayList<MethodDescription> methodsToCheck, ClassLoader jarClassLoader) {
+        super(ASM7);
+        mMethodsToCheck = methodsToCheck;
+        mJarClassLoader = jarClassLoader;
+    }
+
+    @Override
+    public void visit(int version, int access, String name, String signature, String superName,
+            String[] interfaces) {
+        super.visit(version, access, name, signature, superName, interfaces);
+
+        if (name.equals(OBJECT_CLASS_DESCRIPTOR)) {
+            mIsCheckingObjectClass = true;
+            return;
+        }
+
+        mSuperName = superName;
+    }
+
+    @Override
+    public MethodVisitor visitMethod(
+            int access, String name, String descriptor, String signature, String[] exceptions) {
+        if (mIsCheckingObjectClass) {
+            return super.visitMethod(access, name, descriptor, signature, exceptions);
+        }
+
+        for (MethodDescription methodToCheck : mMethodsToCheck) {
+            if (methodToCheck.shouldCreateOverride != null || !methodToCheck.methodName.equals(name)
+                    || !methodToCheck.description.equals(descriptor)) {
+                continue;
+            }
+
+            // This class contains methodToCheck.
+            boolean isMethodPrivate = (access & ACC_PRIVATE) == ACC_PRIVATE;
+            boolean isMethodFinal = (access & ACC_FINAL) == ACC_FINAL;
+            boolean isMethodPackagePrivate =
+                    (access & (ACC_PUBLIC | ACC_PROTECTED | ACC_PRIVATE)) == 0;
+
+            // If the method is private or final then don't create an override.
+            methodToCheck.shouldCreateOverride =
+                    !isMethodPrivate && !isMethodFinal && !isMethodPackagePrivate;
+        }
+
+        return super.visitMethod(access, name, descriptor, signature, exceptions);
+    }
+
+    @Override
+    public void visitEnd() {
+        if (mIsCheckingObjectClass) {
+            // We support tracing methods that are defined in classes that are derived from View,
+            // but are not defined in View itself. If we've reached the Object class in the
+            // hierarchy, it means the method doesn't exist in this hierarchy, so don't override it,
+            // and stop looking for it.
+            for (MethodDescription method : mMethodsToCheck) {
+                if (method.shouldCreateOverride == null) {
+                    method.shouldCreateOverride = false;
+                }
+            }
+            return;
+        }
+
+        boolean areAnyUncheckedMethods = false;
+
+        for (MethodDescription method : mMethodsToCheck) {
+            if (method.shouldCreateOverride == null) {
+                areAnyUncheckedMethods = true;
+                break;
+            }
+        }
+
+        if (areAnyUncheckedMethods) {
+            MethodCheckerClassAdapter.checkParentClass(
+                    mSuperName, mMethodsToCheck, mJarClassLoader);
+        }
+
+        super.visitEnd();
+    }
+}
diff --git a/android/bytecode/java/org/chromium/bytecode/TraceEventAdder.java b/android/bytecode/java/org/chromium/bytecode/TraceEventAdder.java
new file mode 100644
index 000000000000..4a8515951f48
--- /dev/null
+++ b/android/bytecode/java/org/chromium/bytecode/TraceEventAdder.java
@@ -0,0 +1,109 @@
+// Copyright 2021 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.ClassReader;
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.Opcodes;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+
+/**
+ * Java application that modifies all implementations of "draw", "onMeasure" and "onLayout" on all
+ * {@link android.view.View} subclasses to wrap them in trace events.
+ */
+public class TraceEventAdder extends ByteCodeRewriter {
+    private final ClassLoader mClassPathJarsClassLoader;
+    private ArrayList<MethodDescription> mMethodsToTrace;
+
+    public static void main(String[] args) throws IOException {
+        // Invoke this script using //build/android/gyp/trace_event_bytecode_rewriter.py
+
+        if (args.length < 2) {
+            System.err.println("Expected arguments: <':' separated list with N input jar paths> "
+                    + "<':' separated list with N output jar paths>");
+            System.exit(1);
+        }
+
+        String[] inputJars = args[0].split(":");
+        String[] outputJars = args[1].split(":");
+
+        assert inputJars.length
+                == outputJars.length : "Input and output lists are not the same length. Inputs: "
+                + inputJars.length + " Outputs: " + outputJars.length;
+
+        // outputJars[n] must be the same as inputJars[n] but with a suffix, validate this.
+        for (int i = 0; i < inputJars.length; i++) {
+            File inputJarPath = new File(inputJars[i]);
+            String inputJarFilename = inputJarPath.getName();
+            File outputJarPath = new File(outputJars[i]);
+
+            String inputFilenameNoExtension =
+                    inputJarFilename.substring(0, inputJarFilename.lastIndexOf(".jar"));
+
+            assert outputJarPath.getName().startsWith(inputFilenameNoExtension);
+        }
+
+        ArrayList<String> classPathJarsPaths = new ArrayList<>();
+        classPathJarsPaths.addAll(Arrays.asList(inputJars));
+        ClassLoader classPathJarsClassLoader = ByteCodeProcessor.loadJars(classPathJarsPaths);
+
+        TraceEventAdder adder = new TraceEventAdder(classPathJarsClassLoader);
+        for (int i = 0; i < inputJars.length; i++) {
+            adder.rewrite(new File(inputJars[i]), new File(outputJars[i]));
+        }
+    }
+
+    public TraceEventAdder(ClassLoader classPathJarsClassLoader) {
+        mClassPathJarsClassLoader = classPathJarsClassLoader;
+    }
+
+    @Override
+    protected boolean shouldRewriteClass(String classPath) {
+        return true;
+    }
+
+    @Override
+    protected boolean shouldRewriteClass(ClassReader classReader) {
+        mMethodsToTrace = new ArrayList<>(Arrays.asList(
+                // Methods on View.java
+                new MethodDescription(
+                        "dispatchTouchEvent", "(Landroid/view/MotionEvent;)Z", Opcodes.ACC_PUBLIC),
+                new MethodDescription("draw", "(Landroid/graphics/Canvas;)V", Opcodes.ACC_PUBLIC),
+                new MethodDescription("onMeasure", "(II)V", Opcodes.ACC_PROTECTED),
+                new MethodDescription("onLayout", "(ZIIII)V", Opcodes.ACC_PROTECTED),
+                // Methods on RecyclerView.java in AndroidX
+                new MethodDescription("scrollStep", "(II[I)V", 0),
+                // Methods on Animator.AnimatorListener
+                new MethodDescription(
+                        "onAnimationStart", "(Landroid/animation/Animator;)V", Opcodes.ACC_PUBLIC),
+                new MethodDescription(
+                        "onAnimationEnd", "(Landroid/animation/Animator;)V", Opcodes.ACC_PUBLIC),
+                // Methods on ValueAnimator.AnimatorUpdateListener
+                new MethodDescription("onAnimationUpdate", "(Landroid/animation/ValueAnimator;)V",
+                        Opcodes.ACC_PUBLIC)));
+
+        // This adapter will modify mMethodsToTrace to indicate which methods already exist in the
+        // class and which ones need to be overridden. In case the class is not an Android view
+        // we'll clear the list and skip rewriting.
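+        // (This check pass runs from shouldRewriteClass(); for classes that
+        // pass, getClassVisitorForClass() below chains
+        // EmptyOverrideGeneratorClassAdapter and TraceEventAdderClassAdapter
+        // to do the actual rewriting.)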
+        MethodCheckerClassAdapter methodChecker =
+                new MethodCheckerClassAdapter(mMethodsToTrace, mClassPathJarsClassLoader);
+
+        classReader.accept(methodChecker, ClassReader.EXPAND_FRAMES);
+
+        return !mMethodsToTrace.isEmpty();
+    }
+
+    @Override
+    protected ClassVisitor getClassVisitorForClass(String classPath, ClassVisitor delegate) {
+        ClassVisitor chain = new TraceEventAdderClassAdapter(delegate, mMethodsToTrace);
+        chain = new EmptyOverrideGeneratorClassAdapter(chain, mMethodsToTrace);
+
+        return chain;
+    }
+}
diff --git a/android/bytecode/java/org/chromium/bytecode/TraceEventAdderClassAdapter.java b/android/bytecode/java/org/chromium/bytecode/TraceEventAdderClassAdapter.java
new file mode 100644
index 000000000000..f2d03fbcc781
--- /dev/null
+++ b/android/bytecode/java/org/chromium/bytecode/TraceEventAdderClassAdapter.java
@@ -0,0 +1,47 @@
+// Copyright 2021 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import static org.objectweb.asm.Opcodes.ASM7;
+
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.MethodVisitor;
+
+import java.util.ArrayList;
+
+/**
+ * A ClassVisitor for adding TraceEvent.begin and TraceEvent.end methods to any methods specified in
+ * a list.
+ */
+class TraceEventAdderClassAdapter extends ClassVisitor {
+    private final ArrayList<MethodDescription> mMethodsToTrace;
+    private String mShortClassName;
+
+    TraceEventAdderClassAdapter(ClassVisitor visitor, ArrayList<MethodDescription> methodsToTrace) {
+        super(ASM7, visitor);
+        mMethodsToTrace = methodsToTrace;
+    }
+
+    @Override
+    public void visit(int version, int access, String name, String signature, String superName,
+            String[] interfaces) {
+        super.visit(version, access, name, signature, superName, interfaces);
+        mShortClassName = name.substring(name.lastIndexOf('/') + 1);
+    }
+
+    @Override
+    public MethodVisitor visitMethod(final int access, final String name, String desc,
+            String signature, String[] exceptions) {
+        MethodVisitor mv = super.visitMethod(access, name, desc, signature, exceptions);
+
+        for (MethodDescription method : mMethodsToTrace) {
+            if (method.methodName.equals(name) && method.description.equals(desc)) {
+                return new TraceEventAdderMethodAdapter(mv, mShortClassName, name);
+            }
+        }
+
+        return mv;
+    }
+}
diff --git a/android/bytecode/java/org/chromium/bytecode/TraceEventAdderMethodAdapter.java b/android/bytecode/java/org/chromium/bytecode/TraceEventAdderMethodAdapter.java
new file mode 100644
index 000000000000..11f2a273c93c
--- /dev/null
+++ b/android/bytecode/java/org/chromium/bytecode/TraceEventAdderMethodAdapter.java
@@ -0,0 +1,83 @@
+// Copyright 2021 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import static org.objectweb.asm.Opcodes.ASM7;
+import static org.objectweb.asm.Opcodes.ATHROW;
+import static org.objectweb.asm.Opcodes.INVOKESTATIC;
+import static org.objectweb.asm.Opcodes.IRETURN;
+import static org.objectweb.asm.Opcodes.RETURN;
+
+import static org.chromium.bytecode.TypeUtils.STRING;
+import static org.chromium.bytecode.TypeUtils.VOID;
+
+import org.objectweb.asm.MethodVisitor;
+
+/**
+ * MethodVisitor that wraps all code in TraceEvent.begin and TraceEvent.end calls. TraceEvent.end
+ * calls are added on all returns and thrown exceptions.
+ *
+ * Example:
+ * <pre>
    + *   {@code
    + *      int methodToTrace(String foo){
    + *
    + *        //Line added by rewriter:
    + *        TraceEvent.begin("ClassName.methodToTrace");
    + *
    + *        if(foo == null){
    + *          //Line added by rewriter:
    + *          TraceEvent.end("ClassName.methodToTrace");
    + *
    + *          throw new Exception();
    + *        }
    + *        else if(foo.equals("Two")){
    + *          //Line added by rewriter:
    + *          TraceEvent.end("ClassName.methodToTrace");
    + *
    + *          return 2;
    + *        }
    + *
    + *        //Line added by rewriter:
    + *        TraceEvent.end("ClassName.methodToTrace");
    + *
    + *        return 0;
    + *      }
    + *   }
+ * </pre>
+ *
+ */
+class TraceEventAdderMethodAdapter extends MethodVisitor {
+    private static final String TRACE_EVENT_DESCRIPTOR = "org/chromium/base/TraceEvent";
+    private static final String TRACE_EVENT_SIGNATURE =
+            TypeUtils.getMethodDescriptor(VOID, STRING);
+    private final String mEventName;
+
+    public TraceEventAdderMethodAdapter(
+            MethodVisitor methodVisitor, String shortClassName, String methodName) {
+        super(ASM7, methodVisitor);
+
+        mEventName = shortClassName + "." + methodName;
+    }
+
+    @Override
+    public void visitCode() {
+        super.visitCode();
+
+        mv.visitLdcInsn(mEventName);
+        mv.visitMethodInsn(
+                INVOKESTATIC, TRACE_EVENT_DESCRIPTOR, "begin", TRACE_EVENT_SIGNATURE, false);
+    }
+
+    @Override
+    public void visitInsn(int opcode) {
+        if ((opcode >= IRETURN && opcode <= RETURN) || opcode == ATHROW) {
+            mv.visitLdcInsn(mEventName);
+            mv.visitMethodInsn(
+                    INVOKESTATIC, TRACE_EVENT_DESCRIPTOR, "end", TRACE_EVENT_SIGNATURE, false);
+        }
+
+        mv.visitInsn(opcode);
+    }
+}
diff --git a/android/bytecode/java/org/chromium/bytecode/TypeUtils.java b/android/bytecode/java/org/chromium/bytecode/TypeUtils.java
new file mode 100644
index 000000000000..e62a912f8779
--- /dev/null
+++ b/android/bytecode/java/org/chromium/bytecode/TypeUtils.java
@@ -0,0 +1,87 @@
+// Copyright 2017 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.bytecode;
+
+import org.objectweb.asm.Type;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Utility methods for accessing {@link Type}s Strings.
+ *
+ * Useful definitions to keep in mind when using this class:
+ * Internal name - The fully qualified name for a type with dots replaced by slashes. Not really
+ * relevant for primitive types.
+ * Type descriptor - Single letters for primitive types, "L" + internal name + ";" for class types.
+ *
+ * The methods in this class accept internal names or primitive type descriptors.
+ */
+class TypeUtils {
+    static final String ASSERTION_ERROR = "java/lang/AssertionError";
+    static final String ASSET_MANAGER = "android/content/res/AssetManager";
+    static final String BUILD_HOOKS = "org/chromium/build/BuildHooks";
+    static final String BUILD_HOOKS_ANDROID = "org/chromium/build/BuildHooksAndroid";
+    static final String CONFIGURATION = "android/content/res/Configuration";
+    static final String CONTEXT = "android/content/Context";
+    static final String CONTEXT_WRAPPER = "android/content/ContextWrapper";
+    static final String RESOURCES = "android/content/res/Resources";
+    static final String STRING = "java/lang/String";
+    static final String THEME = "android/content/res/Resources$Theme";
+
+    static final String BOOLEAN = "Z";
+    static final String INT = "I";
+    static final String VOID = "V";
+    private static final Map<String, Type> PRIMITIVE_DESCRIPTORS;
+    static {
+        PRIMITIVE_DESCRIPTORS = new HashMap<>();
+        PRIMITIVE_DESCRIPTORS.put(Type.BOOLEAN_TYPE.toString(), Type.BOOLEAN_TYPE);
+        PRIMITIVE_DESCRIPTORS.put(Type.INT_TYPE.toString(), Type.INT_TYPE);
+        PRIMITIVE_DESCRIPTORS.put(Type.VOID_TYPE.toString(), Type.VOID_TYPE);
+    }
+
+    /**
+     * Returns the full method signature with internal names.
+     *
+     * @param methodName Name of the method (ex. "getResources").
+     * @param returnType Internal name for the return type.
+     * @param argumentTypes List of internal names for argument types.
+     * @return String representation of the method signature.
+     */
+    static String getMethodSignature(
+            String methodName, String returnType, String...
argumentTypes) { + return methodName + getMethodDescriptor(returnType, argumentTypes); + } + + /** + * Builds a method descriptor suitable for use with {@link org.objectweb.asm.MethodVisitor}. + * + * @param returnType Internal name for the return type of the method (primitive or class). + * @param argumentTypes Internal names for the argument types (primitive or class). + * @return The generated method descriptor. + */ + static String getMethodDescriptor(String returnType, String... argumentTypes) { + Type[] typedArguments = new Type[argumentTypes.length]; + for (int i = 0; i < argumentTypes.length; ++i) { + // Argument list should be empty in this case, not V (void). + assert !Type.VOID_TYPE.toString().equals(argumentTypes[i]); + typedArguments[i] = convert(argumentTypes[i]); + } + return Type.getMethodDescriptor(convert(returnType), typedArguments); + } + + /** + * Converts an internal name for a type to a {@link Type}. + * + * @param type Internal name for a type (primitive or class). + * @return The resulting Type. + */ + private static Type convert(String type) { + if (PRIMITIVE_DESCRIPTORS.containsKey(type)) { + return PRIMITIVE_DESCRIPTORS.get(type); + } + return Type.getObjectType(type); + } +} diff --git a/android/chromium-debug.keystore b/android/chromium-debug.keystore new file mode 100644 index 0000000000000000000000000000000000000000..67eb0aa34c5af88603ee1e2dedf4bfee3e33be2c GIT binary patch literal 2223 zcmchYc{J1u8^`B2W)NoVyHFI_X32;dTgWo@EhRH!$U2s65fXEe^}Q_0rg12rYiw;?Gs z{=LDH7{WJuK^9r$<#WrWU`ewUg2U>BD9xv`AwniOw(5~UwTvs;VjI1_tSa^$CuH}Q z1eecLmKwrMW_6c;7UVWA)e=Q?zZ-?Tth>`;lwp)#s5Pfy`Pb`)0k=f-jm7$87#9OI z%~Zup+O#c>o`{*k9W&5mdK!v1Ud4@^unLL>p23U?w=|%e*0Pon;j;9)V_kXVG>L?y zvX1yNf8BDt9p_)VL36U_=L|;GAC8-Ro0hvPMajjm44Dxqy z<8WEMsx$r#-|-CnqapW*5S(1^>}s>IbpmvMxLkg@r~OhO-ZfG z6bfdSFY|G(SrzF}<4_hSOZVcI@lPvO4holvcRgQZybC3t@zdoa-LEmXz(%iX;^HWx@ zI(dyNG_x0lABcVX9;NFXLGh@@xj%6=DLZe0?d;R)`BHugs9xjczH6~_jlw=OMP}31 zLjdv1jr13HJE3tj>($%Y6z-0oKl?@7%T7d7Bm}}Sx$|swBl5jtov%K0-alo{nQvvGM!Gu`;`E;4ZoP-9=2*eJC#AIWS zs;NzUIHB-i6&-drp$oIE;+olP+CKS->zo@XT}*#BKJ`*G4I4-SFICY#`s%$AhoHsR zZ_kKYGI6l5r=pn`-BBr-4HkK*GDzyCOw1=v& z^EvYcvoP6w;rU*PQ_U9%{4r^IV=B`%ZoqEfu}5}8app#M^Pj_IfrbioQ6d!>TMr+! zMz!V6^yz3J7hLEgblT1Zy{^xkTLIGB?=*Fu(9Q^JhdRu<7aeo3A$-BwU%C-H(D9=ofQ%#N^RdZ)QPj5|1{?m$kwE7v(V zvq1$$4iarb1l~NoQd9n{e70xniX@cV7r=ywu!#R6K1YCT&? z(C{4L=hx}R+a`*^DPsENE7iMM*cxGbQItY+ENLSmiBuXw7cv-opfSuodf1*}JKM%@ zE-I3x(c~ZhqrFAzh-tf$OQ>SLZ)p~sjG(lgC3x*>isd<1<^}L4bq@M(p$6N2S`vCj z2ZJ;Kfp|QTRG!mFD%hC80|vlgh?9es84|{OKqK8ceFL`d$9-XjphWhi$>R>;73Xsg zqlAUWhW(fD|3dhFAp*Y;!Cwf^{|Hi98iqn64NypwK2i^(?Z=9ZWQRR09smUVv!VTy*>4B{m87SlXgLMto3G}hrME=Bj-=EtU+vEh?Q#rm z4JA%#XY(Q<=jQw(M}T*gCkY~EhxFm=hWufU<}p6wYKnC;sMXf?gg=hsv#b!-{uA*| z)lHTzz0G!kk7VoER#sJGZCCE2KEv4T6vAQnDtgh8XEW*cgHT>l-xQkqt` zmqDPNGdtKf`|4Mu#o^k}(jlgfd3`!&^U#Q>{6Ql}>E50*7auJZoi@6wrQI@q%XooM tRxx5&F~gtfV%`+}Bzpa0aL`OR>U@K&{F-oP)^j+W$;?<{kNOz|{|%@*$JGD; literal 0 HcmV?d00001 diff --git a/android/chromium_annotations.flags b/android/chromium_annotations.flags new file mode 100644 index 000000000000..e3f7afa3f29d --- /dev/null +++ b/android/chromium_annotations.flags @@ -0,0 +1,79 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Contains flags related to annotations in //build/android that can be safely +# shared with Cronet, and thus would be appropriate for third-party apps to +# include. 
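+#
+# Reading guide (assuming standard R8/ProGuard rule semantics): a bare
+# "-keep @interface Foo" retains the annotation class itself, while
+# "-keepclasseswithmembers class ** { @Foo <methods>; }" retains any class
+# that declares at least one @Foo-annotated method, together with those
+# methods.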
+
+# Keep all annotation related attributes that can affect runtime
+-keepattributes RuntimeVisible*Annotations
+-keepattributes AnnotationDefault
+
+# Keep the annotations, because if we don't, the ProGuard rules that use them
+# will not be respected. These classes then show up in our final dex, which we
+# do not want - see crbug.com/628226.
+-keep @interface org.chromium.base.annotations.AccessedByNative
+-keep @interface org.chromium.base.annotations.CalledByNative
+-keep @interface org.chromium.base.annotations.CalledByNativeUnchecked
+-keep @interface org.chromium.build.annotations.DoNotInline
+-keep @interface org.chromium.build.annotations.UsedByReflection
+-keep @interface org.chromium.build.annotations.IdentifierNameString
+
+# Keeps for class level annotations.
+-keep,allowaccessmodification @org.chromium.build.annotations.UsedByReflection class ** {}
+
+# Keeps for method level annotations.
+-keepclasseswithmembers,allowaccessmodification class ** {
+  @org.chromium.base.annotations.AccessedByNative <fields>;
+}
+-keepclasseswithmembers,includedescriptorclasses,allowaccessmodification class ** {
+  @org.chromium.base.annotations.CalledByNative <methods>;
+}
+-keepclasseswithmembers,includedescriptorclasses,allowaccessmodification class ** {
+  @org.chromium.base.annotations.CalledByNativeUnchecked <methods>;
+}
+-keepclasseswithmembers,allowaccessmodification class ** {
+  @org.chromium.build.annotations.UsedByReflection <methods>;
+}
+-keepclasseswithmembers,allowaccessmodification class ** {
+  @org.chromium.build.annotations.UsedByReflection <fields>;
+}
+
+# Never inline classes, methods, or fields with this annotation, but allow
+# shrinking and obfuscation.
+# Relevant to fields when they are needed to store strong references to objects
+# that are held as weak references by native code.
+-if @org.chromium.build.annotations.DoNotInline class * {
+  *** *(...);
+}
+-keep,allowobfuscation,allowaccessmodification class <1> {
+  *** <2>(...);
+}
+-keepclassmembers,allowobfuscation,allowaccessmodification class * {
+  @org.chromium.build.annotations.DoNotInline <methods>;
+}
+-keepclassmembers,allowobfuscation,allowaccessmodification class * {
+  @org.chromium.build.annotations.DoNotInline <fields>;
+}
+
+-alwaysinline class * {
+  @org.chromium.build.annotations.AlwaysInline *;
+}
+
+# Keep all logs (Log.VERBOSE = 2). R8 does not allow setting to 0.
+-maximumremovedandroidloglevel 1 class ** {
+  @org.chromium.build.annotations.DoNotStripLogs <methods>;
+}
+-maximumremovedandroidloglevel 1 @org.chromium.build.annotations.DoNotStripLogs class ** {
+  <methods>;
+}
+
+# Never merge classes horizontally or vertically with this annotation.
+# Relevant to classes being used as a key in maps or sets.
+-keep,allowaccessmodification,allowobfuscation,allowshrinking @org.chromium.build.annotations.DoNotClassMerge class *
+
+# Mark members annotated with IdentifierNameString as identifier name strings
+-identifiernamestring class * {
+  @org.chromium.build.annotations.IdentifierNameString *;
+}
diff --git a/android/convert_dex_profile.py b/android/convert_dex_profile.py
new file mode 100755
index 000000000000..13a48edfdeb6
--- /dev/null
+++ b/android/convert_dex_profile.py
@@ -0,0 +1,569 @@
+#!/usr/bin/env vpython3
+#
+# Copyright 2018 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
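+
+"""Converts Android profiles between obfuscated and deobfuscated forms.
+
+As an illustration (taken from the test data in convert_dex_profile_tests.py),
+given a proguard mapping with
+    org.chromium.Original -> a:
+        org.chromium.Original another() -> b
+deobfuscating the profile line
+    PLa;->b()La;
+yields
+    PLorg/chromium/Original;->another()Lorg/chromium/Original;
+"""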
+
+import argparse
+import collections
+import functools
+import logging
+import re
+import subprocess
+import sys
+
+DEX_CLASS_NAME_RE = re.compile(r'\'L(?P<class_name>[^;]+);\'')
+DEX_METHOD_NAME_RE = re.compile(r'\'(?P<method_name>[^\']+)\'')
+DEX_METHOD_TYPE_RE = re.compile(  # type descriptor method signature re
+    r'\''
+    r'\('
+    r'(?P<method_params>[^)]*)'
+    r'\)'
+    r'(?P<method_return_type>[^\']+)'
+    r'\'')
+DEX_METHOD_LINE_NR_RE = re.compile(r'line=(?P<line_number>\d+)')
+
+PROFILE_METHOD_RE = re.compile(
+    r'(?P<tags>[HSP]+)'  # tags such as H/S/P
+    r'(?P<class_name>L[^;]+;)'  # class name in type descriptor format
+    r'->(?P<method_name>[^(]+)'
+    r'\((?P<method_params>[^)]*)\)'
+    r'(?P<method_return_type>.+)')
+
+PROGUARD_CLASS_MAPPING_RE = re.compile(
+    r'(?P<original_name>[^ ]+)'
+    r' -> '
+    r'(?P<obfuscated_name>[^:]+):')
+PROGUARD_METHOD_MAPPING_RE = re.compile(
+    # line_start:line_end: (optional)
+    r'((?P<line_start>\d+):(?P<line_end>\d+):)?'
+    r'(?P<return_type>[^ ]+)'  # original method return type
+    # original method class name (if exists)
+    r' (?:(?P<original_method_class>[a-zA-Z_\d.$]+)\.)?'
+    r'(?P<original_method_name>[^.\(]+)'
+    r'\((?P<params>[^\)]*)\)'  # original method params
+    r'(?:[^ ]*)'  # original method line numbers (ignored)
+    r' -> '
+    r'(?P<obfuscated_name>.+)')  # obfuscated method name
+
+TYPE_DESCRIPTOR_RE = re.compile(
+    r'(?P<brackets>\[*)'
+    r'(?:'
+    r'(?P<class_name>L[^;]+;)'
+    r'|'
+    r'[VZBSCIJFD]'
+    r')')
+
+DOT_NOTATION_MAP = {
+    '': '',
+    'boolean': 'Z',
+    'byte': 'B',
+    'void': 'V',
+    'short': 'S',
+    'char': 'C',
+    'int': 'I',
+    'long': 'J',
+    'float': 'F',
+    'double': 'D'
+}
+
+
+@functools.total_ordering
+class Method:
+  def __init__(self, name, class_name, param_types=None, return_type=None):
+    self.name = name
+    self.class_name = class_name
+    self.param_types = param_types
+    self.return_type = return_type
+
+  def __str__(self):
+    return '{}->{}({}){}'.format(self.class_name, self.name,
+                                 self.param_types or '', self.return_type or '')
+
+  def __repr__(self):
+    return 'Method<{}->{}({}){}>'.format(self.class_name, self.name,
+                                         self.param_types or '', self.return_type or '')
+
+  @staticmethod
+  def serialize(method):
+    return (method.class_name, method.name, method.param_types,
+            method.return_type)
+
+  def __eq__(self, other):
+    return self.serialize(self) == self.serialize(other)
+
+  def __lt__(self, other):
+    return self.serialize(self) < self.serialize(other)
+
+  def __hash__(self):
+    # only hash name and class_name since other fields may not be set yet.
+    return hash((self.name, self.class_name))
+
+
+class Class:
+  def __init__(self, name):
+    self.name = name
+    self._methods = []
+
+  def AddMethod(self, method, line_numbers):
+    self._methods.append((method, set(line_numbers)))
+
+  def FindMethodsAtLine(self, method_name, line_start, line_end=None):
+    """Searches through dex class for a method given a name and line numbers
+
+    The dex maps methods to line numbers. This method, given a method name in
+    this class as well as a start line and an optional end line (which act as
+    hints as to which function in the class is being looked for), returns a
+    list of possible matches (or None if none are found).
+
+    Args:
+      method_name: name of method being searched for
+      line_start: start of hint range for lines in this method
+      line_end: end of hint range for lines in this method (optional)
+
+    Returns:
+      A list of Method objects that could match the hints given, or None if no
+      method is found.
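+
+    Example:
+      In the test data (convert_dex_profile_tests.py),
+      FindMethodsAtLine('<clinit>', 311, 313) returns the single '<clinit>'
+      method whose dex positions (lines 310 and 313) overlap that hint range.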
+    """
+    found_methods = []
+    if line_end is None:
+      hint_lines = set([line_start])
+    else:
+      hint_lines = set(range(line_start, line_end+1))
+
+    named_methods = [(method, l) for method, l in self._methods
+                     if method.name == method_name]
+
+    if len(named_methods) == 1:
+      return [method for method, l in named_methods]
+    if len(named_methods) == 0:
+      return None
+
+    for method, line_numbers in named_methods:
+      if not hint_lines.isdisjoint(line_numbers):
+        found_methods.append(method)
+
+    if len(found_methods) > 0:
+      if len(found_methods) > 1:
+        logging.warning('ambiguous methods in dex %s at lines %s in class "%s"',
+                        found_methods, hint_lines, self.name)
+      return found_methods
+
+    for method, line_numbers in named_methods:
+      if (max(hint_lines) >= min(line_numbers)
+          and min(hint_lines) <= max(line_numbers)):
+        found_methods.append(method)
+
+    if len(found_methods) > 0:
+      if len(found_methods) > 1:
+        logging.warning('ambiguous methods in dex %s at lines %s in class "%s"',
+                        found_methods, hint_lines, self.name)
+      return found_methods
+    logging.warning(
+        'No method named "%s" in class "%s" is '
+        'mapped to lines %s', method_name, self.name, hint_lines)
+    return None
+
+
+class Profile:
+  def __init__(self):
+    # {Method: set(char)}
+    self._methods = collections.defaultdict(set)
+    self._classes = []
+
+  def AddMethod(self, method, tags):
+    for tag in tags:
+      self._methods[method].add(tag)
+
+  def AddClass(self, cls):
+    self._classes.append(cls)
+
+  def WriteToFile(self, path):
+    with open(path, 'w') as output_profile:
+      for cls in sorted(self._classes):
+        output_profile.write(cls + '\n')
+      for method in sorted(self._methods):
+        tags = sorted(self._methods[method])
+        line = '{}{}\n'.format(''.join(tags), str(method))
+        output_profile.write(line)
+
+
+class ProguardMapping:
+  def __init__(self):
+    # {Method: set(Method)}
+    self._method_mapping = collections.defaultdict(set)
+    # {String: String} String is class name in type descriptor format
+    self._class_mapping = dict()
+
+  def AddMethodMapping(self, from_method, to_method):
+    self._method_mapping[from_method].add(to_method)
+
+  def AddClassMapping(self, from_class, to_class):
+    self._class_mapping[from_class] = to_class
+
+  def GetMethodMapping(self, from_method):
+    return self._method_mapping.get(from_method)
+
+  def GetClassMapping(self, from_class):
+    return self._class_mapping.get(from_class, from_class)
+
+  def MapTypeDescriptor(self, type_descriptor):
+    match = TYPE_DESCRIPTOR_RE.search(type_descriptor)
+    assert match is not None
+    class_name = match.group('class_name')
+    if class_name is not None:
+      return match.group('brackets') + self.GetClassMapping(class_name)
+    # just a native type, return as is
+    return match.group()
+
+  def MapTypeDescriptorList(self, type_descriptor_list):
+    return TYPE_DESCRIPTOR_RE.sub(
+        lambda match: self.MapTypeDescriptor(match.group()),
+        type_descriptor_list)
+
+
+class MalformedLineException(Exception):
+  def __init__(self, message, line_number):
+    super().__init__(message)
+    self.message = message
+    self.line_number = line_number
+
+  def __str__(self):
+    return self.message + ' at line {}'.format(self.line_number)
+
+
+class MalformedProguardMappingException(MalformedLineException):
+  pass
+
+
+class MalformedProfileException(MalformedLineException):
+  pass
+
+
+def _RunDexDump(dexdump_path, dex_file_path):
+  return subprocess.check_output([dexdump_path,
+                                  dex_file_path]).decode('utf-8').splitlines()
+
+
+def _ReadFile(file_path):
+  with open(file_path, 'r') as f:
+    return f.readlines()
+
+
+def
_ToTypeDescriptor(dot_notation): + """Parses a dot notation type and returns it in type descriptor format + + eg: + org.chromium.browser.ChromeActivity -> Lorg/chromium/browser/ChromeActivity; + boolean -> Z + int[] -> [I + + Args: + dot_notation: trimmed string with a single type in dot notation format + + Returns: + A string with the type in type descriptor format + """ + dot_notation = dot_notation.strip() + prefix = '' + while dot_notation.endswith('[]'): + prefix += '[' + dot_notation = dot_notation[:-2] + if dot_notation in DOT_NOTATION_MAP: + return prefix + DOT_NOTATION_MAP[dot_notation] + return prefix + 'L' + dot_notation.replace('.', '/') + ';' + + +def _DotNotationListToTypeDescriptorList(dot_notation_list_string): + """Parses a param list of dot notation format and returns it in type + descriptor format + + eg: + org.chromium.browser.ChromeActivity,boolean,int[] -> + Lorg/chromium/browser/ChromeActivity;Z[I + + Args: + dot_notation_list_string: single string with multiple comma separated types + in dot notation format + + Returns: + A string with the param list in type descriptor format + """ + return ''.join(_ToTypeDescriptor(param) for param in + dot_notation_list_string.split(',')) + + +def ProcessDex(dex_dump): + """Parses dexdump output returning a dict of class names to Class objects + + Parses output of the dexdump command on a dex file and extracts information + about classes and their respective methods and which line numbers a method is + mapped to. + + Methods that are not mapped to any line number are ignored and not listed + inside their respective Class objects. + + Args: + dex_dump: An array of lines of dexdump output + + Returns: + A dict that maps from class names in type descriptor format (but without the + surrounding 'L' and ';') to Class objects. + """ + # class_name: Class + classes_by_name = {} + current_class = None + current_method = None + reading_positions = False + reading_methods = False + method_line_numbers = [] + for line in dex_dump: + line = line.strip() + if line.startswith('Class descriptor'): + # New class started, no longer reading methods. 
+      reading_methods = False
+      current_class = Class(DEX_CLASS_NAME_RE.search(line).group('class_name'))
+      classes_by_name[current_class.name] = current_class
+    elif (line.startswith('Direct methods')
+          or line.startswith('Virtual methods')):
+      reading_methods = True
+    elif reading_methods and line.startswith('name'):
+      assert current_class is not None
+      current_method = Method(
+          DEX_METHOD_NAME_RE.search(line).group('method_name'),
+          "L" + current_class.name + ";")
+    elif reading_methods and line.startswith('type'):
+      assert current_method is not None
+      match = DEX_METHOD_TYPE_RE.search(line)
+      current_method.param_types = match.group('method_params')
+      current_method.return_type = match.group('method_return_type')
+    elif line.startswith('positions'):
+      assert reading_methods
+      reading_positions = True
+      method_line_numbers = []
+    elif reading_positions and line.startswith('0x'):
+      line_number = DEX_METHOD_LINE_NR_RE.search(line).group('line_number')
+      method_line_numbers.append(int(line_number))
+    elif reading_positions and line.startswith('locals'):
+      if len(method_line_numbers) > 0:
+        current_class.AddMethod(current_method, method_line_numbers)
+      # finished reading method line numbers
+      reading_positions = False
+  return classes_by_name
+
+
+def ProcessProguardMapping(proguard_mapping_lines, dex):
+  """Parses a proguard mapping file
+
+  This takes proguard mapping file lines and then uses the obfuscated dex to
+  create a mapping of unobfuscated methods to obfuscated ones and vice versa.
+
+  The dex is used because the proguard mapping file only has the name of the
+  obfuscated methods but not their signature, thus the dex is read to look up
+  which method with a specific name was mapped to the lines mentioned in the
+  proguard mapping file.
+
+  Args:
+    proguard_mapping_lines: Array of strings, each is a line from the proguard
+                            mapping file (in order).
+    dex: a dict of class name (in type descriptor format but without the
+         enclosing 'L' and ';') to a Class object.
+  Returns:
+    Two ProguardMapping objects: the first maps each obfuscated method to the
+    set of non-obfuscated methods it may correspond to, and obfuscated class
+    names to original class names; the second is the reverse mapping. Class
+    names are in type descriptor format (with the enclosing 'L' and ';').
+  """
+  mapping = ProguardMapping()
+  reverse_mapping = ProguardMapping()
+  to_be_obfuscated = []
+  current_class_orig = None
+  current_class_obfs = None
+  for index, line in enumerate(proguard_mapping_lines):
+    if line.strip() == '':
+      continue
+    if not line.startswith(' '):
+      match = PROGUARD_CLASS_MAPPING_RE.search(line)
+      if match is None:
+        raise MalformedProguardMappingException(
+            'Malformed class mapping', index)
+      current_class_orig = match.group('original_name')
+      current_class_obfs = match.group('obfuscated_name')
+      mapping.AddClassMapping(_ToTypeDescriptor(current_class_obfs),
+                              _ToTypeDescriptor(current_class_orig))
+      reverse_mapping.AddClassMapping(_ToTypeDescriptor(current_class_orig),
+                                      _ToTypeDescriptor(current_class_obfs))
+      continue
+
+    assert current_class_orig is not None
+    assert current_class_obfs is not None
+    line = line.strip()
+    match = PROGUARD_METHOD_MAPPING_RE.search(line)
+    # check if is a method mapping (we ignore field mappings)
+    if match is not None:
+      # check if this line is an inlining by reading ahead 1 line.
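+      # e.g. in the test data, "4:4:void inlined():237:237 -> a" is followed
+      # by "4:4:org.chromium.Original getInstance():203 -> a" with the same
+      # line range; the first line is the inlined callee and is skipped here.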
+ if index + 1 < len(proguard_mapping_lines): + next_match = PROGUARD_METHOD_MAPPING_RE.search( + proguard_mapping_lines[index+1].strip()) + if (next_match and match.group('line_start') is not None + and next_match.group('line_start') == match.group('line_start') + and next_match.group('line_end') == match.group('line_end')): + continue # This is an inlining, skip + + original_method = Method( + match.group('original_method_name'), + _ToTypeDescriptor( + match.group('original_method_class') or current_class_orig), + _DotNotationListToTypeDescriptorList(match.group('params')), + _ToTypeDescriptor(match.group('return_type'))) + + if match.group('line_start') is not None: + obfs_methods = (dex[current_class_obfs.replace('.', '/')] + .FindMethodsAtLine( + match.group('obfuscated_name'), + int(match.group('line_start')), + int(match.group('line_end')))) + + if obfs_methods is None: + continue + + for obfs_method in obfs_methods: + mapping.AddMethodMapping(obfs_method, original_method) + reverse_mapping.AddMethodMapping(original_method, obfs_method) + else: + to_be_obfuscated.append( + (original_method, match.group('obfuscated_name'))) + + for original_method, obfuscated_name in to_be_obfuscated: + obfuscated_method = Method( + obfuscated_name, + reverse_mapping.GetClassMapping(original_method.class_name), + reverse_mapping.MapTypeDescriptorList(original_method.param_types), + reverse_mapping.MapTypeDescriptor(original_method.return_type)) + mapping.AddMethodMapping(obfuscated_method, original_method) + reverse_mapping.AddMethodMapping(original_method, obfuscated_method) + return mapping, reverse_mapping + + +def ProcessProfile(input_profile, proguard_mapping): + """Parses an android profile and uses the proguard mapping to (de)obfuscate it + + This takes the android profile lines and for each method or class for the + profile, it uses the mapping to either obfuscate or deobfuscate (based on the + provided mapping) and returns a Profile object that stores this information. + + Args: + input_profile: array of lines of the input profile + proguard_mapping: a proguard mapping that would map from the classes and + methods in the input profile to the classes and methods + that should be in the output profile. + + Returns: + A Profile object that stores the information (ie list of mapped classes and + methods + tags) + """ + profile = Profile() + for index, line in enumerate(input_profile): + line = line.strip() + if line.startswith('L'): + profile.AddClass(proguard_mapping.GetClassMapping(line)) + continue + match = PROFILE_METHOD_RE.search(line) + if not match: + raise MalformedProfileException("Malformed line", index) + + method = Method( + match.group('method_name'), + match.group('class_name'), + match.group('method_params'), + match.group('method_return_type')) + + mapped_methods = proguard_mapping.GetMethodMapping(method) + if mapped_methods is None: + logging.warning('No method matching "%s" has been found in the proguard ' + 'mapping file', method) + continue + + for original_method in mapped_methods: + profile.AddMethod(original_method, match.group('tags')) + + return profile + + +def ObfuscateProfile(nonobfuscated_profile, dex_file, proguard_mapping, + dexdump_path, output_filename): + """Helper method for obfuscating a profile. + + Args: + nonobfuscated_profile: a profile with nonobfuscated symbols. + dex_file: path to the dex file matching the mapping. + proguard_mapping: a mapping from nonobfuscated to obfuscated symbols used + in the dex file. 
+    dexdump_path: path to the dexdump utility.
+    output_filename: output filename in which to write the obfuscated profile.
+  """
+  dexinfo = ProcessDex(_RunDexDump(dexdump_path, dex_file))
+  _, reverse_mapping = ProcessProguardMapping(
+      _ReadFile(proguard_mapping), dexinfo)
+  obfuscated_profile = ProcessProfile(
+      _ReadFile(nonobfuscated_profile), reverse_mapping)
+  obfuscated_profile.WriteToFile(output_filename)
+
+
+def main(args):
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--dexdump-path',
+      required=True,
+      help='Path to dexdump binary.')
+  parser.add_argument(
+      '--dex-path',
+      required=True,
+      help='Path to dex file corresponding to the proguard mapping file.')
+  parser.add_argument(
+      '--proguard-mapping-path',
+      required=True,
+      help='Path to input proguard mapping file corresponding to the dex file.')
+  parser.add_argument(
+      '--output-profile-path',
+      required=True,
+      help='Path to output profile.')
+  parser.add_argument(
+      '--input-profile-path',
+      required=True,
+      help='Path to input profile.')
+  parser.add_argument(
+      '--verbose',
+      action='store_true',
+      default=False,
+      help='Print verbose output.')
+  obfuscation = parser.add_mutually_exclusive_group(required=True)
+  obfuscation.add_argument('--obfuscate', action='store_true',
+      help='Indicates to output an obfuscated profile given a deobfuscated '
+      'one.')
+  obfuscation.add_argument('--deobfuscate', dest='obfuscate',
+      action='store_false', help='Indicates to output a deobfuscated profile '
+      'given an obfuscated one.')
+  options = parser.parse_args(args)
+
+  if options.verbose:
+    log_level = logging.WARNING
+  else:
+    log_level = logging.ERROR
+  logging.basicConfig(format='%(levelname)s: %(message)s', level=log_level)
+
+  dex = ProcessDex(_RunDexDump(options.dexdump_path, options.dex_path))
+  proguard_mapping, reverse_proguard_mapping = ProcessProguardMapping(
+      _ReadFile(options.proguard_mapping_path), dex)
+  if options.obfuscate:
+    profile = ProcessProfile(
+        _ReadFile(options.input_profile_path),
+        reverse_proguard_mapping)
+  else:
+    profile = ProcessProfile(
+        _ReadFile(options.input_profile_path),
+        proguard_mapping)
+  profile.WriteToFile(options.output_profile_path)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/android/convert_dex_profile_tests.py b/android/convert_dex_profile_tests.py
new file mode 100755
index 000000000000..915d26387a4b
--- /dev/null
+++ b/android/convert_dex_profile_tests.py
@@ -0,0 +1,277 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for convert_dex_profile.
+
+Can be run from build/android/:
+  $ cd build/android
+  $ python convert_dex_profile_tests.py
+"""
+
+import os
+import sys
+import tempfile
+import unittest
+
+import convert_dex_profile as cp
+
+sys.path.insert(1, os.path.join(os.path.dirname(__file__), 'gyp'))
+from util import build_utils
+
+cp.logging.disable(cp.logging.CRITICAL)
+
+# There are two obfuscations used in the tests below, each with the same
+# unobfuscated profile. The first, corresponding to DEX_DUMP, PROGUARD_MAPPING,
+# and OBFUSCATED_PROFILE, has an ambiguous method a() which is mapped to both
+# getInstance and initialize. The second, corresponding to DEX_DUMP_2,
+# PROGUARD_MAPPING_2 and OBFUSCATED_PROFILE_2, removes the ambiguity.
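+# (Profile tag letters match PROFILE_METHOD_RE's [HSP]+ in
+# convert_dex_profile.py; in ART profiles these are commonly read as
+# hot/startup/post-startup, though only the letters matter here.)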
+
+DEX_DUMP = """
+
+Class descriptor  : 'La;'
+  Direct methods    -
+    #0              : (in La;)
+      name          : '<clinit>'
+      type          : '(Ljava/lang/String;)V'
+      code          -
+      catches       : 1
+        0x000f - 0x001e
+          <any> -> 0x0093
+      positions     :
+        0x0001 line=310
+        0x0057 line=313
+      locals        :
+    #1              : (in La;)
+      name          : '<init>'
+      type          : '()V'
+      positions     :
+      locals        :
+  Virtual methods   -
+    #0              : (in La;)
+      name          : 'a'
+      type          : '(Ljava/lang/String;)I'
+      positions     :
+        0x0000 line=2
+        0x0003 line=3
+        0x001b line=8
+      locals        :
+        0x0000 - 0x0021 reg=3 this La;
+    #1              : (in La;)
+      name          : 'a'
+      type          : '(Ljava/lang/Object;)I'
+      positions     :
+        0x0000 line=8
+        0x0003 line=9
+      locals        :
+        0x0000 - 0x0021 reg=3 this La;
+    #2              : (in La;)
+      name          : 'b'
+      type          : '()La;'
+      positions     :
+        0x0000 line=1
+      locals        :
+"""
+
+# pylint: disable=line-too-long
+PROGUARD_MAPPING = \
+"""org.chromium.Original -> a:
+    org.chromium.Original sDisplayAndroidManager -> e
+    org.chromium.Original another() -> b
+    4:4:void inlined():237:237 -> a
+    4:4:org.chromium.Original getInstance():203 -> a
+    5:5:void org.chromium.Original$Subclass.<init>(org.chromium.Original,byte):130:130 -> a
+    5:5:void initialize():237 -> a
+    5:5:org.chromium.Original getInstance():203 -> a
+    6:6:void initialize():237:237 -> a
+    9:9:android.content.Context org.chromium.base.ContextUtils.getApplicationContext():49:49 -> a
+    9:9:android.content.Context getContext():219 -> a
+    9:9:void initialize():245 -> a
+    9:9:org.chromium.Original getInstance():203 -> a"""
+
+OBFUSCATED_PROFILE = \
+"""La;
+PLa;->b()La;
+SLa;->a(Ljava/lang/Object;)I
+HPLa;->a(Ljava/lang/String;)I"""
+
+DEX_DUMP_2 = """
+
+Class descriptor  : 'La;'
+  Direct methods    -
+    #0              : (in La;)
+      name          : '<clinit>'
+      type          : '(Ljava/lang/String;)V'
+      code          -
+      catches       : 1
+        0x000f - 0x001e
+          <any> -> 0x0093
+      positions     :
+        0x0001 line=310
+        0x0057 line=313
+      locals        :
+    #1              : (in La;)
+      name          : '<init>'
+      type          : '()V'
+      positions     :
+      locals        :
+  Virtual methods   -
+    #0              : (in La;)
+      name          : 'a'
+      type          : '(Ljava/lang/String;)I'
+      positions     :
+        0x0000 line=2
+        0x0003 line=3
+        0x001b line=8
+      locals        :
+        0x0000 - 0x0021 reg=3 this La;
+    #1              : (in La;)
+      name          : 'c'
+      type          : '(Ljava/lang/Object;)I'
+      positions     :
+        0x0000 line=8
+        0x0003 line=9
+      locals        :
+        0x0000 - 0x0021 reg=3 this La;
+    #2              : (in La;)
+      name          : 'b'
+      type          : '()La;'
+      positions     :
+        0x0000 line=1
+      locals        :
+"""
+
+# pylint: disable=line-too-long
+PROGUARD_MAPPING_2 = \
+"""org.chromium.Original -> a:
+    org.chromium.Original sDisplayAndroidManager -> e
+    org.chromium.Original another() -> b
+    void initialize() -> c
+    org.chromium.Original getInstance():203 -> a
+    4:4:void inlined():237:237 -> a"""
+
+OBFUSCATED_PROFILE_2 = \
+"""La;
+PLa;->b()La;
+HPSLa;->a()La;
+HPLa;->c()V"""
+
+UNOBFUSCATED_PROFILE = \
+"""Lorg/chromium/Original;
+PLorg/chromium/Original;->another()Lorg/chromium/Original;
+HPSLorg/chromium/Original;->getInstance()Lorg/chromium/Original;
+HPLorg/chromium/Original;->initialize()V"""
+
+class GenerateProfileTests(unittest.TestCase):
+  def testProcessDex(self):
+    dex = cp.ProcessDex(DEX_DUMP.splitlines())
+    self.assertIsNotNone(dex['a'])
+
+    self.assertEqual(len(dex['a'].FindMethodsAtLine('<clinit>', 311, 313)), 1)
+    self.assertEqual(len(dex['a'].FindMethodsAtLine('<clinit>', 309, 315)), 1)
+    clinit = dex['a'].FindMethodsAtLine('<clinit>', 311, 313)[0]
+    self.assertEqual(clinit.name, '<clinit>')
+    self.assertEqual(clinit.return_type, 'V')
+    self.assertEqual(clinit.param_types, 'Ljava/lang/String;')
+
+    self.assertEqual(len(dex['a'].FindMethodsAtLine('a', 8, None)), 2)
+    self.assertIsNone(dex['a'].FindMethodsAtLine('a', 100, None))
+
+# pylint: disable=protected-access
+  def testProcessProguardMapping(self):
+    dex = cp.ProcessDex(DEX_DUMP.splitlines())
+    mapping, reverse = cp.ProcessProguardMapping(
+        PROGUARD_MAPPING.splitlines(), dex)
+
+    self.assertEqual('La;', reverse.GetClassMapping('Lorg/chromium/Original;'))
+
+    getInstance = cp.Method(
+        'getInstance', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
+    initialize = cp.Method('initialize', 'Lorg/chromium/Original;', '', 'V')
+    another = cp.Method(
+        'another', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
+    subclassInit = cp.Method(
+        '<init>', 'Lorg/chromium/Original$Subclass;',
+        'Lorg/chromium/Original;B', 'V')
+
+    mapped = mapping.GetMethodMapping(
+        cp.Method('a', 'La;', 'Ljava/lang/String;', 'I'))
+    self.assertEqual(len(mapped), 2)
+    self.assertIn(getInstance, mapped)
+    self.assertNotIn(subclassInit, mapped)
+    self.assertNotIn(
+        cp.Method('inlined', 'Lorg/chromium/Original;', '', 'V'), mapped)
+    self.assertIn(initialize, mapped)
+
+    mapped = mapping.GetMethodMapping(
+        cp.Method('a', 'La;', 'Ljava/lang/Object;', 'I'))
+    self.assertEqual(len(mapped), 1)
+    self.assertIn(getInstance, mapped)
+
+    mapped = mapping.GetMethodMapping(cp.Method('b', 'La;', '', 'La;'))
+    self.assertEqual(len(mapped), 1)
+    self.assertIn(another, mapped)
+
+    for from_method, to_methods in mapping._method_mapping.items():
+      for to_method in to_methods:
+        self.assertIn(from_method, reverse.GetMethodMapping(to_method))
+    for from_class, to_class in mapping._class_mapping.items():
+      self.assertEqual(from_class, reverse.GetClassMapping(to_class))
+
+  def testProcessProfile(self):
+    dex = cp.ProcessDex(DEX_DUMP.splitlines())
+    mapping, _ = cp.ProcessProguardMapping(PROGUARD_MAPPING.splitlines(), dex)
+    profile = cp.ProcessProfile(OBFUSCATED_PROFILE.splitlines(), mapping)
+
+    getInstance = cp.Method(
+        'getInstance', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
+    initialize = cp.Method('initialize', 'Lorg/chromium/Original;', '', 'V')
+    another = cp.Method(
+        'another', 'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;')
+
+    self.assertIn('Lorg/chromium/Original;', profile._classes)
+    self.assertIn(getInstance, profile._methods)
+    self.assertIn(initialize, profile._methods)
+    self.assertIn(another, profile._methods)
+
+    self.assertEqual(profile._methods[getInstance], set(['H', 'S', 'P']))
+    self.assertEqual(profile._methods[initialize], set(['H', 'P']))
+    self.assertEqual(profile._methods[another], set(['P']))
+
+  def testEndToEnd(self):
+    dex = cp.ProcessDex(DEX_DUMP.splitlines())
+    mapping, _ = cp.ProcessProguardMapping(PROGUARD_MAPPING.splitlines(), dex)
+
+    profile = cp.ProcessProfile(OBFUSCATED_PROFILE.splitlines(), mapping)
+    with tempfile.NamedTemporaryFile() as temp:
+      profile.WriteToFile(temp.name)
+      with open(temp.name, 'r') as f:
+        for a, b in zip(sorted(f), sorted(UNOBFUSCATED_PROFILE.splitlines())):
+          self.assertEqual(a.strip(), b.strip())
+
+  def testObfuscateProfile(self):
+    with build_utils.TempDir() as temp_dir:
+      # The dex dump is used as the dexfile, by passing /bin/cat as the dexdump
+      # program.
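+      # (_RunDexDump simply executes [dexdump_path, dex_file_path], so with
+      # /bin/cat the file's contents are returned verbatim as the "dump".)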
+ dex_path = os.path.join(temp_dir, 'dexdump') + with open(dex_path, 'w') as dex_file: + dex_file.write(DEX_DUMP_2) + mapping_path = os.path.join(temp_dir, 'mapping') + with open(mapping_path, 'w') as mapping_file: + mapping_file.write(PROGUARD_MAPPING_2) + unobfuscated_path = os.path.join(temp_dir, 'unobfuscated') + with open(unobfuscated_path, 'w') as unobfuscated_file: + unobfuscated_file.write(UNOBFUSCATED_PROFILE) + obfuscated_path = os.path.join(temp_dir, 'obfuscated') + cp.ObfuscateProfile(unobfuscated_path, dex_path, mapping_path, '/bin/cat', + obfuscated_path) + with open(obfuscated_path) as obfuscated_file: + obfuscated_profile = sorted(obfuscated_file.readlines()) + for a, b in zip( + sorted(OBFUSCATED_PROFILE_2.splitlines()), obfuscated_profile): + self.assertEqual(a.strip(), b.strip()) + + +if __name__ == '__main__': + unittest.main() diff --git a/android/dcheck_is_off.flags b/android/dcheck_is_off.flags new file mode 100644 index 000000000000..5718c27959e6 --- /dev/null +++ b/android/dcheck_is_off.flags @@ -0,0 +1,12 @@ +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Contains flags that are applied only when ENABLE_DCHECK=false. + +-checkdiscard @org.chromium.build.annotations.CheckDiscard class ** { + *; +} +-checkdiscard class ** { + @org.chromium.build.annotations.CheckDiscard *; +} diff --git a/android/devil_chromium.json b/android/devil_chromium.json new file mode 100644 index 000000000000..784406dbf16a --- /dev/null +++ b/android/devil_chromium.json @@ -0,0 +1,84 @@ +{ + "config_type": "BaseConfig", + "dependencies": { + "adb": { + "file_info": { + "linux2_x86_64": { + "local_paths": [ + "../../third_party/android_sdk/public/platform-tools/adb" + ] + } + } + }, + "android_sdk": { + "file_info": { + "linux2_x86_64": { + "local_paths": [ + "../../third_party/android_sdk/public" + ] + } + } + }, + "simpleperf": { + "file_info": { + "android_armeabi-v7a": { + "local_paths": [ + "../../third_party/android_ndk/simpleperf/bin/android/arm/simpleperf" + ] + }, + "android_arm64-v8a": { + "local_paths": [ + "../../third_party/android_ndk/simpleperf/bin/android/arm64/simpleperf" + ] + }, + "android_x86": { + "local_paths": [ + "../../third_party/android_ndk/simpleperf/bin/android/x86/simpleperf" + ] + }, + "android_x86_64": { + "local_paths": [ + "../../third_party/android_ndk/simpleperf/bin/android/x86_64/simpleperf" + ] + }, + "linux_x86": { + "local_paths": [ + "../../third_party/android_ndk/simpleperf/bin/linux/x86/simpleperf" + ] + }, + "linux_x86_64": { + "local_paths": [ + "../../third_party/android_ndk/simpleperf/bin/linux/x86_64/simpleperf" + ] + } + } + }, + "simpleperf_scripts": { + "file_info": { + "default": { + "local_paths": [ + "../../third_party/android_ndk/simpleperf" + ] + } + } + }, + "llvm-symbolizer": { + "file_info": { + "default": { + "local_paths": [ + "../../third_party/llvm-build/Release+Asserts/bin/llvm-symbolizer" + ] + } + } + }, + "bundletool": { + "file_info": { + "default": { + "local_paths": [ + "../../third_party/android_build_tools/bundletool/bundletool.jar" + ] + } + } + } + } +} diff --git a/android/devil_chromium.py b/android/devil_chromium.py new file mode 100644 index 000000000000..fbc538952d5b --- /dev/null +++ b/android/devil_chromium.py @@ -0,0 +1,200 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""Configures devil for use in chromium.""" + +import os +import sys + +from pylib import constants +from pylib.constants import host_paths + +if host_paths.DEVIL_PATH not in sys.path: + sys.path.insert(1, host_paths.DEVIL_PATH) + +from devil import devil_env +from devil.android.ndk import abis + +_BUILD_DIR = os.path.join(constants.DIR_SOURCE_ROOT, 'build') +if _BUILD_DIR not in sys.path: + sys.path.insert(1, _BUILD_DIR) + +import gn_helpers + +_DEVIL_CONFIG = os.path.abspath( + os.path.join(os.path.dirname(__file__), 'devil_chromium.json')) + +_DEVIL_BUILD_PRODUCT_DEPS = { + 'chromium_commands': [ + { + 'platform': 'linux2', + 'arch': 'x86_64', + 'path_components': ['lib.java', 'chromium_commands.dex.jar'], + } + ], + 'forwarder_device': [ + { + 'platform': 'android', + 'arch': abis.ARM, + 'path_components': ['forwarder_dist'], + }, + { + 'platform': 'android', + 'arch': abis.ARM_64, + 'path_components': ['forwarder_dist'], + }, + { + 'platform': 'android', + 'arch': 'mips', + 'path_components': ['forwarder_dist'], + }, + { + 'platform': 'android', + 'arch': 'mips64', + 'path_components': ['forwarder_dist'], + }, + { + 'platform': 'android', + 'arch': abis.X86, + 'path_components': ['forwarder_dist'], + }, + { + 'platform': 'android', + 'arch': abis.X86_64, + 'path_components': ['forwarder_dist'], + }, + ], + 'forwarder_host': [ + { + 'platform': 'linux2', + 'arch': 'x86_64', + 'path_components': ['host_forwarder'], + }, + ], + 'md5sum_device': [ + { + 'platform': 'android', + 'arch': abis.ARM, + 'path_components': ['md5sum_dist'], + }, + { + 'platform': 'android', + 'arch': abis.ARM_64, + 'path_components': ['md5sum_dist'], + }, + { + 'platform': 'android', + 'arch': 'mips', + 'path_components': ['md5sum_dist'], + }, + { + 'platform': 'android', + 'arch': 'mips64', + 'path_components': ['md5sum_dist'], + }, + { + 'platform': 'android', + 'arch': abis.X86, + 'path_components': ['md5sum_dist'], + }, + { + 'platform': 'android', + 'arch': abis.X86_64, + 'path_components': ['md5sum_dist'], + }, + ], + 'md5sum_host': [ + { + 'platform': 'linux2', + 'arch': 'x86_64', + 'path_components': ['md5sum_bin_host'], + }, + ], +} + + +def _UseLocalBuildProducts(output_directory, devil_dynamic_config): + output_directory = os.path.abspath(output_directory) + devil_dynamic_config['dependencies'] = { + dep_name: { + 'file_info': { + '%s_%s' % (dep_config['platform'], dep_config['arch']): { + 'local_paths': [ + os.path.join(output_directory, + *dep_config['path_components']), + ], + } + for dep_config in dep_configs + } + } + for dep_name, dep_configs in _DEVIL_BUILD_PRODUCT_DEPS.items() + } + + +def _BuildWithChromium(): + """Returns value of gclient's |build_with_chromium|.""" + gni_path = os.path.join(_BUILD_DIR, 'config', 'gclient_args.gni') + if not os.path.exists(gni_path): + return False + with open(gni_path) as f: + data = f.read() + args = gn_helpers.FromGNArgs(data) + return args.get('build_with_chromium', False) + + +def Initialize(output_directory=None, custom_deps=None, adb_path=None): + """Initializes devil with chromium's binaries and third-party libraries. + + This includes: + - Libraries: + - the android SDK ("android_sdk") + - Build products: + - host & device forwarder binaries + ("forwarder_device" and "forwarder_host") + - host & device md5sum binaries ("md5sum_device" and "md5sum_host") + + Args: + output_directory: An optional path to the output directory. If not set, + no built dependencies are configured. + custom_deps: An optional dictionary specifying custom dependencies. 
+ This should be of the form: + + { + 'dependency_name': { + 'platform': 'path', + ... + }, + ... + } + adb_path: An optional path to use for the adb binary. If not set, this uses + the adb binary provided by the Android SDK. + """ + build_with_chromium = _BuildWithChromium() + + devil_dynamic_config = { + 'config_type': 'BaseConfig', + 'dependencies': {}, + } + if build_with_chromium and output_directory: + # Non-chromium users of chromium's //build directory fetch build products + # from google storage rather than use locally built copies. Chromium uses + # locally-built copies so that changes to the tools can be easily tested. + _UseLocalBuildProducts(output_directory, devil_dynamic_config) + + if custom_deps: + devil_dynamic_config['dependencies'].update(custom_deps) + if adb_path: + devil_dynamic_config['dependencies'].update({ + 'adb': { + 'file_info': { + devil_env.GetPlatform(): { + 'local_paths': [adb_path] + } + } + } + }) + + config_files = [_DEVIL_CONFIG] if build_with_chromium else None + devil_env.config.Initialize(configs=[devil_dynamic_config], + config_files=config_files) diff --git a/android/devil_chromium.pydeps b/android/devil_chromium.pydeps new file mode 100644 index 000000000000..41438059296e --- /dev/null +++ b/android/devil_chromium.pydeps @@ -0,0 +1,39 @@ +# Generated by running: +# build/print_python_deps.py --root build/android --output build/android/devil_chromium.pydeps build/android/devil_chromium.py +../../third_party/catapult/common/py_utils/py_utils/__init__.py +../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py +../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py +../../third_party/catapult/common/py_utils/py_utils/lock.py +../../third_party/catapult/dependency_manager/dependency_manager/__init__.py +../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py +../../third_party/catapult/dependency_manager/dependency_manager/base_config.py +../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py +../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py +../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py +../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py +../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py +../../third_party/catapult/dependency_manager/dependency_manager/manager.py +../../third_party/catapult/dependency_manager/dependency_manager/uploader.py +../../third_party/catapult/devil/devil/__init__.py +../../third_party/catapult/devil/devil/android/__init__.py +../../third_party/catapult/devil/devil/android/constants/__init__.py +../../third_party/catapult/devil/devil/android/constants/chrome.py +../../third_party/catapult/devil/devil/android/ndk/__init__.py +../../third_party/catapult/devil/devil/android/ndk/abis.py +../../third_party/catapult/devil/devil/android/sdk/__init__.py +../../third_party/catapult/devil/devil/android/sdk/keyevent.py +../../third_party/catapult/devil/devil/android/sdk/version_codes.py +../../third_party/catapult/devil/devil/base_error.py +../../third_party/catapult/devil/devil/constants/__init__.py +../../third_party/catapult/devil/devil/constants/exit_codes.py +../../third_party/catapult/devil/devil/devil_env.py +../../third_party/catapult/devil/devil/utils/__init__.py +../../third_party/catapult/devil/devil/utils/reraiser_thread.py +../../third_party/catapult/devil/devil/utils/timeout_retry.py 
+../../third_party/catapult/devil/devil/utils/watchdog_timer.py +../../third_party/catapult/third_party/six/six.py +../gn_helpers.py +devil_chromium.py +pylib/__init__.py +pylib/constants/__init__.py +pylib/constants/host_paths.py diff --git a/android/diff_resource_sizes.py b/android/diff_resource_sizes.py new file mode 100755 index 000000000000..ff21d8180498 --- /dev/null +++ b/android/diff_resource_sizes.py @@ -0,0 +1,200 @@ +#!/usr/bin/env python3 +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Runs resource_sizes.py on two apks and outputs the diff.""" + + +import argparse +import json +import logging +import os +import subprocess +import sys + +from pylib.constants import host_paths +from pylib.utils import shared_preference_utils + +with host_paths.SysPath(host_paths.BUILD_COMMON_PATH): + import perf_tests_results_helper # pylint: disable=import-error + +with host_paths.SysPath(host_paths.TRACING_PATH): + from tracing.value import convert_chart_json # pylint: disable=import-error + +_ANDROID_DIR = os.path.dirname(os.path.abspath(__file__)) +with host_paths.SysPath(os.path.join(_ANDROID_DIR, 'gyp')): + from util import build_utils # pylint: disable=import-error + + +_BASE_CHART = { + 'format_version': '0.1', + 'benchmark_name': 'resource_sizes_diff', + 'benchmark_description': 'APK resource size diff information', + 'trace_rerun_options': [], + 'charts': {}, +} + +_CHARTJSON_FILENAME = 'results-chart.json' +_HISTOGRAMS_FILENAME = 'perf_results.json' + + +def DiffResults(chartjson, base_results, diff_results): + """Reports the diff between the two given results. + + Args: + chartjson: A dictionary that chartjson results will be placed in, or None + to only print results. + base_results: The chartjson-formatted size results of the base APK. + diff_results: The chartjson-formatted size results of the diff APK. + """ + for graph_title, graph in base_results['charts'].items(): + for trace_title, trace in graph.items(): + perf_tests_results_helper.ReportPerfResult( + chartjson, graph_title, trace_title, + diff_results['charts'][graph_title][trace_title]['value'] + - trace['value'], + trace['units'], trace['improvement_direction'], + trace['important']) + + +def AddIntermediateResults(chartjson, base_results, diff_results): + """Copies the intermediate size results into the output chartjson. + + Args: + chartjson: A dictionary that chartjson results will be placed in. + base_results: The chartjson-formatted size results of the base APK. + diff_results: The chartjson-formatted size results of the diff APK. 
+ """ + for graph_title, graph in base_results['charts'].items(): + for trace_title, trace in graph.items(): + perf_tests_results_helper.ReportPerfResult( + chartjson, graph_title + '_base_apk', trace_title, + trace['value'], trace['units'], trace['improvement_direction'], + trace['important']) + + # Both base_results and diff_results should have the same charts/traces, but + # loop over them separately in case they don't + for graph_title, graph in diff_results['charts'].items(): + for trace_title, trace in graph.items(): + perf_tests_results_helper.ReportPerfResult( + chartjson, graph_title + '_diff_apk', trace_title, + trace['value'], trace['units'], trace['improvement_direction'], + trace['important']) + + +def _CreateArgparser(): + def chromium_path(arg): + if arg.startswith('//'): + return os.path.join(host_paths.DIR_SOURCE_ROOT, arg[2:]) + return arg + + argparser = argparse.ArgumentParser( + description='Diff resource sizes of two APKs. Arguments not listed here ' + 'will be passed on to both invocations of resource_sizes.py.') + argparser.add_argument('--chromium-output-directory-base', + dest='out_dir_base', + type=chromium_path, + help='Location of the build artifacts for the base ' + 'APK, i.e. what the size increase/decrease will ' + 'be measured from.') + argparser.add_argument('--chromium-output-directory-diff', + dest='out_dir_diff', + type=chromium_path, + help='Location of the build artifacts for the diff ' + 'APK.') + argparser.add_argument('--chartjson', + action='store_true', + help='DEPRECATED. Use --output-format=chartjson ' + 'instead.') + argparser.add_argument('--output-format', + choices=['chartjson', 'histograms'], + help='Output the results to a file in the given ' + 'format instead of printing the results.') + argparser.add_argument('--include-intermediate-results', + action='store_true', + help='Include the results from the resource_sizes.py ' + 'runs in the chartjson output.') + argparser.add_argument('--output-dir', + default='.', + type=chromium_path, + help='Directory to save chartjson to.') + argparser.add_argument('--base-apk', + required=True, + type=chromium_path, + help='Path to the base APK, i.e. what the size ' + 'increase/decrease will be measured from.') + argparser.add_argument('--diff-apk', + required=True, + type=chromium_path, + help='Path to the diff APK, i.e. the APK whose size ' + 'increase/decrease will be measured against the ' + 'base APK.') + return argparser + + +def main(): + args, unknown_args = _CreateArgparser().parse_known_args() + # TODO(bsheedy): Remove this once all uses of --chartjson are removed. 
+ if args.chartjson: + args.output_format = 'chartjson' + + chartjson = _BASE_CHART.copy() if args.output_format else None + + with build_utils.TempDir() as base_dir, build_utils.TempDir() as diff_dir: + # Run resource_sizes.py on the two APKs + resource_sizes_path = os.path.join(_ANDROID_DIR, 'resource_sizes.py') + shared_args = (['python', resource_sizes_path, '--output-format=chartjson'] + + unknown_args) + + base_args = shared_args + ['--output-dir', base_dir, args.base_apk] + if args.out_dir_base: + base_args += ['--chromium-output-directory', args.out_dir_base] + try: + subprocess.check_output(base_args, stderr=subprocess.STDOUT) + except subprocess.CalledProcessError as e: + print(e.output) + raise + + diff_args = shared_args + ['--output-dir', diff_dir, args.diff_apk] + if args.out_dir_diff: + diff_args += ['--chromium-output-directory', args.out_dir_diff] + try: + subprocess.check_output(diff_args, stderr=subprocess.STDOUT) + except subprocess.CalledProcessError as e: + print(e.output) + raise + + # Combine the separate results + base_file = os.path.join(base_dir, _CHARTJSON_FILENAME) + diff_file = os.path.join(diff_dir, _CHARTJSON_FILENAME) + base_results = shared_preference_utils.ExtractSettingsFromJson(base_file) + diff_results = shared_preference_utils.ExtractSettingsFromJson(diff_file) + DiffResults(chartjson, base_results, diff_results) + if args.include_intermediate_results: + AddIntermediateResults(chartjson, base_results, diff_results) + + if args.output_format: + chartjson_path = os.path.join(os.path.abspath(args.output_dir), + _CHARTJSON_FILENAME) + logging.critical('Dumping diff chartjson to %s', chartjson_path) + with open(chartjson_path, 'w') as outfile: + json.dump(chartjson, outfile) + + if args.output_format == 'histograms': + histogram_result = convert_chart_json.ConvertChartJson(chartjson_path) + if histogram_result.returncode != 0: + logging.error('chartjson conversion failed with error: %s', + histogram_result.stdout) + return 1 + + histogram_path = os.path.join(os.path.abspath(args.output_dir), + 'perf_results.json') + logging.critical('Dumping diff histograms to %s', histogram_path) + with open(histogram_path, 'w') as json_file: + json_file.write(histogram_result.stdout) + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/android/docs/README.md b/android/docs/README.md new file mode 100644 index 000000000000..5ee0ca638f16 --- /dev/null +++ b/android/docs/README.md @@ -0,0 +1,16 @@ +# Android Build Docs + +* [//docs/android_build_instructions.md](/docs/android_build_instructions.md) +* [//docs/android_dynamic_feature_modules.md](/docs/android_dynamic_feature_modules.md) +* [build_config.md](build_config.md) +* [coverage.md](coverage.md) +* [java_toolchain.md](java_toolchain.md) +* [java_optimization.md](java_optimization.md) +* [lint.md](lint.md) +* [life_of_a_resource.md](life_of_a_resource.md) +* [../incremental_install/README.md](../incremental_install/README.md) +* [//docs/ui/android/bytecode_rewriting.md](/docs/ui/android/bytecode_rewriting.md) +* [go/doubledown](https://goto.google.com/doubledown) (Googlers only) + +See also: +* [//build/README.md](../../README.md) diff --git a/android/docs/build_config.md b/android/docs/build_config.md new file mode 100644 index 000000000000..8f752a66916b --- /dev/null +++ b/android/docs/build_config.md @@ -0,0 +1,168 @@ +# Introduction + +This document describes the `.build_config.json` files that are used by the +Chromium build system for Android-specific targets like APK, resources, +and more. 
+
+[TOC]
+
+# I. Overview of .build_config.json files:
+
+The Android build requires performing computations about dependencies in
+various targets, which are not possible with the GN build language. To address
+this, `.build_config.json` files are written during the build to store the
+needed per-target information as JSON files.
+
+They are always written to `$target_gen_dir/${target_name}.build_config.json`.
+
+Many scripts under [`build/android/gyp/`](build/android/gyp/), which are used
+during the build, can also accept parameter arguments using
+`@FileArg()` references, which look like:
+
+    --some-param=@FileArg(<filename>:<key1>:<key2>:..<keyN>)
+
+This placeholder will ensure that `<filename>` is read as a JSON file, then
+return the value at `[<key1>][<key2>]...[<keyN>]` for the `--some-param`
+option.
+
+Apart from that, the scripts do not need to know anything about the structure
+of `.build_config.json` files (but the GN rules that invoke them do and select
+which `@FileArg()` references to use).
+
+For a concrete example, consider the following GN fragment:
+
+```gn
+# From //ui/android/BUILD.gn:
+android_resources("ui_java_resources") {
+  custom_package = "org.chromium.ui"
+  resource_dirs = [ "java/res" ]
+  deps = [
+    ":ui_strings_grd",
+  ]
+}
+```
+
+This will end up generating the following JSON file under
+`$CHROMIUM_OUTPUT_DIR/gen/ui/android/ui_java_resources.build_config.json`:
+
+```json
+{
+  "deps_info": {
+    "deps_configs": [
+      "gen/ui/android/ui_strings_grd.build_config.json"
+    ],
+    "name": "ui_java_resources.build_config.json",
+    "package_name": "org.chromium.ui",
+    "path": "gen/ui/android/ui_java_resources.build_config.json",
+    "r_text": "gen/ui/android/ui_java_resources_R.txt",
+    "resources_dirs": [
+      "../../ui/android/java/res"
+    ],
+    "resources_zip": "resource_zips/ui/android/ui_java_resources.resources.zip",
+    "srcjar": "gen/ui/android/ui_java_resources.srcjar",
+    "type": "android_resources"
+  },
+  "gradle": {},
+  "resources": {
+    "dependency_zips": [
+      "resource_zips/ui/android/ui_strings_grd.resources.zip"
+    ],
+    "extra_package_names": []
+  }
+}
+```
+
+NOTE: All path values in `.build_config.json` files are relative to your
+`$CHROMIUM_OUTPUT_DIR`.
+
+# II. Generation of .build_config.json files:
+
+They are generated by the GN [`write_build_config()`][gn_write_build_config]
+internal template, which ends up invoking
+[`write_build_config.py`][write_build_config_py]. For our example above, this
+is with the following parameters:
+
+```
+python ../../build/android/gyp/write_build_config.py \
+  --type=android_resources \
+  --depfile gen/ui/android/ui_java_resources__build_config_crbug_908819.d \
+  --deps-configs=\[\"gen/ui/android/ui_strings_grd.build_config.json\"\] \
+  --build-config gen/ui/android/ui_java_resources.build_config.json \
+  --resources-zip resource_zips/ui/android/ui_java_resources.resources.zip \
+  --package-name org.chromium.ui \
+  --r-text gen/ui/android/ui_java_resources_R.txt \
+  --resource-dirs=\[\"../../ui/android/java/res\"\] \
+  --srcjar gen/ui/android/ui_java_resources.srcjar
+```
+
+Note that *most* of the content of the JSON file comes from command-line
+parameters, but not all of it.
+
+In particular, the `resources['dependency_zips']` entry was computed by
+inspecting the content of all dependencies (here, only
+`ui_strings_grd.build_config.json`), and collecting their
+`deps_configs['resources_zip']` values.
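+
+As a rough illustration, resolving an `@FileArg()` reference amounts to a JSON
+load followed by a key walk. The helper below is a minimal standalone sketch of
+that idea (the real handling lives in the scripts under `build/android/gyp/`;
+`resolve_file_arg` is a made-up name used only for illustration):
+
+```python
+import json
+
+
+def resolve_file_arg(ref):
+  """Resolves '@FileArg(<filename>:<key1>:..<keyN>)' to a value (sketch)."""
+  assert ref.startswith('@FileArg(') and ref.endswith(')')
+  filename, *keys = ref[len('@FileArg('):-1].split(':')
+  with open(filename) as f:
+    value = json.load(f)
+  for key in keys:
+    value = value[key]  # Walk [key1][key2]...[keyN].
+  return value
+
+
+# e.g.:
+# resolve_file_arg('@FileArg(gen/ui/android/ui_java_resources'
+#                  '.build_config.json:resources:dependency_zips)')
+```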
+
+Because a target's `.build_config.json` file will always be generated after
+those of all of its dependencies,
+[`write_build_config.py`][write_build_config_py] can traverse the
+whole (transitive) set of direct *and* indirect dependencies for a given target
+and extract useful information out of it.
+
+This is the kind of processing that cannot be done at the GN language level,
+and is very powerful for Android builds.
+
+
+# III. Usage of .build_config.json files:
+
+In addition to being parsed by `write_build_config.py` when they are listed
+in the `--deps-configs` of a given target, the `.build_config.json` files are
+used by other scripts under [build/android/gyp/] to build stuff.
+
+For example, the GN `android_resources` template uses it to invoke the
+`process_resources.py` script with the following command, in order to
+generate various related files (e.g. `ui_java_resources_R.txt`):
+
+```sh
+python ../../build/android/gyp/process_resources.py \
+  --depfile gen/ui/android/ui_java_resources_1.d \
+  --android-sdk-jar ../../third_party/android_sdk/public/platforms/android-29/android.jar \
+  --aapt-path ../../third_party/android_sdk/public/build-tools/29.0.2/aapt \
+  --dependencies-res-zips=@FileArg\(gen/ui/android/ui_java_resources.build_config.json:resources:dependency_zips\) \
+  --extra-res-packages=@FileArg\(gen/ui/android/ui_java_resources.build_config.json:resources:extra_package_names\) \
+  --resource-dirs=\[\"../../ui/android/java/res\"\] \
+  --debuggable \
+  --resource-zip-out resource_zips/ui/android/ui_java_resources.resources.zip \
+  --r-text-out gen/ui/android/ui_java_resources_R.txt \
+  --srcjar-out gen/ui/android/ui_java_resources.srcjar \
+  --non-constant-id \
+  --custom-package org.chromium.ui \
+  --shared-resources
+```
+
+Note the use of `@FileArg()` references here, to tell the script where to find
+the information it needs.
+
+
+# IV. Format of .build_config.json files:
+
+Thanks to `@FileArg()` references, Python build scripts under
+[`build/android/gyp/`](build/android/gyp/) do not need to know anything
+about the internal format of `.build_config.json` files.
+
+This format is decided between internal GN build rules and
+[`write_build_config.py`][write_build_config_py]. Since it changes rather
+often, the format documentation is kept inside the Python script itself, but
+can be extracted as a Markdown file and visualized with the following commands:
+
+```sh
+# Extract .build_config.json format documentation
+build/android/gyp/write_build_config.py \
+  --generate-markdown-format-doc > /tmp/format.md
+
+# Launch a browser to visualize the format documentation.
+python tools/md_browser/md_browser.py -d /tmp /tmp/format.md
+```
+
+[build/android/gyp/]: https://chromium.googlesource.com/chromium/src/build/+/main/android/gyp/
+[gn_write_build_config]: https://cs.chromium.org/chromium/src/build/config/android/internal_rules.gni?q=write_build_config&sq=package:chromium
+[write_build_config_py]: https://chromium.googlesource.com/chromium/src/build/+/main/android/gyp/write_build_config.py
diff --git a/android/docs/class_verification_failures.md b/android/docs/class_verification_failures.md
new file mode 100644
index 000000000000..ab9a24135749
--- /dev/null
+++ b/android/docs/class_verification_failures.md
@@ -0,0 +1,294 @@
+# Class Verification Failures
+
+[TOC]
+
+## This document is obsolete
+
+While class verification failures still exist, our Java optimizer, R8, has
+solved this problem for us.
+Developers should not have to worry about this
+problem unless there is a bug in R8. See [this bug](http://b/138781768) for
+where they implemented this solution for us.
+
+## What's this all about?
+
+This document aims to explain class verification on Android, how this can
+affect app performance, how to identify problems, and chromium-specific
+solutions. For simplicity, this document focuses on how class verification is
+implemented by ART, the virtual machine which replaced Dalvik starting in
+Android Lollipop.
+
+## What is class verification?
+
+The Java language requires any virtual machine to _verify_ the class files it
+loads and executes. Generally, verification is extra work the virtual machine
+is responsible for doing, on top of the work of loading the class and
+performing [class initialization][1].
+
+A class may fail verification for a wide variety of reasons, but in practice
+it's usually because the class's code refers to unknown classes or methods. An
+example case might look like:
+
+```java
+public class WindowHelper {
+    // ...
+    public boolean isWideColorGamut() {
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1) {
+            return mWindow.isWideColorGamut();
+        }
+        return false;
+    }
+}
+```
+
+### Why does that fail?
+
+In this example, `WindowHelper` is a helper class intended to help callers
+figure out wide color gamut support, even on pre-OMR1 devices. However, this
+class will fail class verification on pre-OMR1 devices, because it refers to
+[`Window#isWideColorGamut()`][2] (new-in-OMR1), which appears to be an
+undefined method.
+
+### Huh? But we have an SDK check!
+
+SDK checks are completely irrelevant for class verification. Although readers
+can see we'll never call the new-in-OMR1 API unless we're on >= OMR1 devices,
+the Oreo version of ART doesn't know `isWideColorGamut()` was added in next
+year's release. From ART's perspective, we may as well be calling
+`methodWhichDoesNotExist()`, which would clearly be unsafe.
+
+All the SDK check does is protect us from crashing at runtime if we call this
+method on Oreo or below.
+
+### Class verification on ART
+
+While the above is a mostly general description of class verification, it's
+important to understand how the Android runtime handles this.
+
+Since class verification is extra work, ART has an optimization called **AOT
+("ahead-of-time") verification**¹. Immediately after installing an app, ART
+will scan the dex files and verify as many classes as it can. If a class fails
+verification, this is usually a "soft failure" (hard failures are uncommon),
+and ART marks the class with the status `RetryVerificationAtRuntime`.
+
+`RetryVerificationAtRuntime`, as the name suggests, means ART must try again
+to verify the class at runtime. ART does so the first time you access the
+class (right before class initialization/the `<clinit>()` method). However,
+depending on the class, this verification step can be very expensive (we've
+observed cases which take [several milliseconds][3]). Since apps tend to
+initialize most of their classes during startup, verification significantly
+increases startup time.
+
+Another minor cost to failing class verification is that ART cannot optimize
+classes which fail verification, so **all** methods in the class will perform
+slower at runtime, even after the verification step.
+
+*** aside
+¹ AOT _verification_ should not be confused with AOT _compilation_ (another
+ART feature).
+Unlike compilation, AOT verification happens at install time for
+every application, whereas recent versions of ART aim to apply AOT compilation
+selectively to optimize space.
+***
+
+## Chromium's solution
+
+**Note:** This section is no longer relevant as R8 has fixed this for us. We
+intend to remove these ApiHelperFor classes - see
+[this bug](https://crbug.com/1302156).
+
+In Chromium, we try to avoid doing class verification at runtime by
+manually out-of-lining all Android API usage like so:
+
+```java
+public class ApiHelperForOMR1 {
+    public static boolean isWideColorGamut(Window window) {
+        return window.isWideColorGamut();
+    }
+}
+
+public class WindowHelper {
+    // ...
+    public boolean isWideColorGamut() {
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1) {
+            return ApiHelperForOMR1.isWideColorGamut(mWindow);
+        }
+        return false;
+    }
+}
+```
+
+This pushes the class verification failure out of `WindowHelper` and into the
+new `ApiHelperForOMR1` class. There's no magic here: `ApiHelperForOMR1` will
+fail class verification on Oreo and below, for the same reason `WindowHelper`
+did previously.
+
+The key is that, while `WindowHelper` is used on all API levels, it only calls
+into `ApiHelperForOMR1` on OMR1 and above. Because we never use
+`ApiHelperForOMR1` on Oreo and below, we never load and initialize the class,
+and thanks to ART's lazy runtime class verification, we never actually retry
+verification. **Note:** `list_class_verification_failures.py` will still list
+`ApiHelperFor*` classes in its output, although these don't cause performance
+issues.
+
+### Creating ApiHelperFor\* classes
+
+There are several examples throughout the code base, but such classes should
+look as follows:
+
+```java
+/**
+ * Utility class to use new APIs that were added in O_MR1 (API level 27).
+ * These need to exist in a separate class so that Android framework can
+ * successfully verify classes without encountering the new APIs.
+ */
+@RequiresApi(Build.VERSION_CODES.O_MR1)
+public class ApiHelperForOMR1 {
+    private ApiHelperForOMR1() {}
+
+    // ...
+}
+```
+
+* `@RequiresApi(Build.VERSION_CODES.O_MR1)`: this tells Android Lint it's OK
+  to use OMR1 APIs since this class is only used on OMR1 and above. Substitute
+  `O_MR1` for the [appropriate constant][4], depending when the APIs were
+  introduced.
+* Don't put any `SDK_INT` checks inside this class, because it must only be
+  called on >= OMR1.
+* R8 is smart enough not to inline methods where doing so would introduce
+  verification failures (b/138781768).
+
+### Out-of-lining if your method has a new type in its signature
+
+Sometimes you'll run into a situation where a class **needs** to have a method
+which either accepts a parameter which is a new type or returns a new type
+(e.g., externally-facing code, such as WebView's glue layer). Even though it's
+impossible to write such a class without referring to the new type, it's still
+possible to avoid failing class verification. ART has a useful optimization:
+if your class only moves a value between registers (i.e., it doesn't call any
+methods on the value or access any of its fields), then ART will not check for
+the existence of that value's type. This means you can write your class like
+so:
+
+```java
+public class FooBar {
+    // FooBar needs to have the getNewTypeInAndroidP method, but it would be
+    // expensive to fail verification.
+    // This method will only be called on >= P, but other methods on the
+    // class will be used on lower OS versions (and also can't be factored
+    // into another class).
+    public NewTypeInAndroidP getNewTypeInAndroidP() {
+        assert Build.VERSION.SDK_INT >= Build.VERSION_CODES.P;
+        // Stores a NewTypeInAndroidP in the return register, but doesn't do
+        // anything else with it
+        return ApiHelperForP.getNewTypeInAndroidP();
+    }
+
+    // ...
+}
+
+@VerifiesOnP
+@RequiresApi(Build.VERSION_CODES.P)
+public class ApiHelperForP {
+    public static NewTypeInAndroidP getNewTypeInAndroidP() {
+        return new NewTypeInAndroidP();
+    }
+
+    // ...
+}
+```
+
+**Note:** this only works in ART (L+), not Dalvik (KitKat and earlier).
+
+## Investigating class verification failures
+
+Class verification is generally surprising and nonintuitive. Fortunately, the
+ART team have provided tools to investigate errors (and the chromium team has
+built helpful wrappers).
+
+### Listing failing classes
+
+The main starting point is to figure out which classes fail verification
+(those which ART marks as `RetryVerificationAtRuntime`). This can be done for
+**any Android app** (it doesn't have to be from the chromium project) like so:
+
+```shell
+# Install the app first. Using Chrome as an example.
+autoninja -C out/Default chrome_public_apk
+out/Default/bin/chrome_public_apk install
+
+# List all classes marked as 'RetryVerificationAtRuntime'
+build/android/list_class_verification_failures.py --package="org.chromium.chrome"
+W    0.000s Main  Skipping deobfuscation because no map file was provided.
+first.failing.Class
+second.failing.Class
+...
+```
+
+"Skipping deobfuscation because no map file was provided" is a warning, since
+many Android applications (including Chrome's release builds) are built with
+proguard (or similar tools) to obfuscate Java classes and shrink code.
+Although it's safe to ignore this warning if you don't obfuscate Java code
+(`is_debug = true` or `is_java_debug = true`), the script knows how to
+deobfuscate classes for you:
+
+```shell
+build/android/list_class_verification_failures.py --package="org.chromium.chrome" \
+  --mapping=<path/to/file.mapping> # ex. out/Release/apks/ChromePublic.apk.mapping
+android.support.design.widget.AppBarLayout
+android.support.design.widget.TextInputLayout
+...
+```
+
+Googlers can also download mappings for [official
+builds](http://go/webview-official-builds).
+
+### Understanding the reason for the failure
+
+ART team also provide tooling for this. You can configure ART on a rooted
+device to log all class verification failures (during installation), at which
+point the cause is much clearer:
+
+```shell
+# Enable ART logging (requires root). Note the 2 pairs of quotes!
+adb root
+adb shell setprop dalvik.vm.dex2oat-flags '"--runtime-arg -verbose:verifier"'
+
+# Restart Android services to pick up the settings
+adb shell stop && adb shell start
+
+# Optional: clear logs which aren't relevant
+adb logcat -c
+
+# Install the app and check for ART logs
+adb install -d -r out/Default/apks/ChromePublic.apk
+adb logcat | grep 'dex2oat'
+...
+... I dex2oat : Soft verification failures in boolean org.chromium.content.browser.selection.SelectionPopupControllerImpl.b(android.view.ActionMode, android.view.Menu)
+... I dex2oat : boolean org.chromium.content.browser.selection.SelectionPopupControllerImpl.b(android.view.ActionMode, android.view.Menu): [0xF0] couldn't find method android.view.textclassifier.TextClassification.getActions ()Ljava/util/List;
+... I dex2oat : boolean org.chromium.content.browser.selection.SelectionPopupControllerImpl.b(android.view.ActionMode, android.view.Menu): [0xFA] couldn't find method android.view.textclassifier.TextClassification.getActions ()Ljava/util/List;
+...
+```
+
+*** note
+**Note:** you may want to avoid `adb` wrapper scripts (ex.
+`out/Default/bin/chrome_public_apk install`). These scripts cache the package
+manager state to optimize away idempotent installs. However in this case, we
+**do** want to trigger idempotent installs, because we want to re-trigger AOT
+verification.
+***
+
+In the above example, `SelectionPopupControllerImpl` fails verification on
+Oreo (API 26) because it refers to [`TextClassification.getActions()`][5],
+which was added in Pie (API 28). If `SelectionPopupControllerImpl` is used on
+pre-Pie devices, then `TextClassification.getActions()` must be out-of-lined.
+
+## See also
+
+* Bugs or questions? Contact ntfschr@chromium.org
+* ART team's Google I/O talks: [2014](https://youtu.be/EBlTzQsUoOw) and later
+  years
+* Analysis of class verification in Chrome and WebView (Google-only
+  [doc](http://go/class-verification-chromium-analysis))
+* Presentation on class verification in Chrome and WebView (Google-only
+  [slide deck](http://go/class-verification-chromium-slides))
+
+[1]: https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-5.html#jvms-5.5
+[2]: https://developer.android.com/reference/android/view/Window.html#isWideColorGamut()
+[3]: https://bugs.chromium.org/p/chromium/issues/detail?id=838702
+[4]: https://developer.android.com/reference/android/os/Build.VERSION_CODES
+[5]: https://developer.android.com/reference/android/view/textclassifier/TextClassification.html#getActions()
diff --git a/android/docs/coverage.md b/android/docs/coverage.md
new file mode 100644
index 000000000000..c7f3c1ffe6e0
--- /dev/null
+++ b/android/docs/coverage.md
@@ -0,0 +1,85 @@
+# Android code coverage instructions
+
+These are instructions for collecting code coverage data for Android
+instrumentation and JUnit tests. For Clang (C++) code coverage, refer to
+[clang coverage].
+
+[TOC]
+
+## How JaCoCo coverage works
+
+In order to use JaCoCo code coverage, we need to create build-time
+pre-instrumented class files and runtime **.exec** files. Then we need to
+process them using the
+[build/android/generate_jacoco_report.py](https://source.chromium.org/chromium/chromium/src/+/main:build/android/generate_jacoco_report.py)
+script.
+
+## How to collect coverage data
+
+1. Use the following GN build arguments:
+
+   ```gn
+   target_os = "android"
+   use_jacoco_coverage = true
+   ```
+
+   Now when building, pre-instrumented files will be created in the build
+   directory.
+
+2. Run tests, with option `--coverage-dir <directory>`, to specify where to
+   save the `.exec` file. For example, you can run chrome JUnit tests:
+   `out/Debug/bin/run_chrome_junit_tests --coverage-dir /tmp/coverage`.
+
+3. The coverage results of JUnit and instrumentation tests will be merged
+   automatically if they are in the same directory.
+
+## How to generate coverage report
+
+1. Now that we have generated `.exec` files, we can create a JaCoCo
+   HTML/XML/CSV report using `generate_jacoco_report.py`, for example:
+
+   ```shell
+   build/android/generate_jacoco_report.py \
+      --format html \
+      --output-dir /tmp/coverage_report/ \
+      --coverage-dir /tmp/coverage/ \
+      --sources-json-dir out/Debug/
+   ```
+   Then an index.html containing coverage info will be created in the output
+   directory:
+
+   ```
+   [INFO] Loading execution data file /tmp/coverage/testTitle.exec.
+   [INFO] Loading execution data file /tmp/coverage/testSelected.exec.
+   [INFO] Loading execution data file /tmp/coverage/testClickToSelect.exec.
+   [INFO] Loading execution data file /tmp/coverage/testClickToClose.exec.
+   [INFO] Loading execution data file /tmp/coverage/testThumbnail.exec.
+   [INFO] Analyzing 58 classes.
+   ```
+
+2. For XML and CSV reports, we need to specify `--output-file` instead of
+   `--output-dir`, since only one file will be generated as the XML or CSV
+   report.
+   ```shell
+   build/android/generate_jacoco_report.py \
+      --format xml \
+      --output-file /tmp/coverage_report/report.xml \
+      --coverage-dir /tmp/coverage/ \
+      --sources-json-dir out/Debug/
+   ```
+
+   or
+
+   ```shell
+   build/android/generate_jacoco_report.py \
+      --format csv \
+      --output-file /tmp/coverage_report/report.csv \
+      --coverage-dir /tmp/coverage/ \
+      --sources-json-dir out/Debug/
+   ```
+3. If generating coverage and there are duplicate class files, as can happen
+   when generating coverage for downstream targets, use the
+   `--include-substr-filter` option to choose jars in the desired directory,
+   e.g. to generate a coverage report for the Clank internal repo:
+   ```shell
+   build/android/generate_jacoco_report.py --format html \
+      --output-dir /tmp/coverage_report/ --coverage-dir /tmp/coverage/ \
+      --sources-json-dir out/java_coverage/ \
+      --include-substr-filter obj/clank
+   ```
+
+[clang coverage]: https://chromium.googlesource.com/chromium/src/+/HEAD/docs/testing/code_coverage.md
\ No newline at end of file
diff --git a/android/docs/java_asserts.md b/android/docs/java_asserts.md
new file mode 100644
index 000000000000..37d94c1e3a3a
--- /dev/null
+++ b/android/docs/java_asserts.md
@@ -0,0 +1,80 @@
+# Java Asserts in Chromium
+This doc exists to explain how asserts in Java are enabled and disabled by
+Chromium's build system.
+
+## javac Assertion Bytecode
+Whenever javac compiles a Java class, assertions are transformed into the
+following bytecode:
+
+```
+    Code:
+       0: getstatic     #2    // Static field $assertionsDisabled
+       3: ifne          20    // Conditional jump past assertion throw
+      12: new           #3    // Class java/lang/AssertionError
+      19: athrow              // Throwing AssertionError
+      20: return
+
+// NOTE: this static block was made just to check the desiredAssertionStatus.
+// There was no static block on the class before javac created one.
+  static {};
+    Code:
+       2: invokevirtual #6    // Method java/lang/Class.desiredAssertionStatus()
+       5: ifne          12
+       8: iconst_1
+       9: goto          13
+      12: iconst_0
+      13: putstatic     #2    // Static field $assertionsDisabled
+      16: return
+```
+
+TL;DR - every single assertion is gated behind an `$assertionsDisabled` flag
+check, which is a static field that can be set by the JRE's
+`setDefaultAssertionStatus`, `setPackageAssertionStatus`, and
+`setClassAssertionStatus` methods.
+
+## Assertion Enabling/Disabling
+Our tools which consume javac output, namely R8 and D8, each have flags which
+the build system uses to enable or disable asserts. We control this with the
+`enable_java_asserts` gn arg.
+It does this by deleting the gating check on
+`$assertionsDisabled` when enabling asserts, and by eliminating any reference
+to the assert when disabling them.
+
+```java
+// Example equivalents of:
+a = foo();
+assert a != 0;
+return a;
+
+// Traditional, unoptimized javac output.
+a = foo();
+if (!assertionsDisabled && a == 0) {
+  throw new AssertionError();
+}
+return a;
+
+// Optimized with assertions enabled.
+a = foo();
+if (a == 0) {
+  throw new AssertionError();
+}
+return a;
+
+// Optimized with assertions disabled.
+a = foo();
+return a;
+```
+
+## Assertion Enabling on Canary
+Recently we [enabled
+asserts](https://chromium-review.googlesource.com/c/chromium/src/+/3307087) on
+Canary. It spiked our crash rate, and it was decided to not do this again, as
+it's a bad user experience to crash the app incessantly for non-fatal issues.
+
+So, we asked the R8 team for a feature which would rewrite the bytecode of
+these assertions, which they implemented for us. Now, instead of simply
+throwing an `AssertionError` when an assertion fails, [R8 calls a provided
+assertion handler](https://r8.googlesource.com/r8/+/aefe7bc18a7ce19f3e9c6dac0bedf6d182bbe142/src/main/java/com/android/tools/r8/ParseFlagInfoImpl.java#124)
+with the `AssertionError`. We then wrote a [silent assertion
+reporter](https://chromium-review.googlesource.com/c/chromium/src/+/3746261)
+and this reports Java `AssertionErrors` to our crash server without crashing
+the browser.
diff --git a/android/docs/java_optimization.md b/android/docs/java_optimization.md
new file mode 100644
index 000000000000..da10222a4459
--- /dev/null
+++ b/android/docs/java_optimization.md
@@ -0,0 +1,149 @@
+# Optimizing Java Code
+
+This doc describes how Java code is optimized in Chrome on Android and how to
+deal with issues caused by the optimizer. For tips on how to write optimized
+code, see
+[//docs/speed/binary_size/optimization_advice.md#optimizing-java-code](/docs/speed/binary_size/optimization_advice.md#optimizing-java-code).
+
+[TOC]
+
+## ProGuard vs R8
+
+ProGuard is the original open-source tool used by many Android applications
+to perform whole-program bytecode optimization.
+[R8](https://r8.googlesource.com/r8) is a re-implementation that is used by
+Chrome (and is the default for Android Studio). The terms "ProGuard" and "R8"
+are used interchangeably within Chromium, but generally they're meant to
+refer to the tool providing Java code optimizations.
+
+## What does ProGuard do?
+
+1. Shrinking: ProGuard will remove unused code. This is especially useful
+   when depending on third party libraries where only a few functions are
+   used.
+
+2. Obfuscation: ProGuard will rename classes/fields/methods to use shorter
+   names. Obfuscation is used for minification purposes only (not security).
+
+3. Optimization: ProGuard performs a series of optimizations to shrink code
+   further through various approaches (ex. inlining, outlining, class
+   merging, etc).
+
+## Build Process
+
+ProGuard is enabled only for release builds of Chrome because it is a slow
+build step and breaks Java debugging. It can also be enabled manually via the
+GN arg:
+```is_java_debug = false```
+
+### ProGuard configuration files
+
+Most GN Java targets can specify ProGuard configuration files by setting the
+`proguard_configs` variable.
+[//base/android/proguard](/base/android/proguard) contains common flags
+shared by most Chrome applications.
+
+### GN build rules
+
+When `is_java_debug = false` and a target has enabled ProGuard, the `proguard`
+step generates the `.dex` files for the application. The `proguard` step takes
+as input a list of `.jar` files, runs R8/ProGuard on those `.jar` files, and
+produces the final `.dex` file(s) that will be packaged into your `.apk`.
+
+## Deobfuscation
+
+Obfuscation can be turned off for local builds while leaving ProGuard enabled
+by setting `enable_proguard_obfuscation = false` in GN args.
+
+There are two main methods for deobfuscating Java stack traces locally:
+1. Using APK wrapper scripts (stacks are automatically deobfuscated)
+   * `$OUT/bin/chrome_public_apk logcat` # Run adb logcat
+   * `$OUT/bin/chrome_public_apk run` # Launch chrome and run adb logcat
+
+2. Using `java_deobfuscate`
+   * `build/android/stacktrace/java_deobfuscate.py $OUT/apks/ChromePublic.apk.mapping < logcat.txt`
+   * ProGuard mapping files are located beside APKs (ex.
+     `$OUT/apks/ChromePublic.apk` and `$OUT/apks/ChromePublic.apk.mapping`)
+
+Helpful links for deobfuscation:
+
+* [Internal bits about how mapping files are archived][proguard-site]
+* [More detailed deobfuscation instructions][proguard-doc]
+* [Script for deobfuscating official builds][deob-official]
+
+[proguard-site]: http://goto.google.com/chrome-android-proguard
+[proguard-doc]: http://goto.google.com/chromejavadeobfuscation
+[deob-official]: http://goto.google.com/chrome-android-official-deobfuscation
+
+## Debugging common failures
+
+ProGuard failures are often hard to debug. This section aims to outline some
+of the more common errors.
+
+### Classes expected to be discarded
+
+The `-checkdiscard` directive can be used to ensure that certain items are
+removed by ProGuard. A common use of `-checkdiscard` is to ensure that
+ProGuard optimizations do not regress in their ability to remove code, such
+as code intended only for debug builds, or generated JNI classes that are
+meant to be zero-overhead abstractions. Annotating a class with
+[@CheckDiscard][checkdiscard] will add a `-checkdiscard` rule automatically.
+
+[checkdiscard]: /build/android/java/src/org/chromium/build/annotations/CheckDiscard.java
+
+```
+Item void org.chromium.base.library_loader.LibraryPrefetcherJni.<init>() was not discarded.
+void org.chromium.base.library_loader.LibraryPrefetcherJni.<init>()
+|- is invoked from:
+|  void org.chromium.base.library_loader.LibraryPrefetcher.asyncPrefetchLibrariesToMemory()
+... more code path lines
+|- is referenced in keep rule:
+|  obj/chrome/android/chrome_public_apk/chrome_public_apk.resources.proguard.txt:104:1
+
+Error: Discard checks failed.
+```
+
+Things to check:
+  * Did you add code that is referenced by a code path in the error message?
+  * If so, check the original class for why the `CheckDiscard` was added
+    originally and verify that the reason is still valid with your change
+    (may need git blame to do this).
+  * Try the extra debugging steps listed in the JNI section below.
+
+### JNI wrapper classes not discarded
+
+Proxy native methods (`@NativeMethods`) use generated wrapper classes to
+provide access to native methods. We rely on ProGuard to fully optimize the
+generated code so that native methods aren't a source of binary size bloat.
+The above error message is an example of a JNI wrapper class that wasn't
+discarded (notice the name of the offending class).
+  * The ProGuard rule pointed to in the error message isn't helpful (it just
+    tells us a code path that reaches the not-inlined class).
+  * Common causes:
+    * Caching the result of `ClassNameJni.get()` in a member variable.
+    * Passing a native wrapper method reference instead of using a lambda
+      (i.e. `Jni.get()::methodName` vs. `() -> Jni.get.methodName()`).
+  * For more debugging info, add to
+    `base/android/proguard/chromium_code.flags`:
+    ```
+    -whyareyounotinlining class org.chromium.base.library_loader.LibraryPrefetcherJni {
+        <init>();
+    }
+    ```
+
+### Duplicate classes
+
+```
+Type YourClassName is defined multiple times: obj/jar1.jar:YourClassName.class, obj/jar2.jar:YourClassName.class
+```
+
+Common causes:
+  * Multiple targets with overlapping `srcjar_deps`:
+    * Each `.srcjar` can only be depended on by a single Java target in any
+      given APK target. `srcjar_deps` are just a convenient way to depend on
+      generated files and should be treated like source files rather than
+      `deps`.
+    * Solution: Wrap the `srcjar` in an `android_library` target or have only
+      a single Java target depend on the `srcjar` and have other targets
+      depend on the containing Java target instead.
+  * Accidentally enabling APK level generated files for multiple targets that
+    share generated code (ex. Trichrome or App Bundles):
+    * Solution: Make sure the generated file is only added once.
+
+Debugging ProGuard failures isn't easy, so please message java@chromium.org
+or [file a bug](crbug.com/new) with `component=Build os=Android` for any
+issues related to Java code optimization.
diff --git a/android/docs/java_toolchain.md b/android/docs/java_toolchain.md
new file mode 100644
index 000000000000..a9d229d21ad4
--- /dev/null
+++ b/android/docs/java_toolchain.md
@@ -0,0 +1,289 @@
+# Chromium's Java Toolchain
+
+This doc aims to describe the Chrome build process that takes a set of `.java`
+files and turns them into a `classes.dex` file.
+
+[TOC]
+
+## Core GN Target Types
+
+The following have `supports_android` and `requires_android` set to false by
+default:
+* `java_library()`: Compiles `.java` -> `.jar`
+* `java_prebuilt()`: Imports a prebuilt `.jar` file.
+
+The following have `supports_android` and `requires_android` set to true. They
+also have a default `jar_excluded_patterns` set (more on that later):
+* `android_library()`
+* `android_java_prebuilt()`
+
+All target names must end with "_java" so that the build system can
+distinguish them from non-java targets (or [other
+variations](https://cs.chromium.org/chromium/src/build/config/android/internal_rules.gni?rcl=ec2c17d7b4e424e060c3c7972842af87343526a1&l=20)).
+
+Most targets produce two separate `.jar` files:
+* Device `.jar`: Used to produce `.dex.jar`, which is used on-device.
+* Host `.jar`: For use on the host machine (`junit_binary` / `java_binary`).
+  * Host `.jar` files live in `lib.java/` so that they are archived in
+    builder/tester bots (which do not archive `obj/`).
+
+## From Source to Final Dex
+
+### Step 1: Create interface .jar with turbine or ijar
+
+What are interface jars?:
+
+* They contain `.class` files with all private symbols and all method bodies
+  removed.
+* Dependent targets use interface `.jar` files to skip having to be rebuilt
+  when only private implementation details change.
+
+For prebuilt `.jar` files: we use [//third_party/ijar] to create interface
+`.jar` files from the prebuilt ones.
+
+For non-prebuilt `.jar` files: we use [//third_party/turbine] to create
+interface `.jar` files directly from `.java` source files. Turbine is faster
+than javac because it does not compile method bodies.
+Although Turbine causes
+us to compile files twice, it speeds up builds by allowing `javac` compilation
+of targets to happen concurrently with their dependencies. We also use Turbine
+to run our annotation processors.
+
+[//third_party/ijar]: /third_party/ijar/README.chromium
+[//third_party/turbine]: /third_party/turbine/README.chromium
+
+### Step 2a: Compile with javac
+
+This step is the only step that does not apply to prebuilt targets.
+
+* All `.java` files in a target are compiled by `javac` into `.class` files.
+  * This includes `.java` files that live within `.srcjar` files, referenced
+    through `srcjar_deps`.
+* The `classpath` used when compiling a target is comprised of `.jar` files
+  of its deps.
+  * When deps are library targets, the Step 1 `.jar` file is used.
+  * When deps are prebuilt targets, the original `.jar` file is used.
+  * All `.jar` processing done in subsequent steps does not impact
+    compilation classpath.
+* `.class` files are zipped into an output `.jar` file.
+* There is **no support** for incremental compilation at this level.
+  * If one source file changes within a library, then the entire library is
+    recompiled.
+  * Prefer smaller targets to avoid slow compiles.
+
+### Step 2b: Compile with ErrorProne
+
+This step can be disabled via GN arg: `use_errorprone_java_compiler = false`
+
+* Concurrently with step 2a: [ErrorProne] compiles java files and checks for
+  bug patterns, including some [custom to Chromium][ep_plugins].
+* ErrorProne used to replace step 2a, but was changed to a concurrent step
+  after being identified as being slower.
+
+[ErrorProne]: https://errorprone.info/
+[ep_plugins]: /tools/android/errorprone_plugin/
+
+### Step 3: Desugaring (Device .jar Only)
+
+This step happens only when targets have `supports_android = true`. It is not
+applied to `.jar` files used by `junit_binary`.
+
+* `//third_party/bazel/desugar` converts certain Java 8 constructs, such as
+  lambdas and default interface methods, into constructs that are compatible
+  with Java 7.
+
+### Step 4: Instrumenting (Device .jar Only)
+
+This step happens only when this GN arg is set: `use_jacoco_coverage = true`
+
+* [Jacoco] adds instrumentation hooks to methods.
+
+[Jacoco]: https://www.eclemma.org/jacoco/
+
+### Step 5: Filtering
+
+This step happens only for targets that have `jar_excluded_patterns` or
+`jar_included_patterns` set (e.g. all `android_` targets).
+
+* Remove `.class` files that match the filters from the `.jar`. These
+  `.class` files are generally those that are re-created with different
+  implementations further on in the build process.
+  * E.g.: `R.class` files - a part of [Android Resources].
+  * E.g.: `GEN_JNI.class` - a part of our [JNI] glue.
+  * E.g.: `AppHooksImpl.class` - how `chrome_java` wires up different
+    implementations for [non-public builds][apphooks].
+
+[JNI]: /base/android/jni_generator/README.md
+[Android Resources]: life_of_a_resource.md
+[apphooks]: /chrome/android/java/src/org/chromium/chrome/browser/AppHooksImpl.java
+
+### Step 6: Per-Library Dexing
+
+This step happens only when targets have `supports_android = true`.
+
+* [d8] converts `.jar` files containing `.class` files into `.dex.jar` files
+  containing `classes.dex` files.
+* Dexing is incremental - it will reuse dex'ed classes from a previous build
+  if the corresponding `.class` file is unchanged.
+* These per-library `.dex.jar` files are used directly by
+  [incremental install], and are inputs to the Apk step when
+  `enable_proguard = false`.
+  * Even when `is_java_debug = false`, many apk targets do not enable
+    ProGuard (e.g. unit tests).
+
+[d8]: https://developer.android.com/studio/command-line/d8
+[incremental install]: /build/android/incremental_install/README.md
+
+### Step 7: Apk / Bundle Module Compile
+
+* Each `android_apk` and `android_bundle_module` template has a nested
+  `java_library` target. The nested library includes final copies of files
+  stripped out by prior filtering steps. These files include:
+  * Final `R.java` files, created by `compile_resources.py`.
+  * Final `GEN_JNI.java` for [JNI glue].
+  * `BuildConfig.java` and `NativeLibraries.java` (//base dependencies).
+
+[JNI glue]: /base/android/jni_generator/README.md
+
+### Step 8: Final Dexing
+
+This step is skipped when building using [Incremental Install].
+
+When `is_java_debug = true`:
+* [d8] merges all library `.dex.jar` files into a final `.mergeddex.jar`.
+
+When `is_java_debug = false`:
+* [R8] performs whole-program optimization on all library `lib.java` `.jar`
+  files and outputs a final `.r8dex.jar`.
+  * For App Bundles, R8 creates a `.r8dex.jar` for each module.
+
+[Incremental Install]: /build/android/incremental_install/README.md
+[R8]: https://r8.googlesource.com/r8
+
+## Test APKs with apk_under_test
+
+Test APKs are normal APKs that contain an `<instrumentation>` tag within
+their `AndroidManifest.xml`. If this tag specifies an `android:targetPackage`
+different from itself, then Android will add that package's `classes.dex` to
+the test APK's Java classpath when run. In GN, you can enable this behavior
+using the `apk_under_test` parameter on `instrumentation_test_apk` targets.
+Using it is discouraged if APKs have `proguard_enabled=true`.
+
+### Difference in Final Dex
+
+When `enable_proguard=false`:
+* Any library depended on by the test APK that is also depended on by the
+  apk-under-test is excluded from the test APK's final dex step.
+
+When `enable_proguard=true`:
+* Test APKs cannot make use of the apk-under-test's dex because only symbols
+  explicitly kept by `-keep` directives are guaranteed to exist after
+  ProGuarding. As a work-around, test APKs include all of the
+  apk-under-test's libraries directly in their own final dex such that the
+  under-test apk's Java code is never used (because it is entirely shadowed
+  by the test apk's dex).
+  * We've found this configuration to be fragile, and are trying to [move
+    away from it](https://bugs.chromium.org/p/chromium/issues/detail?id=890452).
+
+### Difference in GEN_JNI.java
+* Calling native methods using [JNI glue] requires that a `GEN_JNI.java`
+  class be generated that contains all native methods for an APK. There
+  cannot be conflicting `GEN_JNI` classes in both the test apk and the
+  apk-under-test, so only the apk-under-test has one generated for it. As a
+  result, instrumentation test APKs that use apk-under-test cannot use native
+  methods that aren't already part of the apk-under-test.
+
+## How to Generate Java Source Code
+There are two ways to go about generating source files: Annotation Processors
+and custom build steps.
+
+### Annotation Processors
+* These are run by `javac` as part of the compile step.
+* They **cannot** modify the source files that they apply to. They can only
+  generate new sources.
+* Use these when:
+  * an existing Annotation Processor does what you want
+    (E.g. Dagger, AutoService, etc.), or
+  * you need to understand Java types to do generation.
+
+### Custom Build Steps
+* These use discrete build actions to generate source files (see the sketch
+  after this list).
+  * Some generate `.java` directly, but most generate a zip file of sources
+    (called a `.srcjar`) to simplify the number of inputs / outputs.
+* Examples of existing templates:
+  * `jinja_template`: Generates source files using [Jinja].
+  * `java_cpp_template`: Generates source files using the C preprocessor.
+  * `java_cpp_enum`: Generates `@IntDef`s based on enums within `.h` files.
+  * `java_cpp_strings`: Generates String constants based on strings defined
+    in `.cc` files.
+* Custom build steps are preferred over Annotation Processors because they
+  are generally easier to understand, and can run in parallel with other
+  steps (rather than being tied to compiles).
+
+[Jinja]: https://palletsprojects.com/p/jinja/
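+
+Since custom build steps are typically Python actions that bundle generated
+sources into a `.srcjar`, the core of such an action can be shown in a short
+sketch. This is a minimal, hypothetical example (the file names, paths, and
+generated class are made up), not one of the real templates listed above:
+
+```python
+import zipfile
+
+
+def write_srcjar(srcjar_path, java_sources):
+  """Zips generated .java sources into a .srcjar (illustrative sketch).
+
+  java_sources: dict mapping in-zip paths to .java file contents.
+  """
+  with zipfile.ZipFile(srcjar_path, 'w', zipfile.ZIP_DEFLATED) as srcjar:
+    for path, content in sorted(java_sources.items()):
+      srcjar.writestr(path, content)
+
+
+write_srcjar('Constants.srcjar', {
+    'org/chromium/example/Constants.java':
+        'package org.chromium.example;\n'
+        'public class Constants { public static final int VERSION = 1; }\n',
+})
+```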
+
+## Static Analysis & Code Checks
+
+We use several tools for static analysis.
+
+### [ErrorProne](https://errorprone.info/)
+* Runs as part of normal compilation. Controlled by GN arg:
+  `use_errorprone_java_compiler`.
+* Most useful check:
+  * Enforcement of `@GuardedBy` annotations.
+* List of enabled / disabled checks exists [within
+  compile_java.py](https://cs.chromium.org/chromium/src/build/android/gyp/compile_java.py?l=30)
+  * Many checks are currently disabled because there is work involved in
+    fixing violations they introduce. Please help!
+* Custom checks for Chrome:
+  * [//tools/android/errorprone_plugin/src/org/chromium/tools/errorprone/plugin/](/tools/android/errorprone_plugin/src/org/chromium/tools/errorprone/plugin/)
+* Use ErrorProne checks when you need something more sophisticated than
+  pattern matching.
+* Checks run on the entire codebase, not only on changed lines.
+* Does not run when `chromium_code = false` (e.g. for //third_party).
+
+### [Android Lint](https://developer.android.com/studio/write/lint)
+* Runs as part of normal compilation. Controlled by GN arg:
+  `disable_android_lint`
+* Most useful check:
+  * Enforcing `@TargetApi` annotations (ensure you don't call a function that
+    does not exist on all versions of Android unless guarded by a version
+    check).
+* List of disabled checks:
+  * [//build/android/lint/suppressions.xml](/build/android/lint/suppressions.xml)
+* Custom lint checks [are possible][lint_plugins], but we don't have any.
+* Checks run on the entire codebase, not only on changed lines.
+* Does not run when `chromium_code = false` (e.g. for //third_party).
+
+[lint_plugins]: http://tools.android.com/tips/lint-custom-rules
+
+### [Bytecode Processor](/build/android/bytecode/)
+* Performs a single check:
+  * That target `deps` are not missing any entries.
+  * In other words: Enforces that targets do not rely on indirect
+    dependencies to populate their classpath.
+* Checks run on the entire codebase, not only on changed lines.
+* This is the only static analysis that runs on prebuilt `.jar` files.
+* The same tool is also used for [bytecode
+  rewriting](/docs/ui/android/bytecode_rewriting.md).
+
+### [PRESUBMIT.py](/PRESUBMIT.py):
+* Checks for banned patterns via `_BANNED_JAVA_FUNCTIONS`.
+  * (These should likely be moved to checkstyle).
+* Checks for a random set of things in `ChecksAndroidSpecificOnUpload()`.
+  * Including running Checkstyle.
+  * (Some of these other checks should likely also be moved to checkstyle).
+* Checks run only on changed lines.
+
+### [Checkstyle](https://checkstyle.sourceforge.io/)
+* Checks Java style rules that are not covered by clang-format.
+  * E.g.: Unused imports and naming conventions.
+* Allows custom checks to be added via XML. Here [is ours].
+* Preferred over adding checks directly in PRESUBMIT.py because the tool
+  understands `@SuppressWarnings` annotations.
+* Checks run only on changed lines.
+
+[is ours]: /tools/android/checkstyle/chromium-style-5.0.xml
+
+### [clang-format](https://clang.llvm.org/docs/ClangFormat.html)
+* Formats `.java` files via `git cl format`.
+* Can be toggled on/off with code comments.
+  ```java
+  // clang-format off
+  ... non-formatted code here ...
+  // clang-format on
+  ```
+* Does not work great for multiple annotations or on some lambda expressions,
+  but it is generally agreed that it is better than not having it at all.
diff --git a/android/docs/life_of_a_resource.md b/android/docs/life_of_a_resource.md
new file mode 100644
index 000000000000..5e46ef66af27
--- /dev/null
+++ b/android/docs/life_of_a_resource.md
@@ -0,0 +1,289 @@
+# Life of an Android Resource
+
+[TOC]
+
+## Overview
+
+This document describes how [Android Resources][android resources]
+are built in Chromium's build system. It does not mention native resources,
+which are [processed differently][native resources].
+
+[android resources]: https://developer.android.com/guide/topics/resources/providing-resources
+[native resources]: https://www.chromium.org/developers/tools-we-use-in-chromium/grit/grit-users-guide
+
+The steps consume the following files as inputs:
+* `AndroidManifest.xml`
+  * Including `AndroidManifest.xml` files from libraries, which get merged
+    together
+* res/ directories
+
+The steps produce the following intermediate files:
+* `R.srcjar` (contains `R.java` files)
+* `R.txt`
+* `.resources.zip`
+
+The steps produce the following files within an `.apk`:
+* `AndroidManifest.xml` (a binary xml file)
+* `resources.arsc` (contains all values and configuration metadata)
+* `res/**` (drawables and layouts)
+* `classes.dex` (just a small portion of classes from generated `R.java`
+  files)
+
+
+## The Build Steps
+
+Whenever you try to compile an apk or library target, resources go through
+the following steps:
+
+### 1. Constructs .build\_config files:
+
+Inputs:
+* GN target metadata
+* Other `.build_config.json` files
+
+Outputs:
+* Target-specific `.build_config.json` file
+
+`write_build_config.py` is run to record target metadata needed by future
+steps. For more details, see [build_config.md](build_config.md).
+
+
+### 2. Prepares resources:
+
+Inputs:
+* Target-specific `.build_config.json` file
+* Files listed as `sources`
+
+Outputs:
+* Target-specific `resources.zip` (contains all resources listed in
+  `sources`).
+* Target-specific `R.txt` (list of all resources, including dependencies).
+
+`prepare_resources.py` zips up the target-specific resource files and
+generates `R.txt`. No optimizations, crunching, etc are done on the
+resources.
+
+**The following steps apply only to apk & bundle targets (not to library
+targets).**
+
+### 3. Create target-specific R.java files
+
+Inputs:
+* `R.txt` from dependencies.
+
+Outputs:
+* Target-specific (placeholder) `R.java` file.
+
+A target-specific `R.java` is generated for each `android_library()` target
+that sets `resources_package`. Resource IDs are not known at this phase, so
+all values are set as placeholders. This copy of the `R` classes is discarded
+and replaced with new copies at step 4.
+
+Example placeholder R.java file:
+```java
+package org.chromium.mypackage;
+
+public final class R {
+    public static class anim {
+        public static int abc_fade_in = 0;
+        public static int abc_fade_out = 0;
+        ...
+    }
+    ...
+}
+```
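+
+A placeholder like the one above is straightforward to emit from the `R.txt`
+entries of a target's dependencies. The helper below is a minimal illustrative
+sketch of that idea (the function name is made up; the real logic lives in the
+resource-processing scripts under `build/android/gyp/`):
+
+```python
+def write_placeholder_r_java(package, entries):
+  """Renders placeholder R.java source from (type, name) pairs (sketch)."""
+  by_type = {}
+  for resource_type, name in entries:
+    by_type.setdefault(resource_type, []).append(name)
+  lines = ['package %s;' % package, '', 'public final class R {']
+  for resource_type, names in sorted(by_type.items()):
+    lines.append('    public static class %s {' % resource_type)
+    for name in sorted(names):
+      # Real IDs are unknown at this phase, so every field is just 0.
+      lines.append('        public static int %s = 0;' % name)
+    lines.append('    }')
+  lines.append('}')
+  return '\n'.join(lines) + '\n'
+
+
+print(write_placeholder_r_java(
+    'org.chromium.mypackage',
+    [('anim', 'abc_fade_in'), ('anim', 'abc_fade_out')]))
+```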
+### 4. Finalizes apk resources:
+
+Inputs:
+* Target-specific `.build_config.json` file
+* Dependencies' `R.txt` files
+* Dependencies' `resources.zip` files
+
+Output:
+* Packaged `resources zip` (named `foo.ap_`) containing:
+  * `AndroidManifest.xml` (as binary xml)
+  * `resources.arsc`
+  * `res/**`
+* Final `R.txt`
+  * Contains a list of resources and their ids (including those of
+    dependencies).
+* Final `R.java` files
+  * See [How `R.java` files are generated](#how-r_java-files-are-generated)
+
+
+#### 4(a). Compiles resources:
+
+For each library / resources target your apk depends on, the following
+happens:
+* Use a regex (defined in the apk target) to remove select resources
+  (optional).
+* Convert png images to webp for binary size (optional).
+* Move drawables in mdpi to non-mdpi directory
+  ([why?](http://crbug.com/289843))
+* Use `aapt2 compile` to compile xml resources to binary xml (references to
+  other resources will now use the id rather than the name for faster lookup
+  at runtime).
+* `aapt2 compile` adds headers/metadata to 9-patch images about which parts
+  of the image are stretchable vs static.
+* `aapt2 compile` outputs a zip with the compiled resources (one for each
+  dependency).
+
+
+#### 4(b). Links resources:
+
+After each dependency is compiled into an intermediate `.zip`, all those zips
+are linked by the `aapt2 link` command, which does the following:
+* Uses the supplied dependency order so that, when resource names collide,
+  later definitions clobber earlier ones.
+* Compiles the `AndroidManifest.xml` to binary xml (references to resources
+  now use ids rather than the string names).
+* Creates a `resources.arsc` file that has the name and values of string
+  resources as well as the name and path of non-string resources (i.e.
+  layouts and drawables).
+* Combines the compiled resources into one packaged resources apk (a zip
+  file with an `.ap_` extension) that has all the resource-related files.
+
+
+#### 4(c). Optimizes resources:
+
+Targets can opt into the following optimizations:
+1) Resource name collapsing: Maps all resources to the same name. Access to
+   resources via `Resources.getIdentifier()` no longer works unless resources
+   are [allowlisted](#adding-resources-to-the-allowlist).
+2) Resource filename obfuscation: Renames resource file paths from e.g.
+   `res/drawable/something.png` to `res/a`. The rename mapping is stored
+   alongside APKs / bundles in a `.pathmap` file. Renames are based on
+   hashes, and so are stable between builds (unless a new hash collision
+   occurs).
+3) Unused resource removal: Referenced resources are extracted from the
+   optimized `.dex` and `AndroidManifest.xml`. Resources that are not
+   directly or indirectly used by these files are removed.
+
+## App Bundles and Modules:
+
+Processing resources for bundles and modules is slightly different. Each
+module has its resources compiled and linked separately (i.e. each module
+goes through the entire process on its own). The modules are then combined
+to form a bundle. Moreover, during the "Finalizes apk resources" step,
+bundle modules produce a `resources.proto` file instead of a
+`resources.arsc` file.
+
+Resources in a dynamic feature module may reference resources in the base
+module. During the link step for feature module resources, the linked
+resources of the base module are passed in. However, linking against
+resources currently works only with the `resources.arsc` format. Thus, when
+building the base module, resources are compiled as both `resources.arsc`
+and `resources.proto`.
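+
+To sanity-check what these steps produced, note that the packaged resources
+file is an ordinary zip archive. Below is a minimal sketch (the `.ap_` path
+is hypothetical; look under your own output directory) that lists its entries
+using only the Python standard library:
+
+```python
+import zipfile
+
+# Hypothetical path to a packaged resources zip produced by the
+# "Finalizes apk resources" step; the real path depends on your target.
+AP_PATH = 'out/Debug/obj/chrome/android/chrome_public_apk.ap_'
+
+with zipfile.ZipFile(AP_PATH) as ap:
+  for info in ap.infolist():
+    # Expect AndroidManifest.xml, resources.arsc (resources.proto for
+    # bundle modules), and res/** entries.
+    print('%10d %s' % (info.file_size, info.filename))
+```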
+
+## Debugging resource related errors when resource names are obfuscated
+
+An example message from a stacktrace could be something like this:
+```
+java.lang.IllegalStateException: Could not find CoordinatorLayout descendant
+view with id org.chromium.chrome:id/0_resource_name_obfuscated to anchor view
+android.view.ViewStub{be192d5 G.E...... ......I. 0,0-0,0 #7f0a02ad
+app:id/0_resource_name_obfuscated}
+```
+
+`0_resource_name_obfuscated` is the resource name for all resources that had
+their name obfuscated/stripped during the optimize resources step. To help
+with debugging, the `R.txt` file is archived. The `R.txt` file contains a
+mapping from resource ids to resource names and can be used to recover the
+original resource name from the id. In the above message the id is
+`0x7f0a02ad`.
+
+For local builds, `R.txt` files are output in the `out/*/apks` directory.
+
+For official builds, Googlers can get archived `R.txt` files next to
+archived apks.
+
+### Adding resources to the allowlist
+
+If a resource is accessed via `getIdentifier()` it needs to be allowed by an
+aapt2 resources config file. The config file looks like this:
+
+```
+<type>/<name>#no_obfuscate
+```
+e.g.:
+```
+string/app_name#no_obfuscate
+id/toolbar#no_obfuscate
+```
+
+The aapt2 config file is passed to the ninja target through the
+`resources_config_paths` variable. To add a resource to the allowlist, check
+where the config is for your target and add a new line for your resource. If
+none exists, create a new config file and pass its path in your target.
+
+### Webview resource ids
+
+The first byte of a resource id is the package id. For regular apks, this is
+`0x7f`. However, webview is a shared library which gets loaded into other
+apks, so the package id for webview resources is assigned dynamically at
+runtime. When webview is loaded, it calls the `onResourcesLoaded()` function
+of this [R file][Base Module R.java File] so that its resources get the
+correct package id. When deobfuscating webview resource ids, disregard the
+package id byte when looking the id up in the `R.txt` file.
+
+Monochrome, when loaded as webview, rewrites the package ids of resources
+used by the webview portion to the correct value at runtime. Otherwise, when
+run as a regular apk, its resources have package id `0x7f`.
+
+[Base Module R.java File]: https://cs.chromium.org/chromium/src/out/android-Debug/gen/android_webview/system_webview_apk/generated_java/gen/base_module/R.java
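+
+The lookup described above is easy to script. Here is a minimal sketch (the
+`R.txt` path and the command-line handling are hypothetical) that maps an id
+from a stack trace back to a resource name, falling back to replacing the
+package byte with `0x7f` so it also works for webview ids:
+
+```python
+import re
+import sys
+
+# Hypothetical path; for local builds look under out/*/apks.
+R_TXT_PATH = 'out/Release/apks/ChromePublic.apk.R.txt'
+
+def load_id_map(path):
+  # R.txt lines look like: "int string app_name 0x7f130001".
+  id_map = {}
+  with open(path) as f:
+    for line in f:
+      m = re.match(r'int (\w+) (\w+) (0x[0-9a-fA-F]+)$', line.rstrip())
+      if m:
+        id_map[int(m.group(3), 16)] = '%s/%s' % (m.group(1), m.group(2))
+  return id_map
+
+res_id = int(sys.argv[1], 16)  # E.g. "7f0a02ad" from the stack trace above.
+id_map = load_id_map(R_TXT_PATH)
+# Fall back to swapping in the static 0x7f package byte for webview ids.
+name = id_map.get(res_id) or id_map.get((res_id & 0xFFFFFF) | 0x7F000000)
+print(name or 'unknown id')
+```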
+
+## How R.java files are generated
+
+`R.java` files contain a set of nested static classes, each with static
+fields containing resource ids. These ids are used in Java code to reference
+resources in the apk.
+
+There are three types of `R.java` files in Chrome.
+1. Root / Base Module `R.java` Files
+2. DFM `R.java` Files
+3. Per-Library `R.java` Files
+
+### Root / Base Module `R.java` Files
+Contain base android resources. All `R.java` files can access base module
+resources through inheritance.
+
+Example Root / Base Module `R.java` File:
+```java
+package gen.base_module;
+
+public final class R {
+  public static class anim {
+    public static final int abc_fade_in = 0x7f010000;
+    public static final int abc_fade_out = 0x7f010001;
+    public static final int abc_slide_in_top = 0x7f010007;
+  }
+  public static class animator {
+    public static final int design_appbar_state_list_animator = 0x7f020000;
+  }
+}
+```
+
+### DFM `R.java` Files
+Extend the base module's root `R.java` files. This allows DFMs to access
+their own resources as well as the base module's resources.
+
+Example DFM Root `R.java` File:
+```java
+package gen.vr_module;
+
+public final class R {
+  public static class anim extends gen.base_module.R.anim {
+  }
+  public static class animator extends gen.base_module.R.animator {
+    public static final int design_appbar_state_list_animator = 0x7f030000;
+  }
+}
+```
+
+### Per-Library `R.java` Files
+Generated for each `android_library()` target that sets `resources_package`.
+First a placeholder copy is generated in the `android_library()` step, and
+then a final copy is created during finalization.
+
+Example final per-library `R.java`:
+```java
+package org.chromium.chrome.vr;
+
+public final class R {
+  public static final class anim extends
+      gen.vr_module.R.anim {}
+  public static final class animator extends
+      gen.vr_module.R.animator {}
+}
+```
diff --git a/android/docs/lint.md b/android/docs/lint.md
new file mode 100644
index 000000000000..e97fd76f4caf
--- /dev/null
+++ b/android/docs/lint.md
@@ -0,0 +1,132 @@
+# Lint
+
+Android's [**lint**](https://developer.android.com/tools/help/lint.html) is a
+static analysis tool that Chromium uses to catch possible issues in Java code.
+
+This is a list of [**checks**](http://tools.android.com/tips/lint-checks) that
+you might encounter.
+
+[TOC]
+
+## How Chromium uses lint
+
+Chromium only runs lint on apk or bundle targets that explicitly set
+`enable_lint = true`. Some example targets that have this set are:
+
+ - `//chrome/android:monochrome_public_bundle`
+ - `//android_webview/support_library/boundary_interfaces:boundary_interface_example_apk`
+ - `//remoting/android:remoting_apk`
+
+## My code has a lint error
+
+If lint reports an issue in your code, there are several possible remedies.
+In descending order of preference:
+
+### Fix it
+
+While this isn't always the right response, fixing the lint error or warning
+should be the default.
+
+### Suppress it locally
+
+Java provides an annotation,
+[`@SuppressWarnings`](https://developer.android.com/reference/java/lang/SuppressWarnings),
+that tells lint to ignore the annotated element. It can be used on classes,
+constructors, methods, parameters, fields, or local variables, though usage
+in Chromium is typically limited to the first three. You do not need to
+import it since it is in the `java.lang` package.
+
+Like many suppression annotations, `@SuppressWarnings` takes a value that
+tells **lint** what to ignore. It can be a single `String`:
+
+```java
+@SuppressWarnings("NewApi")
+public void foo() {
+    a.methodThatRequiresHighSdkLevel();
+}
+```
+
+It can also be a list of `String`s:
+
+```java
+@SuppressWarnings({
+        "NewApi",
+        "UseSparseArrays"
+        })
+public Map bar() {
+    Map shouldBeASparseArray = new HashMap();
+    another.methodThatRequiresHighSdkLevel(shouldBeASparseArray);
+    return shouldBeASparseArray;
+}
+```
+
+For resource xml files you can use `tools:ignore`:
+
+```xml
+<?xml version="1.0" encoding="utf-8"?>
+<resources xmlns:tools="http://schemas.android.com/tools">
+    <color name="unused_color" tools:ignore="UnusedResources">@color/unused</color>
+</resources>
+```
+
+The examples above are the recommended ways of suppressing lint warnings.
+
+### Suppress it in a `lint-suppressions.xml` file
+
+**lint** can be given a per-target XML configuration file containing warnings
+or errors that should be ignored. Each target defines its own configuration
+file via the `lint_suppressions_file` gn variable. It is usually defined near
+its `enable_lint` gn variable.
+
+These suppressions files should only be used for temporarily ignoring
+warnings that are too hard (or not possible) to suppress locally, or for
+permanently ignoring warnings that apply only to this target.
+To permanently ignore a warning for all targets, add the warning to the
+`_DISABLED_ALWAYS` list in
+[build/android/gyp/lint.py](https://source.chromium.org/chromium/chromium/src/+/main:build/android/gyp/lint.py).
+Disabling globally makes lint a bit faster.
+
+The exception to the above rule is for warnings that affect multiple
+languages. Feel free to suppress those in lint-suppressions.xml files, since
+it is not practical to suppress them in each language file and listing out
+every language for every violation in lint-baseline.xml files adds a lot of
+bloat.
+
+Here is an example of how to structure a suppressions XML file:
+
+```xml
+<?xml version="1.0" encoding="utf-8" ?>
+<lint>
+  <!-- Ignore this check entirely. -->
+  <issue id="UselessParent" severity="ignore"/>
+  <!-- Ignore this check for files whose paths match the regexp. -->
+  <issue id="ObsoleteLayoutParam">
+    <ignore regexp="path/to/exceptional/file.*"/>
+  </issue>
+</lint>
+```
+
+## What are `lint-baseline.xml` files for?
+
+Baseline files are to help us introduce new lint warnings and errors without
+blocking on fixing all our existing code that violates these new errors.
+Since they are generated files, they should **not** be used to suppress lint
+warnings. One of the approaches above should be used instead. Eventually all
+the errors in baseline files should be either fixed or ignored permanently.
+
+Most devs do not need to update baseline files and should not need the
+script below. Occasionally when making large build configuration changes it
+may be necessary to update baseline files (e.g. when increasing the
+min_sdk_version).
+
+Baseline files are defined via the `lint_baseline_file` gn variable. It is
+usually defined near a target's `enable_lint` gn variable. To regenerate all
+baseline files, run:
+
+```
+$ third_party/android_build_tools/lint/rebuild_baselines.py
+```
+
+This script will also update baseline files in downstream //clank if needed.
+Since downstream and upstream use separate lint binaries, it is usually safe
+to simply land the update CLs in any order.
\ No newline at end of file
diff --git a/android/download_doclava.py b/android/download_doclava.py
new file mode 100755
index 000000000000..04db084fcb98
--- /dev/null
+++ b/android/download_doclava.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python3
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Minimal tool to download doclava from Google storage when building for
+Android."""
+
+import os
+import subprocess
+import sys
+
+
+def main():
+  # Some Windows bots inadvertently have third_party/android_sdk installed,
+  # but are unable to run download_from_google_storage because depot_tools
+  # is not in their path, so avoid failure and bail.
+  if sys.platform == 'win32':
+    return 0
+  subprocess.check_call([
+      'download_from_google_storage',
+      '--no_resume',
+      '--no_auth',
+      '--bucket', 'chromium-doclava',
+      '--extract',
+      '-s',
+      os.path.join(os.path.dirname(__file__), '..', '..', 'buildtools',
+                   'android', 'doclava.tar.gz.sha1')])
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/android/dump_apk_resource_strings.py b/android/dump_apk_resource_strings.py
new file mode 100755
index 000000000000..962103e01938
--- /dev/null
+++ b/android/dump_apk_resource_strings.py
@@ -0,0 +1,662 @@
+#!/usr/bin/env vpython3
+# encoding: utf-8
+# Copyright 2019 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+ +"""A script to parse and dump localized strings in resource.arsc files.""" + + +import argparse +import collections +import contextlib +import cProfile +import os +import re +import subprocess +import sys +import zipfile + +# pylint: disable=bare-except + +# Assuming this script is located under build/android, try to import +# build/android/gyp/bundletool.py to get the default path to the bundletool +# jar file. If this fail, using --bundletool-path will be required to parse +# bundles, allowing this script to be relocated or reused somewhere else. +try: + sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'gyp')) + import bundletool + + _DEFAULT_BUNDLETOOL_PATH = bundletool.BUNDLETOOL_JAR_PATH +except: + _DEFAULT_BUNDLETOOL_PATH = None + +# Try to get the path of the aapt build tool from catapult/devil. +try: + import devil_chromium # pylint: disable=unused-import + from devil.android.sdk import build_tools + _AAPT_DEFAULT_PATH = build_tools.GetPath('aapt') +except: + _AAPT_DEFAULT_PATH = None + + +def AutoIndentStringList(lines, indentation=2): + """Auto-indents a input list of text lines, based on open/closed braces. + + For example, the following input text: + + 'Foo {', + 'Bar {', + 'Zoo', + '}', + '}', + + Will return the following: + + 'Foo {', + ' Bar {', + ' Zoo', + ' }', + '}', + + The rules are pretty simple: + - A line that ends with an open brace ({) increments indentation. + - A line that starts with a closing brace (}) decrements it. + + The main idea is to make outputting structured text data trivial, + since it can be assumed that the final output will be passed through + this function to make it human-readable. + + Args: + lines: an iterator over input text lines. They should not contain + line terminator (e.g. '\n'). + Returns: + A new list of text lines, properly auto-indented. + """ + margin = '' + result = [] + # NOTE: Intentional but significant speed optimizations in this function: + # - |line and line[0] == | instead of |line.startswith()|. + # - |line and line[-1] == | instead of |line.endswith()|. + for line in lines: + if line and line[0] == '}': + margin = margin[:-indentation] + result.append(margin + line) + if line and line[-1] == '{': + margin += ' ' * indentation + + return result + + +# pylint: disable=line-too-long + +# NOTE: aapt dump will quote the following characters only: \n, \ and " +# see https://cs.android.com/search?q=f:ResourceTypes.cpp + +# pylint: enable=line-too-long + + +def UnquoteString(s): + """Unquote a given string from aapt dump. + + Args: + s: An UTF-8 encoded string that contains backslashes for quotes, as found + in the output of 'aapt dump resources --values'. + Returns: + The unquoted version of the input string. + """ + if not '\\' in s: + return s + + result = '' + start = 0 + size = len(s) + while start < size: + pos = s.find('\\', start) + if pos < 0: + break + + result += s[start:pos] + count = 1 + while pos + count < size and s[pos + count] == '\\': + count += 1 + + result += '\\' * (count // 2) + start = pos + count + if count & 1: + if start < size: + ch = s[start] + if ch == 'n': # \n is the only non-printable character supported. 
+
+
+# pylint: disable=line-too-long
+
+# NOTE: aapt dump will quote the following characters only: \n, \ and "
+# see https://cs.android.com/search?q=f:ResourceTypes.cpp
+
+# pylint: enable=line-too-long
+
+
+def UnquoteString(s):
+  """Unquote a given string from aapt dump.
+
+  Args:
+    s: A UTF-8 encoded string that contains backslashes for quotes, as found
+      in the output of 'aapt dump resources --values'.
+  Returns:
+    The unquoted version of the input string.
+  """
+  if not '\\' in s:
+    return s
+
+  result = ''
+  start = 0
+  size = len(s)
+  while start < size:
+    pos = s.find('\\', start)
+    if pos < 0:
+      break
+
+    result += s[start:pos]
+    count = 1
+    while pos + count < size and s[pos + count] == '\\':
+      count += 1
+
+    result += '\\' * (count // 2)
+    start = pos + count
+    if count & 1:
+      if start < size:
+        ch = s[start]
+        if ch == 'n':  # \n is the only non-printable character supported.
+          ch = '\n'
+        result += ch
+        start += 1
+      else:
+        result += '\\'
+
+  result += s[start:]
+  return result
+
+
+assert UnquoteString(r'foo bar') == 'foo bar'
+assert UnquoteString(r'foo\nbar') == 'foo\nbar'
+assert UnquoteString(r'foo\\nbar') == 'foo\\nbar'
+assert UnquoteString(r'foo\\\nbar') == 'foo\\\nbar'
+assert UnquoteString(r'foo\n\nbar') == 'foo\n\nbar'
+assert UnquoteString(r'foo\\bar') == r'foo\bar'
+
+
+def QuoteString(s):
+  """Quote a given string for external output.
+
+  Args:
+    s: An input UTF-8 encoded string.
+  Returns:
+    A quoted version of the string, using the same rules as 'aapt dump'.
+  """
+  # NOTE: Using repr() would escape all non-ASCII bytes in the string, which
+  # is undesirable.
+  return s.replace('\\', r'\\').replace('"', '\\"').replace('\n', '\\n')
+
+
+assert QuoteString(r'foo "bar"') == 'foo \\"bar\\"'
+assert QuoteString('foo\nbar') == 'foo\\nbar'
+
+
+def ReadStringMapFromRTxt(r_txt_path):
+  """Read all string resource IDs and names from an R.txt file.
+
+  Args:
+    r_txt_path: Input file path.
+  Returns:
+    A {res_id -> res_name} dictionary corresponding to the string resources
+    from the input R.txt file.
+  """
+  # NOTE: Typical line of interest looks like:
+  # int string AllowedDomainsForAppsTitle 0x7f130001
+  result = {}
+  prefix = 'int string '
+  with open(r_txt_path) as f:
+    for line in f:
+      line = line.rstrip()
+      if line.startswith(prefix):
+        res_name, res_id = line[len(prefix):].split(' ')
+        result[int(res_id, 0)] = res_name
+  return result
+
+
+class ResourceStringValues:
+  """Models all possible values for a named string."""
+
+  def __init__(self):
+    self.res_name = None
+    self.res_values = {}
+
+  def AddValue(self, res_name, res_config, res_value):
+    """Add a new value to this entry.
+
+    Args:
+      res_name: Resource name. If this is not the first time this method
+        is called with the same resource name, then |res_name| should match
+        previous parameters for sanity checking.
+      res_config: Config associated with this value. This can actually be
+        anything that can be converted to a string.
+      res_value: UTF-8 encoded string value.
+    """
+    if res_name is not self.res_name and res_name != self.res_name:
+      if self.res_name is None:
+        self.res_name = res_name
+      else:
+        # Sanity check: the resource name should be the same for all chunks.
+        # Resource ID is redefined with a different name!!
+        print('WARNING: Resource key ignored (%s, should be %s)' %
+              (res_name, self.res_name))
+
+    if self.res_values.setdefault(res_config, res_value) is not res_value:
+      print('WARNING: Duplicate value definition for [config %s]: %s '
+            '(already has %s)' % (
+                res_config, res_value, self.res_values[res_config]))
+
+  def ToStringList(self, res_id):
+    """Convert entry to string list for human-friendly output."""
+    values = sorted([(str(config), value)
+                     for config, value in self.res_values.items()])
+    if res_id is None:
+      # res_id will be None when the resource ID should not be part
+      # of the output.
+      result = ['name=%s count=%d {' % (self.res_name, len(values))]
+    else:
+      result = [
+          'res_id=0x%08x name=%s count=%d {' % (res_id, self.res_name,
+                                                len(values))
+      ]
+    for config, value in values:
+      result.append('%-16s "%s"' % (config, QuoteString(value)))
+    result.append('}')
+    return result
+
+
+class ResourceStringMap:
+  """Convenience class to hold the set of all localized strings in a table.
+
+  Usage is the following:
+    1) Create new (empty) instance.
+    2) Call AddValue() repeatedly to add new values.
+    3) Eventually call RemapResourceNames() to remap resource names.
+ 4) Call ToStringList() to convert the instance to a human-readable + list of strings that can later be used with AutoIndentStringList() + for example. + """ + + def __init__(self): + self._res_map = collections.defaultdict(ResourceStringValues) + + def AddValue(self, res_id, res_name, res_config, res_value): + self._res_map[res_id].AddValue(res_name, res_config, res_value) + + def RemapResourceNames(self, id_name_map): + """Rename all entries according to a given {res_id -> res_name} map.""" + for res_id, res_name in id_name_map.items(): + if res_id in self._res_map: + self._res_map[res_id].res_name = res_name + + def ToStringList(self, omit_ids=False): + """Dump content to a human-readable string list. + + Note that the strings are ordered by their resource name first, and + resource id second. + + Args: + omit_ids: If True, do not put resource IDs in the result. This might + be useful when comparing the outputs of two different builds of the + same APK, or two related APKs (e.g. ChromePublic.apk vs Chrome.apk) + where the resource IDs might be slightly different, but not the + string contents. + Return: + A list of strings that can later be sent to AutoIndentStringList(). + """ + result = ['Resource strings (count=%d) {' % len(self._res_map)] + res_map = self._res_map + + # Compare two (res_id, values) tuples by resource name first, then resource + # ID. + for res_id, _ in sorted(res_map.items(), + key=lambda x: (x[1].res_name, x[0])): + result += res_map[res_id].ToStringList(None if omit_ids else res_id) + result.append('} # Resource strings') + return result + + +@contextlib.contextmanager +def ManagedOutput(output_file): + """Create an output File object that will be closed on exit if necessary. + + Args: + output_file: Optional output file path. + Yields: + If |output_file| is empty, this simply yields sys.stdout. Otherwise, this + opens the file path for writing text, and yields its File object. The + context will ensure that the object is always closed on scope exit. + """ + close_output = False + if output_file: + output = open(output_file, 'wt') + close_output = True + else: + output = sys.stdout + try: + yield output + finally: + if close_output: + output.close() + + +@contextlib.contextmanager +def ManagedPythonProfiling(enable_profiling, sort_key='tottime'): + """Enable Python profiling if needed. + + Args: + enable_profiling: Boolean flag. True to enable python profiling. + sort_key: Sorting key for the final stats dump. + Yields: + If |enable_profiling| is False, this yields False. Otherwise, this + yields a new Profile instance just after enabling it. The manager + ensures that profiling stops and prints statistics on scope exit. + """ + pr = None + if enable_profiling: + pr = cProfile.Profile() + pr.enable() + try: + yield pr + finally: + if pr: + pr.disable() + pr.print_stats(sort=sort_key) + + +def IsFilePathABundle(input_file): + """Return True iff |input_file| holds an Android app bundle.""" + try: + with zipfile.ZipFile(input_file) as input_zip: + _ = input_zip.getinfo('BundleConfig.pb') + return True + except: + return False + + +# Example output from 'bundletool dump resources --values' corresponding +# to strings: +# +# 0x7F1200A0 - string/abc_action_menu_overflow_description +# (default) - [STR] "More options" +# locale: "ca" - [STR] "Més opcions" +# locale: "da" - [STR] "Flere muligheder" +# locale: "fa" - [STR] " گزینههای بیشتر" +# locale: "ja" - [STR] "その他のオプション" +# locale: "ta" - [STR] "மேலும் விருப்பங்கள்" +# locale: "nb" - [STR] "Flere alternativer" +# ... 
+#
+# Fun fact #1: Bundletool uses <lang>-<REGION> instead of <lang>-r<REGION>
+# for locales!
+#
+# Fun fact #2: The <200c> is terminal output for \u200c, the output is
+# really UTF-8 encoded when it is read by this script.
+#
+# Fun fact #3: Bundletool quotes \n, \\ and \" just like aapt since 0.8.0.
+#
+_RE_BUNDLE_STRING_RESOURCE_HEADER = re.compile(
+    r'^0x([0-9A-F]+)\s\-\sstring/(\w+)$')
+assert _RE_BUNDLE_STRING_RESOURCE_HEADER.match(
+    '0x7F1200A0 - string/abc_action_menu_overflow_description')
+
+_RE_BUNDLE_STRING_DEFAULT_VALUE = re.compile(
+    r'^\s+\(default\) - \[STR\] "(.*)"$')
+assert _RE_BUNDLE_STRING_DEFAULT_VALUE.match(
+    '        (default) - [STR] "More options"')
+assert _RE_BUNDLE_STRING_DEFAULT_VALUE.match(
+    '        (default) - [STR] "More options"').group(1) == "More options"
+
+_RE_BUNDLE_STRING_LOCALIZED_VALUE = re.compile(
+    r'^\s+locale: "([0-9a-zA-Z-]+)" - \[STR\] "(.*)"$')
+assert _RE_BUNDLE_STRING_LOCALIZED_VALUE.match(
+    '        locale: "ar" - [STR] "گزینه\u200cهای بیشتر"')
+
+
+def ParseBundleResources(bundle_tool_jar_path, bundle_path):
+  """Use bundletool to extract the localized strings of a given bundle.
+
+  Args:
+    bundle_tool_jar_path: Path to bundletool .jar executable.
+    bundle_path: Path to input bundle.
+  Returns:
+    A new ResourceStringMap instance populated with the bundle's content.
+  """
+  cmd_args = [
+      'java', '-jar', bundle_tool_jar_path, 'dump', 'resources', '--bundle',
+      bundle_path, '--values'
+  ]
+  # text=True so that readline() returns str values that the (str) regular
+  # expressions above can match against.
+  p = subprocess.Popen(cmd_args, bufsize=1, stdout=subprocess.PIPE, text=True)
+  res_map = ResourceStringMap()
+  current_resource_id = None
+  current_resource_name = None
+  keep_parsing = True
+  need_value = False
+  while keep_parsing:
+    line = p.stdout.readline()
+    if not line:
+      break
+    # Do not use rstrip(), since this should only remove trailing newlines
+    # but not trailing whitespace that happens to be embedded in the string
+    # value for some reason.
+    line = line.rstrip('\n\r')
+    m = _RE_BUNDLE_STRING_RESOURCE_HEADER.match(line)
+    if m:
+      current_resource_id = int(m.group(1), 16)
+      current_resource_name = m.group(2)
+      need_value = True
+      continue
+
+    if not need_value:
+      continue
+
+    resource_config = None
+    m = _RE_BUNDLE_STRING_DEFAULT_VALUE.match(line)
+    if m:
+      resource_config = 'config (default)'
+      resource_value = m.group(1)
+    else:
+      m = _RE_BUNDLE_STRING_LOCALIZED_VALUE.match(line)
+      if m:
+        resource_config = 'config %s' % m.group(1)
+        resource_value = m.group(2)
+
+    if resource_config is None:
+      need_value = False
+      continue
+
+    res_map.AddValue(current_resource_id, current_resource_name,
+                     resource_config, UnquoteString(resource_value))
+  return res_map
+
+
+# Name of the binary resources table file inside an APK.
+RESOURCES_FILENAME = 'resources.arsc'
+
+
+def IsFilePathAnApk(input_file):
+  """Returns True iff a ZipFile instance is for a regular APK."""
+  try:
+    with zipfile.ZipFile(input_file) as input_zip:
+      _ = input_zip.getinfo(RESOURCES_FILENAME)
+      return True
+  except:
+    return False
+
+
+# pylint: disable=line-too-long
+
+# Example output from 'aapt dump resources --values' corresponding
+# to strings:
+#
+#      config zh-rHK
+#        resource 0x7f12009c org.chromium.chrome:string/0_resource_name_obfuscated: t=0x03 d=0x0000caa9 (s=0x0008 r=0x00)
+#          (string8) "瀏覽首頁"
+#        resource 0x7f12009d org.chromium.chrome:string/0_resource_name_obfuscated: t=0x03 d=0x0000c8e0 (s=0x0008 r=0x00)
+#          (string8) "向上瀏覽"
+#
+
+# The following are compiled regular expressions used to recognize each type
+# of line and extract the relevant information.
+#
+_RE_AAPT_CONFIG = re.compile(r'^\s+config (.+):$')
+assert _RE_AAPT_CONFIG.match('   config (default):')
+assert _RE_AAPT_CONFIG.match('   config zh-rTW:')
+
+# Match an ISO 639-1 or ISO 639-2 locale.
+_RE_AAPT_ISO_639_LOCALE = re.compile(r'^[a-z]{2,3}(-r[A-Z]{2,3})?$')
+assert _RE_AAPT_ISO_639_LOCALE.match('de')
+assert _RE_AAPT_ISO_639_LOCALE.match('zh-rTW')
+assert _RE_AAPT_ISO_639_LOCALE.match('fil')
+assert not _RE_AAPT_ISO_639_LOCALE.match('land')
+
+_RE_AAPT_BCP47_LOCALE = re.compile(r'^b\+[a-z][a-zA-Z0-9\+]+$')
+assert _RE_AAPT_BCP47_LOCALE.match('b+sr')
+assert _RE_AAPT_BCP47_LOCALE.match('b+sr+Latn')
+assert _RE_AAPT_BCP47_LOCALE.match('b+en+US')
+assert not _RE_AAPT_BCP47_LOCALE.match('b+')
+assert not _RE_AAPT_BCP47_LOCALE.match('b+1234')
+
+_RE_AAPT_STRING_RESOURCE_HEADER = re.compile(
+    r'^\s+resource 0x([0-9a-f]+) [a-zA-Z][a-zA-Z0-9.]+:string/(\w+):.*$')
+assert _RE_AAPT_STRING_RESOURCE_HEADER.match(
+    r'        resource 0x7f12009c org.chromium.chrome:string/0_resource_name_obfuscated: t=0x03 d=0x0000caa9 (s=0x0008 r=0x00)'
+)
+
+_RE_AAPT_STRING_RESOURCE_VALUE = re.compile(r'^\s+\(string8\) "(.*)"$')
+assert _RE_AAPT_STRING_RESOURCE_VALUE.match(r'          (string8) "瀏覽首頁"')
+
+# pylint: enable=line-too-long
+
+
+def _ConvertAaptLocaleToBcp47(locale):
+  """Convert a locale name from 'aapt dump' to its BCP-47 form."""
+  if locale.startswith('b+'):
+    return '-'.join(locale[2:].split('+'))
+  lang, _, region = locale.partition('-r')
+  if region:
+    return '%s-%s' % (lang, region)
+  return lang
+
+
+assert _ConvertAaptLocaleToBcp47('(default)') == '(default)'
+assert _ConvertAaptLocaleToBcp47('en') == 'en'
+assert _ConvertAaptLocaleToBcp47('en-rUS') == 'en-US'
+assert _ConvertAaptLocaleToBcp47('en-US') == 'en-US'
+assert _ConvertAaptLocaleToBcp47('fil') == 'fil'
+assert _ConvertAaptLocaleToBcp47('b+sr+Latn') == 'sr-Latn'
+
+
+def ParseApkResources(aapt_path, apk_path):
+  """Use aapt to extract the localized strings of a given APK.
+
+  Args:
+    aapt_path: Path to the aapt build tool.
+    apk_path: Path to the input APK.
+  Returns:
+    A new ResourceStringMap instance populated with the APK's content.
+  """
+  cmd_args = [aapt_path, 'dump', '--values', 'resources', apk_path]
+  p = subprocess.Popen(cmd_args, bufsize=1, stdout=subprocess.PIPE)
+
+  res_map = ResourceStringMap()
+  current_locale = None
+  current_resource_id = -1  # represents undefined.
+  current_resource_name = None
+  need_value = False
+  while True:
+    try:
+      line = p.stdout.readline().rstrip().decode('utf8')
+    except UnicodeDecodeError:
+      continue
+
+    if not line:
+      break
+    m = _RE_AAPT_CONFIG.match(line)
+    if m:
+      locale = None
+      aapt_locale = m.group(1)
+      if aapt_locale == '(default)':
+        locale = aapt_locale
+      elif _RE_AAPT_ISO_639_LOCALE.match(aapt_locale):
+        locale = aapt_locale
+      elif _RE_AAPT_BCP47_LOCALE.match(aapt_locale):
+        locale = aapt_locale
+      if locale is not None:
+        current_locale = _ConvertAaptLocaleToBcp47(locale)
+      continue
+
+    if current_locale is None:
+      continue
+
+    if need_value:
+      m = _RE_AAPT_STRING_RESOURCE_VALUE.match(line)
+      if not m:
+        # Should not happen.
+        sys.stderr.write('WARNING: Missing value for string ID 0x%08x "%s"' %
+                         (current_resource_id, current_resource_name))
+        resource_value = '<MISSING_STRING 0x%08x>' % current_resource_id
+      else:
+        resource_value = UnquoteString(m.group(1))
+
+      res_map.AddValue(current_resource_id, current_resource_name,
+                       'config %s' % current_locale, resource_value)
+      need_value = False
+    else:
+      m = _RE_AAPT_STRING_RESOURCE_HEADER.match(line)
+      if m:
+        current_resource_id = int(m.group(1), 16)
+        current_resource_name = m.group(2)
+        need_value = True
+
+  return res_map
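+
+
+# Example invocations (the paths below are hypothetical):
+#
+#   build/android/dump_apk_resource_strings.py \
+#       out/Release/apks/ChromePublic.apk --output strings.txt
+#
+#   build/android/dump_apk_resource_strings.py \
+#       out/Release/my_bundle.aab --omit-ids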
+
+
+def main(args):
+  parser = argparse.ArgumentParser(
+      description=__doc__,
+      formatter_class=argparse.RawDescriptionHelpFormatter)
+  parser.add_argument(
+      'input_file',
+      help='Input file path. This can be either an APK, or an app bundle.')
+  parser.add_argument('--output', help='Optional output file path.')
+  parser.add_argument(
+      '--omit-ids',
+      action='store_true',
+      help='Omit resource IDs in the output. This is useful '
+      'to compare the contents of two distinct builds of the '
+      'same APK.')
+  parser.add_argument(
+      '--aapt-path',
+      default=_AAPT_DEFAULT_PATH,
+      help='Path to aapt executable. Optional for APKs.')
+  parser.add_argument(
+      '--r-txt-path',
+      help='Path to an optional input R.txt file used to translate resource '
+      'IDs to string names. Useful when resource names in the input files '
+      'were obfuscated. NOTE: If ${INPUT_FILE}.R.txt exists, it will be used '
+      'automatically by this script.')
+  parser.add_argument(
+      '--bundletool-path',
+      default=_DEFAULT_BUNDLETOOL_PATH,
+      help='Path to alternate bundletool .jar file. Only used for bundles.')
+  parser.add_argument(
+      '--profile', action='store_true', help='Enable Python profiling.')
+
+  options = parser.parse_args(args)
+
+  # Create a {res_id -> res_name} map for unobfuscation, if needed.
+  res_id_name_map = {}
+  r_txt_path = options.r_txt_path
+  if not r_txt_path:
+    candidate_r_txt_path = options.input_file + '.R.txt'
+    if os.path.exists(candidate_r_txt_path):
+      r_txt_path = candidate_r_txt_path
+
+  if r_txt_path:
+    res_id_name_map = ReadStringMapFromRTxt(r_txt_path)
+
+  # Create a helper function that returns a new ResourceStringMap instance
+  # based on the input file's type.
+  if IsFilePathABundle(options.input_file):
+    if not options.bundletool_path:
+      parser.error('--bundletool-path is required to parse bundles.')
+
+    # Use bundletool to parse the bundle resources.
+    def create_string_map():
+      return ParseBundleResources(options.bundletool_path, options.input_file)
+
+  elif IsFilePathAnApk(options.input_file):
+    if not options.aapt_path:
+      parser.error('--aapt-path is required to parse APKs.')
+
+    # Use aapt dump to parse the APK resources.
+    def create_string_map():
+      return ParseApkResources(options.aapt_path, options.input_file)
+
+  else:
+    parser.error('Unknown file format: %s' % options.input_file)
+
+  # Print everything now.
+  with ManagedOutput(options.output) as output:
+    with ManagedPythonProfiling(options.profile):
+      res_map = create_string_map()
+      res_map.RemapResourceNames(res_id_name_map)
+      lines = AutoIndentStringList(res_map.ToStringList(options.omit_ids))
+      for line in lines:
+        output.write(line)
+        output.write('\n')
+
+
+if __name__ == "__main__":
+  main(sys.argv[1:])
diff --git a/android/envsetup.sh b/android/envsetup.sh
new file mode 100755
index 000000000000..315db296a7ab
--- /dev/null
+++ b/android/envsetup.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+# Copyright 2012 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Adds Android SDK tools and related helpers to PATH, useful for development.
+# Not used on bots, nor required for any commands to succeed.
+# Use like: source build/android/envsetup.sh
+
+# Make sure we're being sourced.
+if [[ -n "$BASH_VERSION" && "${BASH_SOURCE:-$0}" == "$0" ]]; then
+  echo "ERROR: envsetup must be sourced."
+  exit 1
+fi
+
+# This only exists to set local variables. Don't call this manually.
+android_envsetup_main() {
+  local SCRIPT_PATH="$1"
+  local SCRIPT_DIR="$(dirname "$SCRIPT_PATH")"
+  local CHROME_SRC="$(readlink -f "${SCRIPT_DIR}/../../")"
+
+  # Some tools expect these environment variables.
+  export ANDROID_SDK_ROOT="${CHROME_SRC}/third_party/android_sdk/public"
+  # ANDROID_HOME is deprecated, but generally means the same thing as
+  # ANDROID_SDK_ROOT and shouldn't hurt to set it.
+  export ANDROID_HOME="$ANDROID_SDK_ROOT"
+
+  # Set up PATH to point to SDK-provided (and other) tools, such as 'adb'.
+  export PATH=${CHROME_SRC}/build/android:$PATH
+  export PATH=${ANDROID_SDK_ROOT}/tools/:$PATH
+  export PATH=${ANDROID_SDK_ROOT}/platform-tools:$PATH
+}
+# In zsh, $0 is the name of the file being sourced.
+android_envsetup_main "${BASH_SOURCE:-$0}"
+unset -f android_envsetup_main
diff --git a/android/fast_local_dev_server.py b/android/fast_local_dev_server.py
new file mode 100755
index 000000000000..282dcf553447
--- /dev/null
+++ b/android/fast_local_dev_server.py
@@ -0,0 +1,336 @@
+#!/usr/bin/env python3
+# Copyright 2021 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Creates a server to offload non-critical-path GN targets."""
+
+from __future__ import annotations
+
+import argparse
+import json
+import os
+import queue
+import shutil
+import socket
+import subprocess
+import sys
+import threading
+from typing import Callable, Dict, List, Optional, Tuple
+
+sys.path.append(os.path.join(os.path.dirname(__file__), 'gyp'))
+from util import server_utils
+
+
+def log(msg: str, *, end: str = ''):
+  # Shrink the message (keeping a 2-char prefix and using the rest of the
+  # room for the suffix) according to terminal size so it is always one line.
+  width = shutil.get_terminal_size().columns
+  prefix = f'[{TaskStats.prefix()}] '
+  max_msg_width = width - len(prefix)
+  if len(msg) > max_msg_width:
+    length_to_show = max_msg_width - 5  # Account for ellipsis and header.
+    msg = f'{msg[:2]}...{msg[-length_to_show:]}'
+  # \r returns the carriage to the beginning of the line.
+  # \033[K erases from the cursor to the end of the line.
+ # Avoid the default line ending so the next \r overwrites the same line just + # like ninja's output. + print(f'\r{prefix}{msg}\033[K', end=end, flush=True) + + +class TaskStats: + """Class to keep track of aggregate stats for all tasks across threads.""" + _num_processes = 0 + _completed_tasks = 0 + _total_tasks = 0 + _lock = threading.Lock() + + @classmethod + def no_running_processes(cls): + return cls._num_processes == 0 + + @classmethod + def add_task(cls): + # Only the main thread calls this, so there is no need for locking. + cls._total_tasks += 1 + + @classmethod + def add_process(cls): + with cls._lock: + cls._num_processes += 1 + + @classmethod + def remove_process(cls): + with cls._lock: + cls._num_processes -= 1 + + @classmethod + def complete_task(cls): + with cls._lock: + cls._completed_tasks += 1 + + @classmethod + def prefix(cls): + # Ninja's prefix is: [205 processes, 6/734 @ 6.5/s : 0.922s ] + # Time taken and task completion rate are not important for the build server + # since it is always running in the background and uses idle priority for + # its tasks. + with cls._lock: + word = 'process' if cls._num_processes == 1 else 'processes' + return (f'{cls._num_processes} {word}, ' + f'{cls._completed_tasks}/{cls._total_tasks}') + + +class TaskManager: + """Class to encapsulate a threadsafe queue and handle deactivating it.""" + + def __init__(self): + self._queue: queue.SimpleQueue[Task] = queue.SimpleQueue() + self._deactivated = False + + def add_task(self, task: Task): + assert not self._deactivated + TaskStats.add_task() + self._queue.put(task) + log(f'QUEUED {task.name}') + self._maybe_start_tasks() + + def deactivate(self): + self._deactivated = True + while not self._queue.empty(): + try: + task = self._queue.get_nowait() + except queue.Empty: + return + task.terminate() + + @staticmethod + def _num_running_processes(): + with open('/proc/stat') as f: + for line in f: + if line.startswith('procs_running'): + return int(line.rstrip().split()[1]) + assert False, 'Could not read /proc/stat' + return 0 + + def _maybe_start_tasks(self): + if self._deactivated: + return + # Include load avg so that a small dip in the number of currently running + # processes will not cause new tasks to be started while the overall load is + # heavy. + cur_load = max(self._num_running_processes(), os.getloadavg()[0]) + num_started = 0 + # Always start a task if we don't have any running, so that all tasks are + # eventually finished. Try starting up tasks when the overall load is light. + # Limit to at most 2 new tasks to prevent ramping up too fast. There is a + # chance where multiple threads call _maybe_start_tasks and each gets to + # spawn up to 2 new tasks, but since the only downside is some build tasks + # get worked on earlier rather than later, it is not worth mitigating. + while num_started < 2 and (TaskStats.no_running_processes() + or num_started + cur_load < os.cpu_count()): + try: + next_task = self._queue.get_nowait() + except queue.Empty: + return + num_started += next_task.start(self._maybe_start_tasks) + + +# TODO(wnwen): Break this into Request (encapsulating what ninja sends) and Task +# when a Request starts to be run. This would eliminate ambiguity +# about when and whether _proc/_thread are initialized. 
+class Task:
+  """Class to represent one task and operations on it."""
+
+  def __init__(self, name: str, cwd: str, cmd: List[str], stamp_file: str):
+    self.name = name
+    self.cwd = cwd
+    self.cmd = cmd
+    self.stamp_file = stamp_file
+    self._terminated = False
+    self._lock = threading.Lock()
+    self._proc: Optional[subprocess.Popen] = None
+    self._thread: Optional[threading.Thread] = None
+    self._return_code: Optional[int] = None

+  @property
+  def key(self):
+    return (self.cwd, self.name)
+
+  def start(self, on_complete_callback: Callable[[], None]) -> int:
+    """Starts the task if it has not already been terminated.
+
+    Returns the number of processes that have been started. This is called
+    at most once when the task is popped off the task queue."""
+
+    # The environment variable forces the script to actually run in order to
+    # avoid infinite recursion.
+    env = os.environ.copy()
+    env[server_utils.BUILD_SERVER_ENV_VARIABLE] = '1'
+
+    with self._lock:
+      if self._terminated:
+        return 0
+      # Use os.nice(19) to ensure the lowest priority (idle) for these
+      # analysis tasks since we want to avoid slowing down the actual build.
+      # TODO(wnwen): Use ionice to reduce resource consumption.
+      TaskStats.add_process()
+      log(f'STARTING {self.name}')
+      # This use of preexec_fn is sufficiently simple, just one os.nice call.
+      # pylint: disable=subprocess-popen-preexec-fn
+      self._proc = subprocess.Popen(
+          self.cmd,
+          stdout=subprocess.PIPE,
+          stderr=subprocess.STDOUT,
+          cwd=self.cwd,
+          env=env,
+          text=True,
+          preexec_fn=lambda: os.nice(19),
+      )
+      self._thread = threading.Thread(
+          target=self._complete_when_process_finishes,
+          args=(on_complete_callback, ))
+      self._thread.start()
+      return 1
+
+  def terminate(self):
+    """Can be called multiple times to cancel and ignore the task's output."""
+
+    with self._lock:
+      if self._terminated:
+        return
+      self._terminated = True
+    # It is safe to access _proc and _thread outside of _lock since they are
+    # only changed by self.start holding _lock while self._terminated is
+    # false. Since we have just set self._terminated to true inside of _lock,
+    # we know that neither _proc nor _thread will be changed from this point
+    # onwards.
+    if self._proc:
+      self._proc.terminate()
+      self._proc.wait()
+    # Ensure that self._complete is called either by the thread or by us.
+    if self._thread:
+      self._thread.join()
+    else:
+      self._complete()
+
+  def _complete_when_process_finishes(self,
+                                      on_complete_callback: Callable[[],
+                                                                     None]):
+    assert self._proc
+    # We know Popen.communicate will return a str and not bytes since it is
+    # constructed with text=True.
+    stdout: str = self._proc.communicate()[0]
+    self._return_code = self._proc.returncode
+    TaskStats.remove_process()
+    self._complete(stdout)
+    on_complete_callback()
+
+  def _complete(self, stdout: str = ''):
+    """Update the user and ninja after the task has run or been terminated.
+
+    This method should only be run once per task. Avoid modifying the task so
+    that this method does not need locking."""
+
+    TaskStats.complete_task()
+    failed = False
+    if self._terminated:
+      log(f'TERMINATED {self.name}')
+      # Ignore stdout as it is now outdated.
+      failed = True
+    else:
+      log(f'FINISHED {self.name}')
+      if stdout or self._return_code != 0:
+        failed = True
+        # An extra new line is needed since we want to preserve the previous
+        # _log line. Use a single print so that it is threadsafe.
+        # TODO(wnwen): Improve stdout display by parsing over it and moving
+        #              the actual error to the bottom.
Otherwise long command lines + # in the Traceback section obscure the actual error(s). + print('\n' + '\n'.join([ + f'FAILED: {self.name}', + f'Return code: {self._return_code}', + ' '.join(self.cmd), + stdout, + ])) + + if failed: + # Force ninja to consider failed targets as dirty. + try: + os.unlink(os.path.join(self.cwd, self.stamp_file)) + except FileNotFoundError: + pass + else: + # Ninja will rebuild targets when their inputs change even if their stamp + # file has a later modified time. Thus we do not need to worry about the + # script being run by the build server updating the mtime incorrectly. + pass + + +def _listen_for_request_data(sock: socket.socket): + while True: + conn = sock.accept()[0] + received = [] + with conn: + while True: + data = conn.recv(4096) + if not data: + break + received.append(data) + if received: + yield json.loads(b''.join(received)) + + +def _process_requests(sock: socket.socket): + # Since dicts in python can contain anything, explicitly type tasks to help + # make static type checking more useful. + tasks: Dict[Tuple[str, str], Task] = {} + task_manager = TaskManager() + try: + log('READY... Remember to set android_static_analysis="build_server" in ' + 'args.gn files') + for data in _listen_for_request_data(sock): + task = Task(name=data['name'], + cwd=data['cwd'], + cmd=data['cmd'], + stamp_file=data['stamp_file']) + existing_task = tasks.get(task.key) + if existing_task: + existing_task.terminate() + tasks[task.key] = task + task_manager.add_task(task) + except KeyboardInterrupt: + log('STOPPING SERVER...', end='\n') + # Gracefully shut down the task manager, terminating all queued tasks. + task_manager.deactivate() + # Terminate all currently running tasks. + for task in tasks.values(): + task.terminate() + log('STOPPED', end='\n') + + +def main(): + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument( + '--fail-if-not-running', + action='store_true', + help='Used by GN to fail fast if the build server is not running.') + args = parser.parse_args() + if args.fail_if_not_running: + with socket.socket(socket.AF_UNIX) as sock: + try: + sock.connect(server_utils.SOCKET_ADDRESS) + except socket.error: + print('Build server is not running and ' + 'android_static_analysis="build_server" is set.\nPlease run ' + 'this command in a separate terminal:\n\n' + '$ build/android/fast_local_dev_server.py\n') + return 1 + else: + return 0 + with socket.socket(socket.AF_UNIX) as sock: + sock.bind(server_utils.SOCKET_ADDRESS) + sock.listen() + _process_requests(sock) + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/android/generate_jacoco_report.py b/android/generate_jacoco_report.py new file mode 100755 index 000000000000..44e82acbf70f --- /dev/null +++ b/android/generate_jacoco_report.py @@ -0,0 +1,273 @@ +#!/usr/bin/env vpython3 + +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Aggregates Jacoco coverage files to produce output.""" + + +import argparse +import fnmatch +import json +import os +import sys + +import devil_chromium +from devil.utils import cmd_helper +from pylib.constants import host_paths + +# Source paths should be passed to Jacoco in a way that the relative file paths +# reflect the class package name. +_PARTIAL_PACKAGE_NAMES = ['com/google', 'org/chromium'] + +# The sources_json_file is generated by jacoco_instr.py with source directories +# and input path to non-instrumented jars. +# e.g. 
+# 'source_dirs': [
+#   "chrome/android/java/src/org/chromium/chrome/browser/toolbar/bottom",
+#   "chrome/android/java/src/org/chromium/chrome/browser/ui/system",
+#   ...]
+# 'input_path':
+#   '$CHROMIUM_OUTPUT_DIR/\
+#    obj/chrome/android/features/tab_ui/java__process_prebuilt-filtered.jar'
+
+_SOURCES_JSON_FILES_SUFFIX = '__jacoco_sources.json'
+
+
+def _CreateClassfileArgs(class_files, report_type, include_substr=None):
+  """Returns '--classfiles' arguments for the matching class files.
+
+  Args:
+    class_files: A list of class files.
+    report_type: A string indicating if device or host files are desired.
+    include_substr: A substring that must be present to include the file.
+
+  Returns:
+    A list of '--classfiles' arguments for the class files that match the
+    report type (and, if given, |include_substr|).
+  """
+  # These should match the jar class files generated in internal_rules.gni
+  search_jar_suffix = '%s.filter.jar' % report_type
+  result_class_files = []
+  for f in class_files:
+    include_file = False
+    if f.endswith(search_jar_suffix):
+      include_file = True
+
+    # If include_substr is specified, remove files that don't have the
+    # required substring.
+    if include_file and include_substr and include_substr not in f:
+      include_file = False
+    if include_file:
+      result_class_files += ['--classfiles', f]
+
+  return result_class_files
+
+
+def _GenerateReportOutputArgs(args, class_files, report_type):
+  cmd = _CreateClassfileArgs(class_files, report_type,
+                             args.include_substr_filter)
+  if args.format == 'html':
+    report_dir = os.path.join(args.output_dir, report_type)
+    if not os.path.exists(report_dir):
+      os.makedirs(report_dir)
+    cmd += ['--html', report_dir]
+  elif args.format == 'xml':
+    cmd += ['--xml', args.output_file]
+  elif args.format == 'csv':
+    cmd += ['--csv', args.output_file]
+
+  return cmd
+
+
+def _GetFilesWithSuffix(root_dir, suffix):
+  """Gets all files with a given suffix.
+
+  Args:
+    root_dir: Directory in which to search for files.
+    suffix: Suffix to look for.
+
+  Returns:
+    A list of absolute paths to files that match.
+  """
+  files = []
+  for root, _, filenames in os.walk(root_dir):
+    basenames = fnmatch.filter(filenames, '*' + suffix)
+    files.extend([os.path.join(root, basename) for basename in basenames])
+
+  return files
+
+
+def _GetExecFiles(root_dir, exclude_substr=None):
+  """Gets all .exec files.
+
+  Args:
+    root_dir: Root directory in which to search for files.
+    exclude_substr: Substring which should be absent in filename. If None,
+      all files are selected.
+
+  Returns:
+    A list of absolute paths to .exec files.
+  """
+  all_exec_files = _GetFilesWithSuffix(root_dir, ".exec")
+  valid_exec_files = []
+  for exec_file in all_exec_files:
+    if not exclude_substr or exclude_substr not in exec_file:
+      valid_exec_files.append(exec_file)
+  return valid_exec_files
+
+
+def _ParseArguments(parser):
+  """Parses the command line arguments.
+
+  Args:
+    parser: ArgumentParser object.
+
+  Returns:
+    The parsed arguments.
+  """
+  parser.add_argument(
+      '--format',
+      required=True,
+      choices=['html', 'xml', 'csv'],
+      help='Output report format. Choose one from html, xml and csv.')
+  parser.add_argument(
+      '--device-or-host',
+      choices=['device', 'host'],
+      help='Selection on whether to use the device classpath files or the '
+      'host classpath files. Host would typically be used for junit tests '
+      'and device for tests that run on the device. Only used for xml and '
+      'csv reports.')
+  parser.add_argument('--include-substr-filter',
+                      help='Substring that must be included in classjars.',
+                      type=str,
+                      default='')
+  parser.add_argument('--output-dir', help='html report output directory.')
+  parser.add_argument('--output-file',
+                      help='xml file to write device coverage results.')
+  parser.add_argument(
+      '--coverage-dir',
+      required=True,
+      help='Root of the directory in which to search for '
+      'coverage data (.exec) files.')
+  parser.add_argument('--exec-filename-excludes',
+                      required=False,
+                      help='Excludes .exec files which contain a particular '
+                      'substring in their name')
+  parser.add_argument(
+      '--sources-json-dir',
+      help='Root of the directory in which to search for '
+      '*__jacoco_sources.json files.')
+  parser.add_argument(
+      '--class-files',
+      nargs='+',
+      help='Location of Java non-instrumented class files. '
+      'Use non-instrumented jars instead of instrumented jars. '
+      'e.g. use chrome_java__process_prebuilt_(host/device)_filter.jar '
+      'instead of chrome_java__process_prebuilt-instrumented.jar')
+  parser.add_argument(
+      '--sources',
+      nargs='+',
+      help='Location of the source files. '
+      'Specified source folders must be the direct parent of the folders '
+      'that define the Java packages. '
+      'e.g. /chrome/android/java/src/')
+  parser.add_argument(
+      '--cleanup',
+      action='store_true',
+      help='If set, removes coverage files generated at '
+      'runtime.')
+  args = parser.parse_args()
+
+  if args.format == 'html' and not args.output_dir:
+    parser.error('--output-dir needed for html report.')
+  if args.format in ('csv', 'xml'):
+    if not args.output_file:
+      parser.error('--output-file needed for xml/csv reports.')
+    if not args.device_or_host and args.sources_json_dir:
+      parser.error('--device-or-host selection needed with '
+                   '--sources-json-dir')
+  if not (args.sources_json_dir or args.class_files):
+    parser.error('At least one of --sources-json-dir or --class-files is '
+                 'needed.')
+  return args
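+
+
+# Example invocation (the paths below are hypothetical):
+#
+#   build/android/generate_jacoco_report.py --format html \
+#       --coverage-dir /tmp/coverage --sources-json-dir out/Debug \
+#       --output-dir /tmp/coverage_report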
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  args = _ParseArguments(parser)
+
+  devil_chromium.Initialize()
+
+  coverage_files = _GetExecFiles(args.coverage_dir,
+                                 args.exec_filename_excludes)
+  if not coverage_files:
+    parser.error('No coverage file found under %s' % args.coverage_dir)
+  print('Found coverage files: %s' % str(coverage_files))
+
+  class_files = []
+  source_dirs = []
+  if args.sources_json_dir:
+    sources_json_files = _GetFilesWithSuffix(args.sources_json_dir,
+                                             _SOURCES_JSON_FILES_SUFFIX)
+    for f in sources_json_files:
+      with open(f, 'r') as json_file:
+        data = json.load(json_file)
+        class_files.extend(data['input_path'])
+        source_dirs.extend(data['source_dirs'])
+
+  # Fix source directories as direct parent of Java packages.
+  fixed_source_dirs = set()
+  for path in source_dirs:
+    for partial in _PARTIAL_PACKAGE_NAMES:
+      if partial in path:
+        fixed_dir = os.path.join(host_paths.DIR_SOURCE_ROOT,
+                                 path[:path.index(partial)])
+        fixed_source_dirs.add(fixed_dir)
+        break
+
+  if args.class_files:
+    class_files += args.class_files
+  if args.sources:
+    fixed_source_dirs.update(args.sources)
+
+  cmd = [
+      'java', '-jar',
+      os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', 'jacoco',
+                   'lib', 'jacococli.jar'), 'report'
+  ] + coverage_files
+
+  for source in fixed_source_dirs:
+    cmd += ['--sourcefiles', source]
+
+  if args.format == 'html':
+    # Both reports are generated for html since the cq bot generates an html
+    # report and we wouldn't know which one a developer needed.
+    device_cmd = cmd + _GenerateReportOutputArgs(args, class_files, 'device')
+    host_cmd = cmd + _GenerateReportOutputArgs(args, class_files, 'host')
+
+    device_exit_code = cmd_helper.RunCmd(device_cmd)
+    host_exit_code = cmd_helper.RunCmd(host_cmd)
+    exit_code = device_exit_code or host_exit_code
+  else:
+    cmd = cmd + _GenerateReportOutputArgs(args, class_files,
+                                          args.device_or_host)
+    exit_code = cmd_helper.RunCmd(cmd)
+
+  if args.cleanup:
+    for f in coverage_files:
+      os.remove(f)
+
+  # The command tends to exit with status 0 even when it actually failed.
+  if not exit_code:
+    if args.format == 'html':
+      if not os.path.isdir(args.output_dir) or not os.listdir(args.output_dir):
+        print('No report generated at %s' % args.output_dir)
+        exit_code = 1
+    elif not os.path.isfile(args.output_file):
+      print('No device coverage report generated at %s' % args.output_file)
+      exit_code = 1
+
+  return exit_code
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/android/gradle/AndroidManifest.xml b/android/gradle/AndroidManifest.xml
new file mode 100644
index 000000000000..dfbb9bdf3145
--- /dev/null
+++ b/android/gradle/AndroidManifest.xml
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+  Copyright 2018 The Chromium Authors. All rights reserved.
+  Use of this source code is governed by a BSD-style license that can be
+  found in the LICENSE file.
+-->
+<!-- Placeholder manifest for the Android Studio projects generated by
+     generate_gradle.py. -->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="org.chromium.gradle">
+</manifest>
diff --git a/android/gradle/OWNERS b/android/gradle/OWNERS
new file mode 100644
index 000000000000..a0e082697243
--- /dev/null
+++ b/android/gradle/OWNERS
@@ -0,0 +1,2 @@
+agrieve@chromium.org
+wnwen@chromium.org
diff --git a/android/gradle/android.jinja b/android/gradle/android.jinja
new file mode 100644
index 000000000000..3b66b97bab9b
--- /dev/null
+++ b/android/gradle/android.jinja
@@ -0,0 +1,114 @@
+{# Copyright 2016 The Chromium Authors. All rights reserved. #}
+{# Use of this source code is governed by a BSD-style license that can be #}
+{# found in the LICENSE file. #}
+{% macro expand_sourceset(variables, prefix) %}
+{% if variables is defined %}
+    {{ prefix }} {
+{% if variables.android_manifest is defined %}
+        manifest.srcFile "{{ variables.android_manifest }}"
+{% endif %}
+{% if variables.java_dirs is defined %}
+        java.srcDirs = [
+{% for path in variables.java_dirs %}
+            "{{ path }}",
+{% endfor %}
+        ]
+{% endif %}
+{% if variables.java_excludes is defined %}
+        java.filter.exclude([
+{% for path in variables.java_excludes %}
+            "{{ path }}",
+{% endfor %}
+        ])
+{% endif %}
+{% if variables.jni_libs is defined %}
+        jniLibs.srcDirs = [
+{% for path in variables.jni_libs %}
+            "{{ path }}",
+{% endfor %}
+        ]
+{% endif %}
+{% if variables.res_dirs is defined %}
+        res.srcDirs = [
+{% for path in variables.res_dirs %}
+            "{{ path }}",
+{% endfor %}
+        ]
+{% endif %}
+    }
+{% endif %}
+{% endmacro %}
+// Generated by //build/android/generate_gradle.py
+
+{% if template_type in ('android_library', 'android_junit') %}
+apply plugin: "com.android.library"
+{% elif template_type == 'android_apk' %}
+apply plugin: "com.android.application"
+{% endif %}
+
+android {
+    compileSdkVersion "{{ compile_sdk_version }}"
+
+    defaultConfig {
+        vectorDrawables.useSupportLibrary = true
+        minSdkVersion {{ min_sdk_version }}
+        targetSdkVersion {{ target_sdk_version }}
+    }
+
+    compileOptions {
+        sourceCompatibility JavaVersion.VERSION_11
+        targetCompatibility JavaVersion.VERSION_11
+    }
+
+{% if native is defined %}
+    externalNativeBuild {
+        cmake {
+            path "CMakeLists.txt"
+        }
+    }
+{% endif %}
+
+    sourceSets {
+{% for name in ['main', 'test', 'androidTest', 'debug', 'release'] %}
+        {{ name }} {
+            aidl.srcDirs = []
+            assets.srcDirs = []
+            java.srcDirs = []
+            jni.srcDirs = []
+            renderscript.srcDirs = []
+            res.srcDirs = []
+            resources.srcDirs = []
+        }
+{% endfor %}
endfor %} + +{{ expand_sourceset(main, 'main') }} +{{ expand_sourceset(test, 'test') }} +{% if android_test is defined %} +{% for t in android_test %} +{{ expand_sourceset(t, 'androidTest') }} +{% endfor %} +{% endif %} + } +} + +{% include 'dependencies.jinja' %} + +afterEvaluate { + def tasksToDisable = tasks.findAll { + return (it.name.equals('generateDebugSources') // causes unwanted AndroidManifest.java + || it.name.equals('generateReleaseSources') + || it.name.endsWith('BuildConfig') // causes unwanted BuildConfig.java + || it.name.equals('preDebugAndroidTestBuild') +{% if not use_gradle_process_resources %} + || it.name.endsWith('Assets') + || it.name.endsWith('Resources') + || it.name.endsWith('ResValues') +{% endif %} + || it.name.endsWith('Aidl') + || it.name.endsWith('Renderscript') + || it.name.endsWith('Shaders')) + } + tasksToDisable.each { Task task -> + task.enabled = false + } +} diff --git a/android/gradle/cmake.jinja b/android/gradle/cmake.jinja new file mode 100644 index 000000000000..b7273880cfe9 --- /dev/null +++ b/android/gradle/cmake.jinja @@ -0,0 +1,25 @@ +{# Copyright 2018 The Chromium Authors. All rights reserved. #} +{# Use of this source code is governed by a BSD-style license that can be #} +{# found in the LICENSE file. #} +# Generated by //build/android/generate_gradle.py + +cmake_minimum_required(VERSION 3.4.1) + +project(chrome C CXX) + +{% if native.includes is defined %} +include_directories( +{% for path in native.includes %} + {{ path }} +{% endfor %} +) +{% endif %} + +# Android Studio will index faster when adding all sources into one library. +{% if native.sources is defined %} +add_library("chromium" +{% for path in native.sources %} + {{ path }} +{% endfor %} +) +{% endif %} diff --git a/android/gradle/dependencies.jinja b/android/gradle/dependencies.jinja new file mode 100644 index 000000000000..87bc31285300 --- /dev/null +++ b/android/gradle/dependencies.jinja @@ -0,0 +1,28 @@ +{# Copyright 2016 The Chromium Authors. All rights reserved. #} +{# Use of this source code is governed by a BSD-style license that can be #} +{# found in the LICENSE file. #} +{% macro expand_deps(variables, prefix) %} +{% if variables is defined %} +{% if variables.prebuilts is defined %} +{% for path in variables.prebuilts %} + {{ prefix }} files("{{ path }}") +{% endfor %} +{% endif %} +{% if variables.java_project_deps is defined %} +{% for proj in variables.java_project_deps %} + {{ prefix }} project(":{{ proj }}") +{% endfor %} +{% endif %} +{% if variables.android_project_deps is defined %} +{% for proj in variables.android_project_deps %} + {{ prefix }} project(path: ":{{ proj }}") +{% endfor %} +{% endif %} +{% endif %} +{% endmacro %} + +dependencies { +{{ expand_deps(main, 'implementation') }} +{{ expand_deps(test, 'testImplementation') }} +{{ expand_deps(android_test, 'androidTestImplementation') }} +} diff --git a/android/gradle/generate_gradle.py b/android/gradle/generate_gradle.py new file mode 100755 index 000000000000..bc05baf9bbf3 --- /dev/null +++ b/android/gradle/generate_gradle.py @@ -0,0 +1,937 @@ +#!/usr/bin/env python3 +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file.
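+ +# Example invocation (illustrative; the flags are defined in main() below): +# build/android/gradle/generate_gradle.py --output-directory out/Debug --target //chrome/android:chrome_public_apk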
+ +"""Generates an Android Studio project from a GN target.""" + +import argparse +import codecs +import collections +import glob +import json +import logging +import os +import pathlib +import re +import shutil +import subprocess +import sys + +_BUILD_ANDROID = os.path.join(os.path.dirname(__file__), os.pardir) +sys.path.append(_BUILD_ANDROID) +import devil_chromium +from devil.utils import run_tests_helper +from pylib import constants +from pylib.constants import host_paths + +sys.path.append(os.path.join(_BUILD_ANDROID, 'gyp')) +import jinja_template +from util import build_utils +from util import resource_utils + +sys.path.append(os.path.dirname(_BUILD_ANDROID)) +import gn_helpers + +# Typically these should track the versions that works on the slowest release +# channel, i.e. Android Studio stable. +_DEFAULT_ANDROID_GRADLE_PLUGIN_VERSION = '7.3.1' +_DEFAULT_KOTLIN_GRADLE_PLUGIN_VERSION = '1.8.0' +_DEFAULT_GRADLE_WRAPPER_VERSION = '7.4' + +_DEPOT_TOOLS_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', + 'depot_tools') +_DEFAULT_ANDROID_MANIFEST_PATH = os.path.join( + host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'gradle', + 'AndroidManifest.xml') +_FILE_DIR = os.path.dirname(__file__) +_GENERATED_JAVA_SUBDIR = 'generated_java' +_JNI_LIBS_SUBDIR = 'symlinked-libs' +_ARMEABI_SUBDIR = 'armeabi' +_GRADLE_BUILD_FILE = 'build.gradle' +_CMAKE_FILE = 'CMakeLists.txt' +# This needs to come first alphabetically among all modules. +_MODULE_ALL = '_all' +_INSTRUMENTATION_TARGET_SUFFIX = '_test_apk__test_apk__apk' + +_DEFAULT_TARGETS = [ + '//android_webview/test/embedded_test_server:aw_net_test_support_apk', + '//android_webview/test:webview_instrumentation_apk', + '//android_webview/test:webview_instrumentation_test_apk', + '//base:base_junit_tests', + '//chrome/android:chrome_junit_tests', + '//chrome/android:chrome_public_apk', + '//chrome/android:chrome_public_test_apk', + '//chrome/android:chrome_public_unit_test_apk', + '//content/public/android:content_junit_tests', + '//content/shell/android:content_shell_apk', + # Below must be included even with --all since they are libraries. + '//base/android/jni_generator:jni_processor', + '//tools/android/errorprone_plugin:errorprone_plugin_java', +] + + +def _TemplatePath(name): + return os.path.join(_FILE_DIR, '{}.jinja'.format(name)) + + +def _RebasePath(path_or_list, new_cwd=None, old_cwd=None): + """Makes the given path(s) relative to new_cwd, or absolute if not specified. + + If new_cwd is not specified, absolute paths are returned. + If old_cwd is not specified, constants.GetOutDirectory() is assumed. 
+ """ + if path_or_list is None: + return [] + if not isinstance(path_or_list, str): + return [_RebasePath(p, new_cwd, old_cwd) for p in path_or_list] + if old_cwd is None: + old_cwd = constants.GetOutDirectory() + old_cwd = os.path.abspath(old_cwd) + if new_cwd: + new_cwd = os.path.abspath(new_cwd) + return os.path.relpath(os.path.join(old_cwd, path_or_list), new_cwd) + return os.path.abspath(os.path.join(old_cwd, path_or_list)) + + +def _WriteFile(path, data): + """Writes |data| to |path|, constucting parent directories if necessary.""" + logging.info('Writing %s', path) + dirname = os.path.dirname(path) + if not os.path.exists(dirname): + os.makedirs(dirname) + with codecs.open(path, 'w', 'utf-8') as output_file: + output_file.write(data) + + +def _RunGnGen(output_dir, args=None): + cmd = [os.path.join(_DEPOT_TOOLS_PATH, 'gn'), 'gen', output_dir] + if args: + cmd.extend(args) + logging.info('Running: %r', cmd) + subprocess.check_call(cmd) + + +def _RunNinja(output_dir, args): + # Don't use version within _DEPOT_TOOLS_PATH, since most devs don't use + # that one when building. + cmd = ['autoninja', '-C', output_dir] + cmd.extend(args) + logging.info('Running: %r', cmd) + subprocess.check_call(cmd) + + +def _QueryForAllGnTargets(output_dir): + cmd = [ + os.path.join(_BUILD_ANDROID, 'list_java_targets.py'), '--gn-labels', + '--nested', '--build', '--output-directory', output_dir + ] + logging.info('Running: %r', cmd) + return subprocess.check_output(cmd, encoding='UTF-8').splitlines() + + +class _ProjectEntry: + """Helper class for project entries.""" + + _cached_entries = {} + + def __init__(self, gn_target): + # Use _ProjectEntry.FromGnTarget instead for caching. + self._gn_target = gn_target + self._build_config = None + self._java_files = None + self._all_entries = None + self.android_test_entries = [] + + @classmethod + def FromGnTarget(cls, gn_target): + assert gn_target.startswith('//'), gn_target + if ':' not in gn_target: + gn_target = '%s:%s' % (gn_target, os.path.basename(gn_target)) + if gn_target not in cls._cached_entries: + cls._cached_entries[gn_target] = cls(gn_target) + return cls._cached_entries[gn_target] + + @classmethod + def FromBuildConfigPath(cls, path): + prefix = 'gen/' + suffix = '.build_config.json' + assert path.startswith(prefix) and path.endswith(suffix), path + subdir = path[len(prefix):-len(suffix)] + gn_target = '//%s:%s' % (os.path.split(subdir)) + return cls.FromGnTarget(gn_target) + + def __hash__(self): + return hash(self._gn_target) + + def __eq__(self, other): + return self._gn_target == other.GnTarget() + + def GnTarget(self): + return self._gn_target + + def NinjaTarget(self): + return self._gn_target[2:] + + def GradleSubdir(self): + """Returns the output subdirectory.""" + ninja_target = self.NinjaTarget() + # Support targets at the root level. e.g. 
//:foo + if ninja_target[0] == ':': + ninja_target = ninja_target[1:] + return ninja_target.replace(':', os.path.sep) + + def GeneratedJavaSubdir(self): + return _RebasePath( + os.path.join('gen', self.GradleSubdir(), _GENERATED_JAVA_SUBDIR)) + + def ProjectName(self): + """Returns the Gradle project name.""" + return self.GradleSubdir().replace(os.path.sep, '.') + + def BuildConfig(self): + """Reads and returns the project's .build_config.json JSON.""" + if not self._build_config: + path = os.path.join('gen', self.GradleSubdir() + '.build_config.json') + with open(_RebasePath(path)) as jsonfile: + self._build_config = json.load(jsonfile) + return self._build_config + + def DepsInfo(self): + return self.BuildConfig()['deps_info'] + + def Gradle(self): + return self.BuildConfig()['gradle'] + + def Javac(self): + return self.BuildConfig()['javac'] + + def GetType(self): + """Returns the target type from its .build_config.""" + return self.DepsInfo()['type'] + + def IsValid(self): + return self.GetType() in ( + 'android_apk', + 'android_app_bundle_module', + 'java_library', + "java_annotation_processor", + 'java_binary', + 'robolectric_binary', + ) + + def ResSources(self): + return self.DepsInfo().get('lint_resource_sources', []) + + def JavaFiles(self): + if self._java_files is None: + target_sources_file = self.DepsInfo().get('target_sources_file') + java_files = [] + if target_sources_file: + target_sources_file = _RebasePath(target_sources_file) + java_files = build_utils.ReadSourcesList(target_sources_file) + self._java_files = java_files + return self._java_files + + def PrebuiltJars(self): + return self.Gradle().get('dependent_prebuilt_jars', []) + + def AllEntries(self): + """Returns a list of all entries that the current entry depends on. + + This includes the entry itself to make iterating simpler.""" + if self._all_entries is None: + logging.debug('Generating entries for %s', self.GnTarget()) + deps = [_ProjectEntry.FromBuildConfigPath(p) + for p in self.Gradle()['dependent_android_projects']] + deps.extend(_ProjectEntry.FromBuildConfigPath(p) + for p in self.Gradle()['dependent_java_projects']) + all_entries = set() + for dep in deps: + all_entries.update(dep.AllEntries()) + all_entries.add(self) + self._all_entries = list(all_entries) + return self._all_entries + + +class _ProjectContextGenerator: + """Helper class to generate gradle build files""" + def __init__(self, project_dir, build_vars, use_gradle_process_resources, + jinja_processor, split_projects): + self.project_dir = project_dir + self.build_vars = build_vars + self.use_gradle_process_resources = use_gradle_process_resources + self.jinja_processor = jinja_processor + self.split_projects = split_projects + self.processed_java_dirs = set() + self.processed_prebuilts = set() + self.processed_res_dirs = set() + + def _GenJniLibs(self, root_entry): + libraries = [] + for entry in self._GetEntries(root_entry): + libraries += entry.BuildConfig().get('native', {}).get('libraries', []) + if libraries: + return _CreateJniLibsDir(constants.GetOutDirectory(), + self.EntryOutputDir(root_entry), libraries) + return [] + + def _GenJavaDirs(self, root_entry): + java_files = [] + for entry in self._GetEntries(root_entry): + java_files += entry.JavaFiles() + java_dirs, excludes = _ComputeJavaSourceDirsAndExcludes( + constants.GetOutDirectory(), java_files) + return java_dirs, excludes + + def _GenCustomManifest(self, entry): + """Returns the path to the generated AndroidManifest.xml. 
+ + Gradle uses package id from manifest when generating R.class. So, we need + to generate a custom manifest if we let gradle process resources. We cannot + simply set android.defaultConfig.applicationId because it is not supported + for library targets.""" + resource_packages = entry.Javac().get('resource_packages') + if not resource_packages: + logging.debug( + 'Target %s includes resources from unknown package. ' + 'Unable to process with gradle.', entry.GnTarget()) + return _DEFAULT_ANDROID_MANIFEST_PATH + if len(resource_packages) > 1: + logging.debug( + 'Target %s includes resources from multiple packages. ' + 'Unable to process with gradle.', entry.GnTarget()) + return _DEFAULT_ANDROID_MANIFEST_PATH + + variables = {'package': resource_packages[0]} + data = self.jinja_processor.Render(_TemplatePath('manifest'), variables) + output_file = os.path.join( + self.EntryOutputDir(entry), 'AndroidManifest.xml') + _WriteFile(output_file, data) + + return output_file + + def _Relativize(self, entry, paths): + return _RebasePath(paths, self.EntryOutputDir(entry)) + + def _GetEntries(self, entry): + if self.split_projects: + return [entry] + return entry.AllEntries() + + def EntryOutputDir(self, entry): + return os.path.join(self.project_dir, entry.GradleSubdir()) + + def GeneratedInputs(self, root_entry): + generated_inputs = set() + for entry in self._GetEntries(root_entry): + generated_inputs.update(entry.PrebuiltJars()) + return generated_inputs + + def GenerateManifest(self, root_entry): + android_manifest = root_entry.DepsInfo().get('android_manifest') + if not android_manifest: + android_manifest = self._GenCustomManifest(root_entry) + return self._Relativize(root_entry, android_manifest) + + def Generate(self, root_entry): + # TODO(agrieve): Add an option to use interface jars and see if that speeds + # things up at all. + variables = {} + java_dirs, excludes = self._GenJavaDirs(root_entry) + java_dirs.extend( + e.GeneratedJavaSubdir() for e in self._GetEntries(root_entry)) + self.processed_java_dirs.update(java_dirs) + java_dirs.sort() + variables['java_dirs'] = self._Relativize(root_entry, java_dirs) + variables['java_excludes'] = excludes + variables['jni_libs'] = self._Relativize( + root_entry, set(self._GenJniLibs(root_entry))) + prebuilts = set( + p for e in self._GetEntries(root_entry) for p in e.PrebuiltJars()) + self.processed_prebuilts.update(prebuilts) + variables['prebuilts'] = self._Relativize(root_entry, prebuilts) + res_sources_files = _RebasePath( + set(p for e in self._GetEntries(root_entry) for p in e.ResSources())) + res_sources = [] + for res_sources_file in res_sources_files: + res_sources.extend(build_utils.ReadSourcesList(res_sources_file)) + res_dirs = resource_utils.DeduceResourceDirsFromFileList(res_sources) + # Do not add generated resources for the all module since it creates many + # duplicates, and currently resources are only used for editing. 
+ self.processed_res_dirs.update(res_dirs) + variables['res_dirs'] = self._Relativize(root_entry, res_dirs) + if self.split_projects: + deps = [_ProjectEntry.FromBuildConfigPath(p) + for p in root_entry.Gradle()['dependent_android_projects']] + variables['android_project_deps'] = [d.ProjectName() for d in deps] + deps = [_ProjectEntry.FromBuildConfigPath(p) + for p in root_entry.Gradle()['dependent_java_projects']] + variables['java_project_deps'] = [d.ProjectName() for d in deps] + return variables + + +def _ComputeJavaSourceDirs(java_files): + """Returns a dictionary mapping each source dir to the given files within it.""" + found_roots = {} + for path in java_files: + path_root = path + # Recognize these tokens as top-level. + while True: + path_root = os.path.dirname(path_root) + basename = os.path.basename(path_root) + assert basename, 'Failed to find source dir for ' + path + if basename in ('java', 'src'): + break + if basename in ('javax', 'org', 'com'): + path_root = os.path.dirname(path_root) + break + if path_root not in found_roots: + found_roots[path_root] = [] + found_roots[path_root].append(path) + return found_roots + + +def _ComputeExcludeFilters(wanted_files, unwanted_files, parent_dir): + """Returns exclude patterns to exclude unwanted files but keep wanted files. + + - Shortens exclude list by globbing if possible. + - Exclude patterns are relative paths from the parent directory. + """ + excludes = [] + files_to_include = set(wanted_files) + files_to_exclude = set(unwanted_files) + while files_to_exclude: + unwanted_file = files_to_exclude.pop() + target_exclude = os.path.join( + os.path.dirname(unwanted_file), '*.java') + found_files = set(glob.glob(target_exclude)) + valid_files = found_files & files_to_include + if valid_files: + excludes.append(os.path.relpath(unwanted_file, parent_dir)) + else: + excludes.append(os.path.relpath(target_exclude, parent_dir)) + files_to_exclude -= found_files + return excludes + + +def _ComputeJavaSourceDirsAndExcludes(output_dir, source_files): + """Computes the list of java source directories and exclude patterns. + + This includes both Java and Kotlin files since both are listed in the same + "java" section for gradle. + + 1. Computes the root source directories from the list of files. + 2. Computes exclude patterns that exclude all extra files only. + 3. Returns the list of source directories and exclude patterns. + """ + java_dirs = [] + excludes = [] + if source_files: + source_files = _RebasePath(source_files) + computed_dirs = _ComputeJavaSourceDirs(source_files) + java_dirs = list(computed_dirs.keys()) + all_found_source_files = set() + + for directory, files in computed_dirs.items(): + found_source_files = (build_utils.FindInDirectory(directory, '*.java') + + build_utils.FindInDirectory(directory, '*.kt')) + all_found_source_files.update(found_source_files) + unwanted_source_files = set(found_source_files) - set(files) + if unwanted_source_files: + logging.debug('Directory requires excludes: %s', directory) + excludes.extend( + _ComputeExcludeFilters(files, unwanted_source_files, directory)) + + missing_source_files = set(source_files) - all_found_source_files + # Warn only about non-generated files that are missing.
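+ # (Generated sources live under the output directory; a missing generated + # source usually just means it has not been built yet.)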
+ missing_source_files = [ + p for p in missing_source_files if not p.startswith(output_dir) + ] + if missing_source_files: + logging.warning('Some source files were not found: %s', + missing_source_files) + + return java_dirs, excludes + + +def _CreateRelativeSymlink(target_path, link_path): + link_dir = os.path.dirname(link_path) + relpath = os.path.relpath(target_path, link_dir) + logging.debug('Creating symlink %s -> %s', link_path, relpath) + os.symlink(relpath, link_path) + + +def _CreateJniLibsDir(output_dir, entry_output_dir, so_files): + """Creates directory with symlinked .so files if necessary. + + Returns list of JNI libs directories.""" + + if so_files: + symlink_dir = os.path.join(entry_output_dir, _JNI_LIBS_SUBDIR) + shutil.rmtree(symlink_dir, True) + abi_dir = os.path.join(symlink_dir, _ARMEABI_SUBDIR) + if not os.path.exists(abi_dir): + os.makedirs(abi_dir) + for so_file in so_files: + target_path = os.path.join(output_dir, so_file) + symlinked_path = os.path.join(abi_dir, so_file) + _CreateRelativeSymlink(target_path, symlinked_path) + + return [symlink_dir] + + return [] + + +def _ParseVersionFromFile(file_path, version_regex_string, default_version): + if os.path.exists(file_path): + content = pathlib.Path(file_path).read_text() + match = re.search(version_regex_string, content) + if match: + version = match.group(1) + logging.info('Using existing version %s in %s.', version, file_path) + return version + logging.warning('Unable to find %s in %s:\n%s', version_regex_string, + file_path, content) + return default_version + + +def _GenerateLocalProperties(sdk_dir): + """Returns the data for local.properties as a string.""" + return '\n'.join([ + '# Generated by //build/android/gradle/generate_gradle.py', + 'sdk.dir=%s' % sdk_dir, + '', + ]) + + +def _GenerateGradleWrapperProperties(file_path): + """Returns the data for gradle-wrapper.properties as a string.""" + + version = _ParseVersionFromFile(file_path, + r'/distributions/gradle-([\d.]+)-all.zip', + _DEFAULT_GRADLE_WRAPPER_VERSION) + + return '\n'.join([ + '# Generated by //build/android/gradle/generate_gradle.py', + ('distributionUrl=https\\://services.gradle.org' + f'/distributions/gradle-{version}-all.zip'), + '', + ]) + + +def _GenerateGradleProperties(): + """Returns the data for gradle.properties as a string.""" + return '\n'.join([ + '# Generated by //build/android/gradle/generate_gradle.py', + '', + '# Tells Gradle to show warnings during project sync.', + 'org.gradle.warning.mode=all', + '', + ]) + + +def _GenerateBaseVars(generator, build_vars): + variables = {} + # Avoid pre-release SDKs since Studio might not know how to download them. 
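+ # e.g. this yields 'android-33' rather than a preview id such as + # 'android-UpsideDownCake' (illustrative SDK levels).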
+ variables['compile_sdk_version'] = ('android-%s' % + build_vars['public_android_sdk_version']) + target_sdk_version = build_vars['public_android_sdk_version'] + if str(target_sdk_version).isalpha(): + target_sdk_version = '"{}"'.format(target_sdk_version) + variables['target_sdk_version'] = target_sdk_version + variables['min_sdk_version'] = build_vars['default_min_sdk_version'] + variables['use_gradle_process_resources'] = ( + generator.use_gradle_process_resources) + return variables + + +def _GenerateGradleFile(entry, generator, build_vars, jinja_processor): + """Returns the data for a project's build.gradle.""" + deps_info = entry.DepsInfo() + variables = _GenerateBaseVars(generator, build_vars) + source_set_name = 'main' + + if deps_info['type'] == 'android_apk': + target_type = 'android_apk' + elif deps_info['type'] in ('java_library', 'java_annotation_processor'): + is_prebuilt = deps_info.get('is_prebuilt', False) + gradle_treat_as_prebuilt = deps_info.get('gradle_treat_as_prebuilt', False) + if is_prebuilt or gradle_treat_as_prebuilt: + return None + if deps_info['requires_android']: + target_type = 'android_library' + else: + target_type = 'java_library' + elif deps_info['type'] == 'java_binary': + target_type = 'java_binary' + variables['main_class'] = deps_info.get('main_class') + elif deps_info['type'] == 'robolectric_binary': + target_type = 'android_junit' + source_set_name = 'test' + else: + return None + + variables['target_name'] = os.path.splitext(deps_info['name'])[0] + variables['template_type'] = target_type + variables['main'] = {} + variables[source_set_name] = generator.Generate(entry) + variables['main']['android_manifest'] = generator.GenerateManifest(entry) + + if entry.android_test_entries: + variables['android_test'] = [] + for e in entry.android_test_entries: + test_entry = generator.Generate(e) + test_entry['android_manifest'] = generator.GenerateManifest(e) + variables['android_test'].append(test_entry) + for key, value in test_entry.items(): + if isinstance(value, list): + test_entry[key] = sorted(set(value) - set(variables['main'][key])) + + return jinja_processor.Render( + _TemplatePath(target_type.split('_')[0]), variables) + + +# Example: //chrome/android:monochrome +def _GetNative(relative_func, target_names): + """Returns an object containing the native C++ sources list and its include paths. + + Iterates through all target_names and their deps to get the list of include + paths and sources.""" + out_dir = constants.GetOutDirectory() + with open(os.path.join(out_dir, 'project.json'), 'r') as project_file: + projects = json.load(project_file) + project_targets = projects['targets'] + root_dir = projects['build_settings']['root_path'] + includes = set() + processed_target = set() + targets_stack = list(target_names) + sources = [] + + while targets_stack: + target_name = targets_stack.pop() + if target_name in processed_target: + continue + processed_target.add(target_name) + target = project_targets[target_name] + includes.update(target.get('include_dirs', [])) + targets_stack.extend(target.get('deps', [])) + # Ignore generated files. + sources.extend(f for f in target.get('sources', []) + if f.endswith('.cc') and not f.startswith('//out')) + + def process_paths(paths): + # Ignores leading // + return relative_func( + sorted(os.path.join(root_dir, path[2:]) for path in paths)) + + return { + 'sources': process_paths(sources), + 'includes': process_paths(includes), + } + + +def _GenerateModuleAll(gradle_output_dir, generator, build_vars, + jinja_processor,
native_targets): + """Returns the data for a pseudo build.gradle of all dirs. + + See //docs/android_studio.md for more details.""" + variables = _GenerateBaseVars(generator, build_vars) + target_type = 'android_apk' + variables['target_name'] = _MODULE_ALL + variables['template_type'] = target_type + java_dirs = sorted(generator.processed_java_dirs) + prebuilts = sorted(generator.processed_prebuilts) + res_dirs = sorted(generator.processed_res_dirs) + def Relativize(paths): + return _RebasePath(paths, os.path.join(gradle_output_dir, _MODULE_ALL)) + + # After clank modularization, the java and javatests code will live side by + # side in the same module, so we list both of them in the main target here. + main_java_dirs = [d for d in java_dirs if 'junit/' not in d] + junit_test_java_dirs = [d for d in java_dirs if 'junit/' in d] + variables['main'] = { + 'android_manifest': Relativize(_DEFAULT_ANDROID_MANIFEST_PATH), + 'java_dirs': Relativize(main_java_dirs), + 'prebuilts': Relativize(prebuilts), + 'java_excludes': ['**/*.java', '**/*.kt'], + 'res_dirs': Relativize(res_dirs), + } + variables['android_test'] = [{ + 'java_dirs': Relativize(junit_test_java_dirs), + 'java_excludes': ['**/*.java', '**/*.kt'], + }] + if native_targets: + variables['native'] = _GetNative( + relative_func=Relativize, target_names=native_targets) + data = jinja_processor.Render( + _TemplatePath(target_type.split('_')[0]), variables) + _WriteFile( + os.path.join(gradle_output_dir, _MODULE_ALL, _GRADLE_BUILD_FILE), data) + if native_targets: + cmake_data = jinja_processor.Render(_TemplatePath('cmake'), variables) + _WriteFile( + os.path.join(gradle_output_dir, _MODULE_ALL, _CMAKE_FILE), cmake_data) + + +def _GenerateRootGradle(jinja_processor, file_path): + """Returns the data for the root project's build.gradle.""" + android_gradle_plugin_version = _ParseVersionFromFile( + file_path, r'com.android.tools.build:gradle:([\d.]+)', + _DEFAULT_ANDROID_GRADLE_PLUGIN_VERSION) + kotlin_gradle_plugin_version = _ParseVersionFromFile( + file_path, r'org.jetbrains.kotlin:kotlin-gradle-plugin:([\d.]+)', + _DEFAULT_KOTLIN_GRADLE_PLUGIN_VERSION) + + return jinja_processor.Render( + _TemplatePath('root'), { + 'android_gradle_plugin_version': android_gradle_plugin_version, + 'kotlin_gradle_plugin_version': kotlin_gradle_plugin_version, + }) + + +def _GenerateSettingsGradle(project_entries): + """Returns the data for settings.gradle.""" + project_name = os.path.basename(os.path.dirname(host_paths.DIR_SOURCE_ROOT)) + lines = [] + lines.append('// Generated by //build/android/gradle/generate_gradle.py') + lines.append('rootProject.name = "%s"' % project_name) + lines.append('rootProject.projectDir = settingsDir') + lines.append('') + for name, subdir in project_entries: + # Example target: + # android_webview:android_webview_java__build_config_crbug_908819 + lines.append('include ":%s"' % name) + lines.append('project(":%s").projectDir = new File(settingsDir, "%s")' % + (name, subdir)) + return '\n'.join(lines) + + +def _FindAllProjectEntries(main_entries): + """Returns the list of all _ProjectEntry instances given the root project.""" + found = set() + to_scan = list(main_entries) + while to_scan: + cur_entry = to_scan.pop() + if cur_entry in found: + continue + found.add(cur_entry) + sub_config_paths = cur_entry.DepsInfo()['deps_configs'] + to_scan.extend( + _ProjectEntry.FromBuildConfigPath(p) for p in sub_config_paths) + return list(found) + + +def _CombineTestEntries(entries): + """Combines test apks into the androidTest source
set of their target. + + - Speeds up Android Studio + - Adds proper dependency between test and apk_under_test + - Doesn't work for junit yet due to resulting circular dependencies + - e.g. base_junit_tests > base_junit_test_support > base_java + """ + combined_entries = [] + android_test_entries = collections.defaultdict(list) + for entry in entries: + target_name = entry.GnTarget() + if (target_name.endswith(_INSTRUMENTATION_TARGET_SUFFIX) + and 'apk_under_test' in entry.Gradle()): + apk_name = entry.Gradle()['apk_under_test'] + android_test_entries[apk_name].append(entry) + else: + combined_entries.append(entry) + for entry in combined_entries: + target_name = entry.DepsInfo()['name'] + if target_name in android_test_entries: + entry.android_test_entries = android_test_entries[target_name] + del android_test_entries[target_name] + # Add unmatched test entries as individual targets. + combined_entries.extend(e for l in android_test_entries.values() for e in l) + return combined_entries + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--output-directory', + help='Path to the root build directory.') + parser.add_argument('-v', + '--verbose', + dest='verbose_count', + default=0, + action='count', + help='Verbose level') + parser.add_argument('--target', + dest='targets', + action='append', + help='GN target to generate project for. Replaces set of ' + 'default targets. May be repeated.') + parser.add_argument('--extra-target', + dest='extra_targets', + action='append', + help='GN target to generate project for, in addition to ' + 'the default ones. May be repeated.') + parser.add_argument('--project-dir', + help='Root of the output project.', + default=os.path.join('$CHROMIUM_OUTPUT_DIR', 'gradle')) + parser.add_argument('--all', + action='store_true', + help='Include all .java files reachable from any ' + 'apk/test/binary target. On by default unless ' + '--split-projects is used (--split-projects can ' + 'slow down Studio when given too many targets).') + parser.add_argument('--use-gradle-process-resources', + action='store_true', + help='Have gradle generate R.java rather than ninja') + parser.add_argument('--split-projects', + action='store_true', + help='Split projects by their gn deps rather than ' + 'combining all the dependencies of each target') + parser.add_argument('--native-target', + dest='native_targets', + action='append', + help='GN native targets to generate for. May be ' + 'repeated.') + parser.add_argument( + '--sdk-path', + default=os.path.expanduser('~/Android/Sdk'), + help='The path to use as the SDK root; overrides the ' + 'default at ~/Android/Sdk.') + args = parser.parse_args() + if args.output_directory: + constants.SetOutputDirectory(args.output_directory) + constants.CheckOutputDirectory() + output_dir = constants.GetOutDirectory() + devil_chromium.Initialize(output_directory=output_dir) + run_tests_helper.SetLogLevel(args.verbose_count) + + if args.use_gradle_process_resources: + assert args.split_projects, ( + 'Gradle resources do not work without --split-projects.') + + _gradle_output_dir = os.path.abspath( + args.project_dir.replace('$CHROMIUM_OUTPUT_DIR', output_dir)) + logging.warning('Creating project at: %s', _gradle_output_dir) + + # Generate for "all targets" by default when not using --split-projects (too + # slow), and when no --target has been explicitly set. "all targets" means all + # java targets that are depended on by an apk or java_binary (leaf + # java_library targets will not be included).
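+ # e.g. running the script with no --target and no --split-projects flags + # generates just the single '_all' pseudo module.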
+ args.all = args.all or (not args.split_projects and not args.targets) + + targets_from_args = set(args.targets or _DEFAULT_TARGETS) + if args.extra_targets: + targets_from_args.update(args.extra_targets) + + if args.all: + if args.native_targets: + _RunGnGen(output_dir, ['--ide=json']) + elif not os.path.exists(os.path.join(output_dir, 'build.ninja')): + _RunGnGen(output_dir) + else: + # Faster than running "gn gen" in the no-op case. + _RunNinja(output_dir, ['build.ninja']) + # Query ninja for all __build_config_crbug_908819 targets. + targets = _QueryForAllGnTargets(output_dir) + else: + assert not args.native_targets, 'Native editing requires --all.' + targets = [ + re.sub(r'_test_apk$', _INSTRUMENTATION_TARGET_SUFFIX, t) + for t in targets_from_args + ] + # Necessary after "gn clean" + if not os.path.exists( + os.path.join(output_dir, gn_helpers.BUILD_VARS_FILENAME)): + _RunGnGen(output_dir) + + build_vars = gn_helpers.ReadBuildVars(output_dir) + jinja_processor = jinja_template.JinjaProcessor(_FILE_DIR) + generator = _ProjectContextGenerator(_gradle_output_dir, build_vars, + args.use_gradle_process_resources, + jinja_processor, args.split_projects) + + main_entries = [_ProjectEntry.FromGnTarget(t) for t in targets] + + if args.all: + # There are many unused libraries, so restrict to those that are actually + # used by apks/bundles/binaries/tests or that are explicitly mentioned in + # --targets. + BASE_TYPES = ('android_apk', 'android_app_bundle_module', 'java_binary', + 'robolectric_binary') + main_entries = [ + e for e in main_entries + if (e.GetType() in BASE_TYPES or e.GnTarget() in targets_from_args + or e.GnTarget().endswith(_INSTRUMENTATION_TARGET_SUFFIX)) + ] + + if args.split_projects: + main_entries = _FindAllProjectEntries(main_entries) + + logging.info('Generating for %d targets.', len(main_entries)) + + entries = [e for e in _CombineTestEntries(main_entries) if e.IsValid()] + logging.info('Creating %d projects for targets.', len(entries)) + + logging.warning('Writing .gradle files...') + project_entries = [] + # When only one entry will be generated we want it to have a valid + # build.gradle file with its own AndroidManifest. + for entry in entries: + data = _GenerateGradleFile(entry, generator, build_vars, jinja_processor) + if data and not args.all: + project_entries.append((entry.ProjectName(), entry.GradleSubdir())) + _WriteFile( + os.path.join(generator.EntryOutputDir(entry), _GRADLE_BUILD_FILE), + data) + if args.all: + project_entries.append((_MODULE_ALL, _MODULE_ALL)) + _GenerateModuleAll(_gradle_output_dir, generator, build_vars, + jinja_processor, args.native_targets) + + root_gradle_path = os.path.join(generator.project_dir, _GRADLE_BUILD_FILE) + _WriteFile(root_gradle_path, + _GenerateRootGradle(jinja_processor, root_gradle_path)) + + _WriteFile(os.path.join(generator.project_dir, 'settings.gradle'), + _GenerateSettingsGradle(project_entries)) + + # Ensure the Android Studio sdk is correctly initialized. + if not os.path.exists(args.sdk_path): + # Help first-time users avoid Android Studio forcibly changing back to + # the previous default due to not finding a valid sdk under this dir. 
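+ # local.properties (written below) then points sdk.dir at this copy, + # e.g. sdk.dir=/home/me/Android/Sdk (illustrative path).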
+ shutil.copytree(_RebasePath(build_vars['android_sdk_root']), args.sdk_path) + _WriteFile( + os.path.join(generator.project_dir, 'local.properties'), + _GenerateLocalProperties(args.sdk_path)) + _WriteFile(os.path.join(generator.project_dir, 'gradle.properties'), + _GenerateGradleProperties()) + + wrapper_properties = os.path.join(generator.project_dir, 'gradle', 'wrapper', + 'gradle-wrapper.properties') + _WriteFile(wrapper_properties, + _GenerateGradleWrapperProperties(wrapper_properties)) + + generated_inputs = set() + for entry in entries: + entries_to_gen = [entry] + entries_to_gen.extend(entry.android_test_entries) + for entry_to_gen in entries_to_gen: + # Build all paths references by .gradle that exist within output_dir. + generated_inputs.update(generator.GeneratedInputs(entry_to_gen)) + if generated_inputs: + # Skip targets outside the output_dir since those are not generated. + targets = [ + p for p in _RebasePath(generated_inputs, output_dir) + if not p.startswith(os.pardir) + ] + _RunNinja(output_dir, targets) + + print('Generated projects for Android Studio.') + print('** Building using Android Studio / Gradle does not work.') + print('** This project is only for IDE editing & tools.') + print('Note: Generated files will appear only if they have been built') + print('For more tips: https://chromium.googlesource.com/chromium/src.git/' + '+/main/docs/android_studio.md') + + +if __name__ == '__main__': + main() diff --git a/android/gradle/java.jinja b/android/gradle/java.jinja new file mode 100644 index 000000000000..61886e918a29 --- /dev/null +++ b/android/gradle/java.jinja @@ -0,0 +1,41 @@ +{# Copyright 2016 The Chromium Authors. All rights reserved. #} +{# Use of this source code is governed by a BSD-style license that can be #} +{# found in the LICENSE file. #} +// Generated by //build/android/generate_gradle.py + +apply plugin: "java" +{% if template_type == 'java_binary' %} +apply plugin: "application" +{% endif %} + +sourceSets { + main { + java.srcDirs = [ +{% for path in main.java_dirs %} + "{{ path }}", +{% endfor %} + ] +{% if main.java_excludes is defined %} + java.filter.exclude([ +{% for path in main.java_excludes %} + "{{ path }}", +{% endfor %} + ]) +{% endif %} + } +} + +sourceCompatibility = JavaVersion.VERSION_11 +targetCompatibility = JavaVersion.VERSION_11 + +{% if template_type == 'java_binary' %} +applicationName = "{{ target_name }}" +{% if main_class %} +mainClassName = "{{ main_class }}" +{% endif %} +{% endif %} +{% if template_type in ('java_binary', 'java_library') %} +archivesBaseName = "{{ target_name }}" +{% endif %} + +{% include 'dependencies.jinja' %} diff --git a/android/gradle/manifest.jinja b/android/gradle/manifest.jinja new file mode 100644 index 000000000000..dea7071eb6e3 --- /dev/null +++ b/android/gradle/manifest.jinja @@ -0,0 +1,7 @@ +{# Copyright 2017 The Chromium Authors. All rights reserved. #} +{# Use of this source code is governed by a BSD-style license that can be #} +{# found in the LICENSE file. #} + + + diff --git a/android/gradle/root.jinja b/android/gradle/root.jinja new file mode 100644 index 000000000000..8009ebe0715e --- /dev/null +++ b/android/gradle/root.jinja @@ -0,0 +1,24 @@ +{# Copyright 2016 The Chromium Authors. All rights reserved. #} +{# Use of this source code is governed by a BSD-style license that can be #} +{# found in the LICENSE file. #} +// Generated by //build/android/generate_gradle.py + +// This section is used to find the plugins. 
+buildscript { + repositories { + google() + mavenCentral() + } + dependencies { + classpath "com.android.tools.build:gradle:{{ android_gradle_plugin_version }}" + classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:{{ kotlin_gradle_plugin_version }}" + } +} + +// This is used by individual modules to find/fetch dependencies. +allprojects { + repositories { + google() + mavenCentral() + } +} \ No newline at end of file diff --git a/android/gtest_apk/BUILD.gn b/android/gtest_apk/BUILD.gn new file mode 100644 index 000000000000..69b0889c9f41 --- /dev/null +++ b/android/gtest_apk/BUILD.gn @@ -0,0 +1,15 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/rules.gni") + +android_library("native_test_instrumentation_test_runner_java") { + testonly = true + sources = [ + "java/src/org/chromium/build/gtest_apk/NativeTestInstrumentationTestRunner.java", + "java/src/org/chromium/build/gtest_apk/NativeTestIntent.java", + "java/src/org/chromium/build/gtest_apk/TestStatusIntent.java", + "java/src/org/chromium/build/gtest_apk/TestStatusReceiver.java", + ] +} diff --git a/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestInstrumentationTestRunner.java b/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestInstrumentationTestRunner.java new file mode 100644 index 000000000000..7f5c4a8172c2 --- /dev/null +++ b/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestInstrumentationTestRunner.java @@ -0,0 +1,281 @@ +// Copyright 2014 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.build.gtest_apk; + +import android.annotation.SuppressLint; +import android.app.Activity; +import android.app.ActivityManager; +import android.app.Instrumentation; +import android.content.ComponentName; +import android.content.Context; +import android.content.Intent; +import android.os.Bundle; +import android.os.Environment; +import android.os.Handler; +import android.os.Process; +import android.text.TextUtils; +import android.util.Log; +import android.util.SparseArray; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileReader; +import java.io.IOException; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Queue; +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * An Instrumentation that runs tests based on NativeTest. 
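+ * + * The test list is split into shards of at most the configured shard size + * limit; each shard is joined with ':' into a gtest filter and run in its own + * activity launch.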
+ */ +public class NativeTestInstrumentationTestRunner extends Instrumentation { + private static final String EXTRA_NATIVE_TEST_ACTIVITY = + "org.chromium.native_test.NativeTestInstrumentationTestRunner.NativeTestActivity"; + private static final String EXTRA_SHARD_NANO_TIMEOUT = + "org.chromium.native_test.NativeTestInstrumentationTestRunner.ShardNanoTimeout"; + private static final String EXTRA_SHARD_SIZE_LIMIT = + "org.chromium.native_test.NativeTestInstrumentationTestRunner.ShardSizeLimit"; + private static final String EXTRA_STDOUT_FILE = + "org.chromium.native_test.NativeTestInstrumentationTestRunner.StdoutFile"; + private static final String EXTRA_TEST_LIST_FILE = + "org.chromium.native_test.NativeTestInstrumentationTestRunner.TestList"; + private static final String EXTRA_TEST = + "org.chromium.native_test.NativeTestInstrumentationTestRunner.Test"; + + private static final String TAG = "NativeTest"; + + private static final long DEFAULT_SHARD_NANO_TIMEOUT = 60 * 1000000000L; + // Default to no size limit. + private static final int DEFAULT_SHARD_SIZE_LIMIT = 0; + + private Handler mHandler = new Handler(); + private Bundle mLogBundle = new Bundle(); + private SparseArray<ShardMonitor> mMonitors = new SparseArray<ShardMonitor>(); + private String mNativeTestActivity; + private TestStatusReceiver mReceiver; + private Queue<String> mShards = new ArrayDeque<String>(); + private long mShardNanoTimeout = DEFAULT_SHARD_NANO_TIMEOUT; + private int mShardSizeLimit = DEFAULT_SHARD_SIZE_LIMIT; + private File mStdoutFile; + private Bundle mTransparentArguments; + + @Override + public void onCreate(Bundle arguments) { + Context context = getContext(); + mTransparentArguments = new Bundle(arguments); + + mNativeTestActivity = arguments.getString(EXTRA_NATIVE_TEST_ACTIVITY); + if (mNativeTestActivity == null) { + Log.e(TAG, + "Unable to find org.chromium.native_test.NativeUnitTestActivity extra on " + + "NativeTestInstrumentationTestRunner launch intent."); + finish(Activity.RESULT_CANCELED, new Bundle()); + return; + } + mTransparentArguments.remove(EXTRA_NATIVE_TEST_ACTIVITY); + + String shardNanoTimeout = arguments.getString(EXTRA_SHARD_NANO_TIMEOUT); + if (shardNanoTimeout != null) mShardNanoTimeout = Long.parseLong(shardNanoTimeout); + mTransparentArguments.remove(EXTRA_SHARD_NANO_TIMEOUT); + + String shardSizeLimit = arguments.getString(EXTRA_SHARD_SIZE_LIMIT); + if (shardSizeLimit != null) mShardSizeLimit = Integer.parseInt(shardSizeLimit); + mTransparentArguments.remove(EXTRA_SHARD_SIZE_LIMIT); + + String stdoutFile = arguments.getString(EXTRA_STDOUT_FILE); + if (stdoutFile != null) { + mStdoutFile = new File(stdoutFile); + } else { + try { + mStdoutFile = File.createTempFile( + ".temp_stdout_", ".txt", Environment.getExternalStorageDirectory()); + Log.i(TAG, "stdout file created: " + mStdoutFile.getAbsolutePath()); + } catch (IOException e) { + Log.e(TAG, "Unable to create temporary stdout file.", e); + finish(Activity.RESULT_CANCELED, new Bundle()); + return; + } + } + + mTransparentArguments.remove(EXTRA_STDOUT_FILE); + + String singleTest = arguments.getString(EXTRA_TEST); + if (singleTest != null) { + mShards.add(singleTest); + } + + String testListFilePath = arguments.getString(EXTRA_TEST_LIST_FILE); + if (testListFilePath != null) { + File testListFile = new File(testListFilePath); + try { + BufferedReader testListFileReader = + new BufferedReader(new FileReader(testListFile)); + + String test; + ArrayList<String> workingShard = new ArrayList<String>(); + while ((test = testListFileReader.readLine()) != null) { + workingShard.add(test); +
if (workingShard.size() == mShardSizeLimit) { + mShards.add(TextUtils.join(":", workingShard)); + workingShard = new ArrayList<String>(); + } + } + + if (!workingShard.isEmpty()) { + mShards.add(TextUtils.join(":", workingShard)); + } + + testListFileReader.close(); + } catch (IOException e) { + Log.e(TAG, "Error reading " + testListFile.getAbsolutePath(), e); + } + } + mTransparentArguments.remove(EXTRA_TEST_LIST_FILE); + + start(); + } + + @Override + @SuppressLint("DefaultLocale") + public void onStart() { + super.onStart(); + + mReceiver = new TestStatusReceiver(); + mReceiver.register(getContext()); + mReceiver.registerCallback(new TestStatusReceiver.TestRunCallback() { + @Override + public void testRunStarted(int pid) { + if (pid != Process.myPid()) { + ShardMonitor m = new ShardMonitor(pid, System.nanoTime() + mShardNanoTimeout); + mMonitors.put(pid, m); + mHandler.post(m); + } + } + + @Override + public void testRunFinished(int pid) { + ShardMonitor m = mMonitors.get(pid); + if (m != null) { + m.stopped(); + mMonitors.remove(pid); + } + mHandler.post(new ShardEnder(pid)); + } + + @Override + public void uncaughtException(int pid, String stackTrace) { + mLogBundle.putString(Instrumentation.REPORT_KEY_STREAMRESULT, + String.format("Uncaught exception in test process (pid: %d)%n%s%n", pid, + stackTrace)); + sendStatus(0, mLogBundle); + } + }); + + mHandler.post(new ShardStarter()); + } + + /** Monitors a test shard's execution. */ + private class ShardMonitor implements Runnable { + private static final int MONITOR_FREQUENCY_MS = 1000; + + private long mExpirationNanoTime; + private int mPid; + private AtomicBoolean mStopped; + + public ShardMonitor(int pid, long expirationNanoTime) { + mPid = pid; + mExpirationNanoTime = expirationNanoTime; + mStopped = new AtomicBoolean(false); + } + + public void stopped() { + mStopped.set(true); + } + + @Override + public void run() { + if (mStopped.get()) { + return; + } + + if (isAppProcessAlive(getContext(), mPid)) { + if (System.nanoTime() > mExpirationNanoTime) { + Log.e(TAG, String.format("Test process %d timed out.", mPid)); + mHandler.post(new ShardEnder(mPid)); + return; + } else { + mHandler.postDelayed(this, MONITOR_FREQUENCY_MS); + return; + } + } + + Log.e(TAG, String.format("Test process %d died unexpectedly.", mPid)); + mHandler.post(new ShardEnder(mPid)); + } + } + + private static boolean isAppProcessAlive(Context context, int pid) { + ActivityManager activityManager = + (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE); + for (ActivityManager.RunningAppProcessInfo processInfo : + activityManager.getRunningAppProcesses()) { + if (processInfo.pid == pid) return true; + } + return false; + } + + protected Intent createShardMainIntent() { + Intent i = new Intent(Intent.ACTION_MAIN); + i.setComponent(new ComponentName(getContext().getPackageName(), mNativeTestActivity)); + i.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK); + i.putExtras(mTransparentArguments); + if (mShards != null && !mShards.isEmpty()) { + String gtestFilter = mShards.remove(); + i.putExtra(NativeTestIntent.EXTRA_GTEST_FILTER, gtestFilter); + } + i.putExtra(NativeTestIntent.EXTRA_STDOUT_FILE, mStdoutFile.getAbsolutePath()); + return i; + } + + /** + * Starts the NativeTest Activity.
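+ * Each launch runs one shard; ShardEnder then schedules the next shard or + * finishes the instrumentation.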
+ */ + private class ShardStarter implements Runnable { + @Override + public void run() { + getContext().startActivity(createShardMainIntent()); + } + } + + private class ShardEnder implements Runnable { + private static final int WAIT_FOR_DEATH_MILLIS = 10; + + private int mPid; + + public ShardEnder(int pid) { + mPid = pid; + } + + @Override + public void run() { + if (mPid != Process.myPid()) { + Process.killProcess(mPid); + try { + while (isAppProcessAlive(getContext(), mPid)) { + Thread.sleep(WAIT_FOR_DEATH_MILLIS); + } + } catch (InterruptedException e) { + Log.e(TAG, String.format("%d may still be alive.", mPid), e); + } + } + if (mShards != null && !mShards.isEmpty()) { + mHandler.post(new ShardStarter()); + } else { + finish(Activity.RESULT_OK, new Bundle()); + } + } + } +} diff --git a/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestIntent.java b/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestIntent.java new file mode 100644 index 000000000000..202078445c01 --- /dev/null +++ b/android/gtest_apk/java/src/org/chromium/build/gtest_apk/NativeTestIntent.java @@ -0,0 +1,22 @@ +// Copyright 2020 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.build.gtest_apk; + +/** + * Extras for intent sent by NativeTestInstrumentationTestRunner. + */ +public class NativeTestIntent { + public static final String EXTRA_COMMAND_LINE_FILE = + "org.chromium.native_test.NativeTest.CommandLineFile"; + public static final String EXTRA_COMMAND_LINE_FLAGS = + "org.chromium.native_test.NativeTest.CommandLineFlags"; + public static final String EXTRA_RUN_IN_SUB_THREAD = + "org.chromium.native_test.NativeTest.RunInSubThread"; + public static final String EXTRA_GTEST_FILTER = + "org.chromium.native_test.NativeTest.GtestFilter"; + public static final String EXTRA_STDOUT_FILE = "org.chromium.native_test.NativeTest.StdoutFile"; + public static final String EXTRA_COVERAGE_DEVICE_FILE = + "org.chromium.native_test.NativeTest.CoverageDeviceFile"; +} diff --git a/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusIntent.java b/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusIntent.java new file mode 100644 index 000000000000..98ebf443b3d5 --- /dev/null +++ b/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusIntent.java @@ -0,0 +1,21 @@ +// Copyright 2020 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.build.gtest_apk; + +/** + * Intent action and extras of broadcasts intercepted by TestStatusReceiver. 
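+ * For example, the TEST_RUN_STARTED and TEST_RUN_FINISHED broadcasts carry + * the test process pid in EXTRA_PID.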
+ */ +public class TestStatusIntent { + public static final String ACTION_TEST_RUN_STARTED = + "org.chromium.test.reporter.TestStatusReporter.TEST_RUN_STARTED"; + public static final String ACTION_TEST_RUN_FINISHED = + "org.chromium.test.reporter.TestStatusReporter.TEST_RUN_FINISHED"; + public static final String ACTION_UNCAUGHT_EXCEPTION = + "org.chromium.test.reporter.TestStatusReporter.UNCAUGHT_EXCEPTION"; + public static final String DATA_TYPE_RESULT = "org.chromium.test.reporter/result"; + public static final String EXTRA_PID = "org.chromium.test.reporter.TestStatusReporter.PID"; + public static final String EXTRA_STACK_TRACE = + "org.chromium.test.reporter.TestStatusReporter.STACK_TRACE"; +} diff --git a/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusReceiver.java b/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusReceiver.java new file mode 100644 index 000000000000..71c56a6edfd5 --- /dev/null +++ b/android/gtest_apk/java/src/org/chromium/build/gtest_apk/TestStatusReceiver.java @@ -0,0 +1,89 @@ +// Copyright 2015 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.build.gtest_apk; + +import android.content.BroadcastReceiver; +import android.content.Context; +import android.content.Intent; +import android.content.IntentFilter; +import android.util.Log; + +import java.util.ArrayList; +import java.util.List; + +/** + Receives test status broadcasts sent from + {@link org.chromium.test.reporter.TestStatusReporter}. + */ +public class TestStatusReceiver extends BroadcastReceiver { + private static final String TAG = "test_reporter"; + + private final List<TestRunCallback> mTestRunCallbacks = new ArrayList<TestRunCallback>(); + + /** An IntentFilter that matches the intents that this class can receive. */ + private static final IntentFilter INTENT_FILTER; + static { + IntentFilter filter = new IntentFilter(); + filter.addAction(TestStatusIntent.ACTION_TEST_RUN_STARTED); + filter.addAction(TestStatusIntent.ACTION_TEST_RUN_FINISHED); + filter.addAction(TestStatusIntent.ACTION_UNCAUGHT_EXCEPTION); + try { + filter.addDataType(TestStatusIntent.DATA_TYPE_RESULT); + } catch (IntentFilter.MalformedMimeTypeException e) { + Log.wtf(TAG, "Invalid MIME type", e); + } + INTENT_FILTER = filter; + } + + /** A callback used when a test run has started or finished. */ + public interface TestRunCallback { + void testRunStarted(int pid); + void testRunFinished(int pid); + void uncaughtException(int pid, String stackTrace); + } + + /** Register a callback for when a test run has started or finished. */ + public void registerCallback(TestRunCallback c) { + mTestRunCallbacks.add(c); + } + + /** Register this receiver using the provided context. */ + public void register(Context c) { + c.registerReceiver(this, INTENT_FILTER); + } + + /** + * Receive a broadcast intent. + * + * @param context The Context in which the receiver is running. + * @param intent The intent received.
+ */ + @Override + public void onReceive(Context context, Intent intent) { + int pid = intent.getIntExtra(TestStatusIntent.EXTRA_PID, 0); + String stackTrace = intent.getStringExtra(TestStatusIntent.EXTRA_STACK_TRACE); + + switch (intent.getAction()) { + case TestStatusIntent.ACTION_TEST_RUN_STARTED: + for (TestRunCallback c : mTestRunCallbacks) { + c.testRunStarted(pid); + } + break; + case TestStatusIntent.ACTION_TEST_RUN_FINISHED: + for (TestRunCallback c : mTestRunCallbacks) { + c.testRunFinished(pid); + } + break; + case TestStatusIntent.ACTION_UNCAUGHT_EXCEPTION: + for (TestRunCallback c : mTestRunCallbacks) { + c.uncaughtException(pid, stackTrace); + } + break; + default: + Log.e(TAG, "Unrecognized intent received: " + intent.toString()); + break; + } + } +} diff --git a/android/gyp/OWNERS b/android/gyp/OWNERS new file mode 100644 index 000000000000..df0fa641f838 --- /dev/null +++ b/android/gyp/OWNERS @@ -0,0 +1,6 @@ +agrieve@chromium.org +digit@chromium.org +smaier@chromium.org +wnwen@chromium.org + +per-file create_unwind_table*.py=file://base/profiler/OWNERS \ No newline at end of file diff --git a/android/gyp/aar.py b/android/gyp/aar.py new file mode 100755 index 000000000000..512d5dbe4c7a --- /dev/null +++ b/android/gyp/aar.py @@ -0,0 +1,216 @@ +#!/usr/bin/env python3 +# +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Processes an Android AAR file.""" + +import argparse +import os +import posixpath +import re
+import shutil +import sys +from xml.etree import ElementTree +import zipfile + +from util import build_utils +import action_helpers # build_utils adds //build to sys.path. +import gn_helpers + + +_PROGUARD_TXT = 'proguard.txt' + + +def _GetManifestPackage(doc): + """Returns the package specified in the manifest. + + Args: + doc: an XML tree parsed by ElementTree + + Returns: + String representing the package name. + """ + return doc.attrib['package'] + + +def _IsManifestEmpty(doc): + """Decides whether the given manifest has merge-worthy elements. + + E.g. anything other than an empty <application> or a <uses-sdk> element. + + Args: + doc: an XML tree parsed by ElementTree + + Returns: + Whether the manifest has merge-worthy elements. + """ + for node in doc: + if node.tag == 'application': + if list(node): + return False + elif node.tag != 'uses-sdk': + return False + + return True + + +def _CreateInfo(aar_file, resource_exclusion_globs): + """Extracts and returns .info data from an .aar file. + + Args: + aar_file: Path to an input .aar file. + resource_exclusion_globs: List of globs that exclude res/ files. + + Returns: + A dict containing .info data.
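+ For example, the keys populated below include 'aidl', 'assets', + 'resources', 'subjars', 'has_classes_jar' and 'is_manifest_empty'.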
+ """ + data = {} + data['aidl'] = [] + data['assets'] = [] + data['resources'] = [] + data['subjars'] = [] + data['subjar_tuples'] = [] + data['has_classes_jar'] = False + data['has_proguard_flags'] = False + data['has_native_libraries'] = False + data['has_r_text_file'] = False + with zipfile.ZipFile(aar_file) as z: + manifest_xml = ElementTree.fromstring(z.read('AndroidManifest.xml')) + data['is_manifest_empty'] = _IsManifestEmpty(manifest_xml) + manifest_package = _GetManifestPackage(manifest_xml) + if manifest_package: + data['manifest_package'] = manifest_package + + for name in z.namelist(): + if name.endswith('/'): + continue + if name.startswith('aidl/'): + data['aidl'].append(name) + elif name.startswith('res/'): + if not build_utils.MatchesGlob(name, resource_exclusion_globs): + data['resources'].append(name) + elif name.startswith('libs/') and name.endswith('.jar'): + label = posixpath.basename(name)[:-4] + label = re.sub(r'[^a-zA-Z0-9._]', '_', label) + data['subjars'].append(name) + data['subjar_tuples'].append([label, name]) + elif name.startswith('assets/'): + data['assets'].append(name) + elif name.startswith('jni/'): + data['has_native_libraries'] = True + if 'native_libraries' in data: + data['native_libraries'].append(name) + else: + data['native_libraries'] = [name] + elif name == 'classes.jar': + data['has_classes_jar'] = True + elif name == _PROGUARD_TXT: + data['has_proguard_flags'] = True + elif name == 'R.txt': + # Some AARs, e.g. gvr_controller_java, have empty R.txt. Such AARs + # have no resources as well. We treat empty R.txt as having no R.txt. + data['has_r_text_file'] = bool(z.read('R.txt').strip()) + + return data + + +def _PerformExtract(aar_file, output_dir, name_allowlist): + with build_utils.TempDir() as tmp_dir: + tmp_dir = os.path.join(tmp_dir, 'staging') + os.mkdir(tmp_dir) + build_utils.ExtractAll( + aar_file, path=tmp_dir, predicate=name_allowlist.__contains__) + # Write a breadcrumb so that SuperSize can attribute files back to the .aar. + with open(os.path.join(tmp_dir, 'source.info'), 'w') as f: + f.write('source={}\n'.format(aar_file)) + + shutil.rmtree(output_dir, ignore_errors=True) + shutil.move(tmp_dir, output_dir) + + +def _AddCommonArgs(parser): + parser.add_argument( + 'aar_file', help='Path to the AAR file.', type=os.path.normpath) + parser.add_argument('--ignore-resources', + action='store_true', + help='Whether to skip extraction of res/') + parser.add_argument('--resource-exclusion-globs', + help='GN list of globs for res/ files to ignore') + + +def main(): + parser = argparse.ArgumentParser(description=__doc__) + command_parsers = parser.add_subparsers(dest='command') + subp = command_parsers.add_parser( + 'list', help='Output a GN scope describing the contents of the .aar.') + _AddCommonArgs(subp) + subp.add_argument('--output', help='Output file.', default='-') + + subp = command_parsers.add_parser('extract', help='Extracts the .aar') + _AddCommonArgs(subp) + subp.add_argument( + '--output-dir', + help='Output directory for the extracted files.', + required=True, + type=os.path.normpath) + subp.add_argument( + '--assert-info-file', + help='Path to .info file. 
Asserts that it matches what ' + '"list" would output.', + type=argparse.FileType('r')) + + args = parser.parse_args() + + args.resource_exclusion_globs = action_helpers.parse_gn_list( + args.resource_exclusion_globs) + if args.ignore_resources: + args.resource_exclusion_globs.append('res/*') + + aar_info = _CreateInfo(args.aar_file, args.resource_exclusion_globs) + formatted_info = """\ +# Generated by //build/android/gyp/aar.py +# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen". + +""" + gn_helpers.ToGNString(aar_info, pretty=True) + + if args.command == 'extract': + if args.assert_info_file: + cached_info = args.assert_info_file.read() + if formatted_info != cached_info: + raise Exception('android_aar_prebuilt() cached .info file is ' + 'out-of-date. Run gn gen with ' + 'update_android_aar_prebuilts=true to update it.') + + # Extract all files except for filtered res/ files. + with zipfile.ZipFile(args.aar_file) as zf: + names = {n for n in zf.namelist() if not n.startswith('res/')} + names.update(aar_info['resources']) + + _PerformExtract(args.aar_file, args.output_dir, names) + + elif args.command == 'list': + aar_output_present = args.output != '-' and os.path.isfile(args.output) + if aar_output_present: + # Some .info files are read-only, for example the cipd-controlled ones + # under third_party/android_deps/repository. To deal with these, first + # check that its content is correct, and if it is, exit without touching + # the file system. + file_info = open(args.output, 'r').read() + if file_info == formatted_info: + return + + # Try to write the file. This may fail for read-only ones that were + # not updated. + try: + with open(args.output, 'w') as f: + f.write(formatted_info) + except IOError as e: + if not aar_output_present: + raise e + raise Exception('Could not update output file: %s\n' % args.output) from e + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/android/gyp/aar.pydeps b/android/gyp/aar.pydeps new file mode 100644 index 000000000000..56f860e25761 --- /dev/null +++ b/android/gyp/aar.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aar.pydeps build/android/gyp/aar.py +../../action_helpers.py +../../gn_helpers.py +aar.py +util/__init__.py +util/build_utils.py diff --git a/android/gyp/aidl.py b/android/gyp/aidl.py new file mode 100755 index 000000000000..8eab45dd7fc1 --- /dev/null +++ b/android/gyp/aidl.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python3 +# +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Invokes Android's aidl +""" + +import optparse +import os +import re +import sys +import zipfile + +from util import build_utils +import action_helpers # build_utils adds //build to sys.path.
+import zip_helpers + + +def main(argv): + option_parser = optparse.OptionParser() + option_parser.add_option('--aidl-path', help='Path to the aidl binary.') + option_parser.add_option('--imports', help='Files to import.') + option_parser.add_option('--includes', + help='Directories to add as import search paths.') + option_parser.add_option('--srcjar', help='Path for srcjar output.') + action_helpers.add_depfile_arg(option_parser) + options, args = option_parser.parse_args(argv[1:]) + + options.includes = action_helpers.parse_gn_list(options.includes) + + with build_utils.TempDir() as temp_dir: + for f in args: + classname = os.path.splitext(os.path.basename(f))[0] + output = os.path.join(temp_dir, classname + '.java') + aidl_cmd = [options.aidl_path] + aidl_cmd += [ + '-p' + s for s in action_helpers.parse_gn_list(options.imports) + ] + aidl_cmd += ['-I' + s for s in options.includes] + aidl_cmd += [ + f, + output + ] + build_utils.CheckOutput(aidl_cmd) + + with action_helpers.atomic_output(options.srcjar) as f: + with zipfile.ZipFile(f, 'w') as srcjar: + for path in build_utils.FindInDirectory(temp_dir, '*.java'): + with open(path) as fileobj: + data = fileobj.read() + pkg_name = re.search(r'^\s*package\s+(.*?)\s*;', data, re.M).group(1) + arcname = '%s/%s' % ( + pkg_name.replace('.', '/'), os.path.basename(path)) + zip_helpers.add_to_zip_hermetic(srcjar, arcname, data=data) + + if options.depfile: + include_files = [] + for include_dir in options.includes: + include_files += build_utils.FindInDirectory(include_dir, '*.java') + action_helpers.write_depfile(options.depfile, options.srcjar, include_files) + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/android/gyp/aidl.pydeps b/android/gyp/aidl.pydeps new file mode 100644 index 000000000000..d841c9451fca --- /dev/null +++ b/android/gyp/aidl.pydeps @@ -0,0 +1,8 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aidl.pydeps build/android/gyp/aidl.py +../../action_helpers.py +../../gn_helpers.py +../../zip_helpers.py +aidl.py +util/__init__.py +util/build_utils.py diff --git a/android/gyp/allot_native_libraries.py b/android/gyp/allot_native_libraries.py new file mode 100755 index 000000000000..61daac224407 --- /dev/null +++ b/android/gyp/allot_native_libraries.py @@ -0,0 +1,186 @@ +#!/usr/bin/env python3 +# +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Allots libraries to the modules they should be packaged into. + +All libraries that are depended on by a single module will be allotted to this +module. All other libraries will be allotted to the closest ancestor. + +Example: + Given the module dependency structure + + c + / \ + b d + / \ + a e + + and libraries assignment + + a: ['lib1.so'] + e: ['lib2.so', 'lib1.so'] + + will make the allotment decision + + c: ['lib1.so'] + e: ['lib2.so'] + + The above example is invoked via: + + ./allot_native_libraries \ + --libraries 'a,["1.so"]' \ + --libraries 'e,["2.so", "1.so"]' \ + --dep c:b \ + --dep b:a \ + --dep c:d \ + --dep d:e \ + --output <output_json> +""" + +import argparse +import collections +import json +import sys + +from util import build_utils +import action_helpers # build_utils adds //build to sys.path.
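# Worked example (illustrative, mirroring the module docstring above):
#
#   module_tree = {'c': None, 'b': 'c', 'a': 'b', 'd': 'c', 'e': 'd'}
#   libraries_map = {'lib1.so': {'a', 'e'}, 'lib2.so': {'e'}}
#
#   _PathFromRoot(module_tree, 'a')                  => ['c', 'b', 'a']
#   _PathFromRoot(module_tree, 'e')                  => ['c', 'd', 'e']
#   _ClosestCommonAncestor(module_tree, {'a', 'e'})  => 'c'
#
# so _AllotLibraries() assigns lib1.so to 'c' and lib2.so to 'e', matching
# the allotment decision shown in the docstring.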
+ + +def _ModuleLibrariesPair(arg): + pos = arg.find(',') + assert pos > 0 + return (arg[:pos], arg[pos + 1:]) + + +def _DepPair(arg): + parent, child = arg.split(':') + return (parent, child) + + +def _PathFromRoot(module_tree, module): + """Computes path from root to a module. + + Parameters: + module_tree: Dictionary mapping each module to its parent. + module: Module to which to compute the path. + + Returns: + Path from root to the module. + """ + path = [module] + while module_tree.get(module): + module = module_tree[module] + path = [module] + path + return path + + +def _ClosestCommonAncestor(module_tree, modules): + """Computes the closest common ancestor of a set of modules. + + Parameters: + module_tree: Dictionary mapping each module to its parent. + modules: Set of modules for which to find the closest common ancestor. + + Returns: + The closest common ancestor. + """ + paths = [_PathFromRoot(module_tree, m) for m in modules] + assert len(paths) > 0 + ancestor = None + for level in zip(*paths): + if len(set(level)) != 1: + return ancestor + ancestor = level[0] + return ancestor + + +def _AllotLibraries(module_tree, libraries_map): + """Allots each library to a module. + + Parameters: + module_tree: Dictionary mapping each module to its parent. Modules can map + to None, which is considered the root of the tree. + libraries_map: Dictionary mapping each library to a set of modules, which + depend on the library. + + Returns: + A dictionary mapping each module name to a set of libraries allotted + to the module such that libraries with multiple dependees are allotted to + the closest ancestor. + + Raises: + Exception if some libraries can only be allotted to the None root. + """ + allotment_map = collections.defaultdict(set) + for library, modules in libraries_map.items(): + ancestor = _ClosestCommonAncestor(module_tree, modules) + if not ancestor: + raise Exception('Cannot allot libraries for given dependency tree') + allotment_map[ancestor].add(library) + return allotment_map + + +def main(args): + parser = argparse.ArgumentParser() + parser.add_argument( + '--libraries', + action='append', + type=_ModuleLibrariesPair, + required=True, + help='A pair of module name and GN list of libraries a module depends ' + 'on. Can be specified multiple times.') + parser.add_argument( + '--output', + required=True, + help='A JSON file with a key for each module mapping to a list of ' + 'libraries, which should be packaged into this module.') + parser.add_argument( + '--dep', + action='append', + type=_DepPair, + dest='deps', + default=[], + help='A pair of parent module name and child module name ' + '(format: "<parent>:<child>"). Can be specified multiple times.') + options = parser.parse_args(build_utils.ExpandFileArgs(args)) + options.libraries = [(m, action_helpers.parse_gn_list(l)) + for m, l in options.libraries] + + # Parse input creating libraries and dependency tree. + libraries_map = collections.defaultdict(set) # Maps each library to its + # dependee modules. + module_tree = {} # Maps each module name to its parent. + for module, libraries in options.libraries: + module_tree[module] = None + for library in libraries: + libraries_map[library].add(module) + for parent, child in options.deps: + if module_tree.get(child): + raise Exception('%s cannot have multiple parents' % child) + module_tree[child] = parent + module_tree[parent] = module_tree.get(parent) + + # Allot all libraries to a module such that libraries with multiple dependees + # are allotted to the closest ancestor.
+ allotment_map = _AllotLibraries(module_tree, libraries_map) + + # The build system expects there to be a set of libraries even for the modules + # that don't have any libraries allotted. + for module in module_tree: + # Creates missing sets because of defaultdict. + allotment_map[module] = allotment_map[module] + + with open(options.output, 'w') as f: + # Write native libraries config and ensure the output is deterministic. + json.dump({m: sorted(l) + for m, l in allotment_map.items()}, + f, + sort_keys=True, + indent=2) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/android/gyp/allot_native_libraries.pydeps b/android/gyp/allot_native_libraries.pydeps new file mode 100644 index 000000000000..aacaafffeb6d --- /dev/null +++ b/android/gyp/allot_native_libraries.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/allot_native_libraries.pydeps build/android/gyp/allot_native_libraries.py +../../action_helpers.py +../../gn_helpers.py +allot_native_libraries.py +util/__init__.py +util/build_utils.py diff --git a/android/gyp/apkbuilder.py b/android/gyp/apkbuilder.py new file mode 100755 index 000000000000..fa5701b6db37 --- /dev/null +++ b/android/gyp/apkbuilder.py @@ -0,0 +1,533 @@ +#!/usr/bin/env python3 +# +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Adds the code parts to a resource APK.""" + +import argparse +import logging +import os +import posixpath +import shutil +import sys +import tempfile +import zipfile +import zlib + +import finalize_apk + +from util import build_utils +from util import diff_utils +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers + + +# Taken from aapt's Package.cpp: +_NO_COMPRESS_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.gif', '.wav', '.mp2', + '.mp3', '.ogg', '.aac', '.mpg', '.mpeg', '.mid', + '.midi', '.smf', '.jet', '.rtttl', '.imy', '.xmf', + '.mp4', '.m4a', '.m4v', '.3gp', '.3gpp', '.3g2', + '.3gpp2', '.amr', '.awb', '.wma', '.wmv', '.webm') + + +def _ParseArgs(args): + parser = argparse.ArgumentParser() + action_helpers.add_depfile_arg(parser) + parser.add_argument('--assets', + action='append', + help='GYP-list of files to add as assets in the form ' + '"srcPath:zipPath", where ":zipPath" is optional.') + parser.add_argument( + '--java-resources', help='GYP-list of java_resources JARs to include.') + parser.add_argument('--write-asset-list', + action='store_true', + help='Whether to create an assets/assets_list file.') + parser.add_argument( + '--uncompressed-assets', + help='Same as --assets, except disables compression.') + parser.add_argument('--resource-apk', + help='An .ap_ file built using aapt', + required=True) + parser.add_argument('--output-apk', + help='Path to the output file', + required=True) + parser.add_argument('--format', choices=['apk', 'bundle-module'], + default='apk', help='Specify output format.') + parser.add_argument('--dex-file', + help='Path to the classes.dex to use') + parser.add_argument('--uncompress-dex', action='store_true', + help='Store .dex files uncompressed in the APK') + parser.add_argument('--native-libs', + action='append', + help='GYP-list of native libraries to include. ' + 'Can be specified multiple times.', + default=[]) + parser.add_argument('--secondary-native-libs', + action='append', + help='GYP-list of native libraries for secondary ' + 'android-abi. 
Can be specified multiple times.', + default=[]) + parser.add_argument('--android-abi', + help='Android architecture to use for native libraries') + parser.add_argument('--secondary-android-abi', + help='The secondary Android architecture to use for ' + 'secondary native libraries') + parser.add_argument( + '--is-multi-abi', + action='store_true', + help='Will add a placeholder for the missing ABI if no native libs or ' + 'placeholders are set for either the primary or secondary ABI. Can only ' + 'be set if both --android-abi and --secondary-android-abi are set.') + parser.add_argument( + '--native-lib-placeholders', + help='GYP-list of native library placeholders to add.') + parser.add_argument( + '--secondary-native-lib-placeholders', + help='GYP-list of native library placeholders to add ' + 'for the secondary ABI') + parser.add_argument('--uncompress-shared-libraries', default='False', + choices=['true', 'True', 'false', 'False'], + help='Whether to uncompress native shared libraries. Argument must be ' + 'a boolean value.') + parser.add_argument( + '--apksigner-jar', help='Path to the apksigner executable.') + parser.add_argument('--zipalign-path', + help='Path to the zipalign executable.') + parser.add_argument('--key-path', + help='Path to keystore for signing.') + parser.add_argument('--key-passwd', + help='Keystore password') + parser.add_argument('--key-name', + help='Keystore name') + parser.add_argument( + '--min-sdk-version', required=True, help='Value of APK\'s minSdkVersion') + parser.add_argument( + '--best-compression', + action='store_true', + help='Use zip -9 rather than zip -1') + parser.add_argument( + '--library-always-compress', + action='append', + help='The list of library files that we always compress.') + parser.add_argument('--warnings-as-errors', + action='store_true', + help='Treat all warnings as errors.') + diff_utils.AddCommandLineFlags(parser) + options = parser.parse_args(args) + options.assets = action_helpers.parse_gn_list(options.assets) + options.uncompressed_assets = action_helpers.parse_gn_list( + options.uncompressed_assets) + options.native_lib_placeholders = action_helpers.parse_gn_list( + options.native_lib_placeholders) + options.secondary_native_lib_placeholders = action_helpers.parse_gn_list( + options.secondary_native_lib_placeholders) + options.java_resources = action_helpers.parse_gn_list(options.java_resources) + options.native_libs = action_helpers.parse_gn_list(options.native_libs) + options.secondary_native_libs = action_helpers.parse_gn_list( + options.secondary_native_libs) + options.library_always_compress = action_helpers.parse_gn_list( + options.library_always_compress) + + if not options.android_abi and (options.native_libs or + options.native_lib_placeholders): + raise Exception('Must specify --android-abi with --native-libs') + if not options.secondary_android_abi and (options.secondary_native_libs or + options.secondary_native_lib_placeholders): + raise Exception('Must specify --secondary-android-abi with' + ' --secondary-native-libs') + if options.is_multi_abi and not (options.android_abi + and options.secondary_android_abi): + raise Exception('Must specify --is-multi-abi with both --android-abi ' + 'and --secondary-android-abi.') + return options + + +def _SplitAssetPath(path): + """Returns (src, dest) given an asset path in the form src[:dest].""" + path_parts = path.split(':') + src_path = path_parts[0] + if len(path_parts) > 1: + dest_path = path_parts[1] + else: + dest_path = os.path.basename(src_path) + return src_path,
dest_path + + +def _ExpandPaths(paths): + """Converts src:dst into tuples and enumerates files within directories. + + Args: + paths: Paths in the form "src_path:dest_path" + + Returns: + A list of (src_path, dest_path) tuples sorted by dest_path (for stable + ordering within output .apk). + """ + ret = [] + for path in paths: + src_path, dest_path = _SplitAssetPath(path) + if os.path.isdir(src_path): + for f in build_utils.FindInDirectory(src_path, '*'): + ret.append((f, os.path.join(dest_path, f[len(src_path) + 1:]))) + else: + ret.append((src_path, dest_path)) + ret.sort(key=lambda t: t[1]) + return ret + + +def _GetAssetsToAdd(path_tuples, + fast_align, + disable_compression=False, + allow_reads=True, + apk_root_dir=''): + """Returns the list of file_detail tuples for assets in the apk. + + Args: + path_tuples: List of src_path, dest_path tuples to add. + fast_align: Whether to perform alignment in python zipfile (alternatively + alignment can be done using the zipalign utility out of band). + disable_compression: Whether to disable compression. + allow_reads: If false, we do not try to read the files from disk (to find + their size for example). + apk_root_dir: Prefix for entries whose dest path starts with '../' (used + to place files at the archive root instead of under assets/). + + Returns: A list of (apk_path, src_path, compress, alignment) tuples + representing what and how assets are added. + """ + assets_to_add = [] + + # Group all uncompressed assets together in the hope that it will increase + # locality of mmap'ed files. + for target_compress in (False, True): + for src_path, dest_path in path_tuples: + compress = not disable_compression and ( + os.path.splitext(src_path)[1] not in _NO_COMPRESS_EXTENSIONS) + + if target_compress == compress: + # add_to_zip_hermetic() uses this logic to avoid growing small files. + # We need it here in order to set alignment correctly. + if allow_reads and compress and os.path.getsize(src_path) < 16: + compress = False + + if dest_path.startswith('../'): + # posixpath.join('', 'foo') == 'foo' + apk_path = posixpath.join(apk_root_dir, dest_path[3:]) + else: + apk_path = 'assets/' + dest_path + alignment = 0 if compress and not fast_align else 4 + assets_to_add.append((apk_path, src_path, compress, alignment)) + return assets_to_add + + +def _AddFiles(apk, details): + """Adds files to the apk. + + Args: + apk: path to APK to add to. + details: A list of file detail tuples (apk_path, src_path, compress, + alignment) representing what and how files are added to the APK. + """ + for apk_path, src_path, compress, alignment in details: + # This check is only relevant for assets, but it should not matter if it is + # checked for the whole list of files. + try: + apk.getinfo(apk_path) + # Should never happen since write_build_config.py handles merging. + raise Exception( + 'Multiple targets specified the asset path: %s' % apk_path) + except KeyError: + zip_helpers.add_to_zip_hermetic(apk, + apk_path, + src_path=src_path, + compress=compress, + alignment=alignment) + + +def _GetNativeLibrariesToAdd(native_libs, android_abi, fast_align, + lib_always_compress): + """Returns the list of file_detail tuples for native libraries in the apk. + + Returns: A list of (apk_path, src_path, compress, alignment) tuples + representing what and how native libraries are added.
+ """ + libraries_to_add = [] + + + for path in native_libs: + basename = os.path.basename(path) + compress = any(lib_name in basename for lib_name in lib_always_compress) + lib_android_abi = android_abi + if path.startswith('android_clang_arm64_hwasan/'): + lib_android_abi = 'arm64-v8a-hwasan' + + apk_path = 'lib/%s/%s' % (lib_android_abi, basename) + alignment = 0 if compress and not fast_align else 0x1000 + libraries_to_add.append((apk_path, path, compress, alignment)) + + return libraries_to_add + + +def _CreateExpectationsData(native_libs, assets): + """Creates list of native libraries and assets.""" + native_libs = sorted(native_libs) + assets = sorted(assets) + + ret = [] + for apk_path, _, compress, alignment in native_libs + assets: + ret.append('apk_path=%s, compress=%s, alignment=%s\n' % + (apk_path, compress, alignment)) + return ''.join(ret) + + +def main(args): + build_utils.InitLogging('APKBUILDER_DEBUG') + args = build_utils.ExpandFileArgs(args) + options = _ParseArgs(args) + + # Until Python 3.7, there's no better way to set compression level. + # The default is 6. + if options.best_compression: + # Compresses about twice as slow as the default. + zlib.Z_DEFAULT_COMPRESSION = 9 + else: + # Compresses about twice as fast as the default. + zlib.Z_DEFAULT_COMPRESSION = 1 + + # Python's zip implementation duplicates file comments in the central + # directory, whereas zipalign does not, so use zipalign for official builds. + requires_alignment = options.format == 'apk' + run_zipalign = requires_alignment and options.best_compression + fast_align = bool(requires_alignment and not run_zipalign) + + native_libs = sorted(options.native_libs) + + # Include native libs in the depfile_deps since GN doesn't know about the + # dependencies when is_component_build=true. + depfile_deps = list(native_libs) + + # For targets that depend on static library APKs, dex paths are created by + # the static library's dexsplitter target and GN doesn't know about these + # paths. + if options.dex_file: + depfile_deps.append(options.dex_file) + + secondary_native_libs = [] + if options.secondary_native_libs: + secondary_native_libs = sorted(options.secondary_native_libs) + depfile_deps += secondary_native_libs + + if options.java_resources: + # Included via .build_config.json, so need to write it to depfile. + depfile_deps.extend(options.java_resources) + + assets = _ExpandPaths(options.assets) + uncompressed_assets = _ExpandPaths(options.uncompressed_assets) + + # Included via .build_config.json, so need to write it to depfile. + depfile_deps.extend(x[0] for x in assets) + depfile_deps.extend(x[0] for x in uncompressed_assets) + depfile_deps.append(options.resource_apk) + + # Bundle modules have a structure similar to APKs, except that resources + # are compiled in protobuf format (instead of binary xml), and that some + # files are located into different top-level directories, e.g.: + # AndroidManifest.xml -> manifest/AndroidManifest.xml + # classes.dex -> dex/classes.dex + # res/ -> res/ (unchanged) + # assets/ -> assets/ (unchanged) + # -> root/ + # + # Hence, the following variables are used to control the location of files in + # the final archive. 
+ if options.format == 'bundle-module': + apk_manifest_dir = 'manifest/' + apk_root_dir = 'root/' + apk_dex_dir = 'dex/' + else: + apk_manifest_dir = '' + apk_root_dir = '' + apk_dex_dir = '' + + def _GetAssetDetails(assets, uncompressed_assets, fast_align, allow_reads): + ret = _GetAssetsToAdd(assets, + fast_align, + disable_compression=False, + allow_reads=allow_reads, + apk_root_dir=apk_root_dir) + ret.extend( + _GetAssetsToAdd(uncompressed_assets, + fast_align, + disable_compression=True, + allow_reads=allow_reads, + apk_root_dir=apk_root_dir)) + return ret + + libs_to_add = _GetNativeLibrariesToAdd(native_libs, options.android_abi, + fast_align, + options.library_always_compress) + if options.secondary_android_abi: + libs_to_add.extend( + _GetNativeLibrariesToAdd(secondary_native_libs, + options.secondary_android_abi, + fast_align, options.library_always_compress)) + + if options.expected_file: + # We compute expectations without reading the files. This allows us to check + # expectations for different targets by just generating their build_configs + # and not have to first generate all the actual files and all their + # dependencies (for example by just passing --only-verify-expectations). + asset_details = _GetAssetDetails(assets, + uncompressed_assets, + fast_align, + allow_reads=False) + + actual_data = _CreateExpectationsData(libs_to_add, asset_details) + diff_utils.CheckExpectations(actual_data, options) + + if options.only_verify_expectations: + if options.depfile: + action_helpers.write_depfile(options.depfile, + options.actual_file, + inputs=depfile_deps) + return + + # If we are past this point, we are going to actually create the final apk so + # we should recompute asset details again but maybe perform some optimizations + # based on the size of the files on disk. + assets_to_add = _GetAssetDetails( + assets, uncompressed_assets, fast_align, allow_reads=True) + + # Targets generally do not depend on apks, so no need for only_if_changed. + with action_helpers.atomic_output(options.output_apk, + only_if_changed=False) as f: + with zipfile.ZipFile(options.resource_apk) as resource_apk, \ + zipfile.ZipFile(f, 'w') as out_apk: + + def add_to_zip(zip_path, data, compress=True, alignment=4): + zip_helpers.add_to_zip_hermetic( + out_apk, + zip_path, + data=data, + compress=compress, + alignment=0 if compress and not fast_align else alignment) + + def copy_resource(zipinfo, out_dir=''): + add_to_zip( + out_dir + zipinfo.filename, + resource_apk.read(zipinfo.filename), + compress=zipinfo.compress_type != zipfile.ZIP_STORED) + + # Make assets come before resources in order to maintain the same file + # ordering as GYP / aapt. http://crbug.com/561862 + resource_infos = resource_apk.infolist() + + # 1. AndroidManifest.xml + logging.debug('Adding AndroidManifest.xml') + copy_resource( + resource_apk.getinfo('AndroidManifest.xml'), out_dir=apk_manifest_dir) + + # 2. Assets + logging.debug('Adding assets/') + _AddFiles(out_apk, assets_to_add) + + # 3. Dex files + logging.debug('Adding classes.dex') + if options.dex_file: + with open(options.dex_file, 'rb') as dex_file_obj: + if options.dex_file.endswith('.dex'): + max_dex_number = 1 + # This is the case for incremental_install=true. 
+ add_to_zip( + apk_dex_dir + 'classes.dex', + dex_file_obj.read(), + compress=not options.uncompress_dex) + else: + max_dex_number = 0 + with zipfile.ZipFile(dex_file_obj) as dex_zip: + for dex in (d for d in dex_zip.namelist() if d.endswith('.dex')): + max_dex_number += 1 + add_to_zip( + apk_dex_dir + dex, + dex_zip.read(dex), + compress=not options.uncompress_dex) + + # 4. Native libraries. + logging.debug('Adding lib/') + _AddFiles(out_apk, libs_to_add) + + # Add a placeholder lib if the APK should be multi ABI but is missing libs + # for one of the ABIs. + native_lib_placeholders = options.native_lib_placeholders + secondary_native_lib_placeholders = ( + options.secondary_native_lib_placeholders) + if options.is_multi_abi: + if ((secondary_native_libs or secondary_native_lib_placeholders) + and not native_libs and not native_lib_placeholders): + native_lib_placeholders += ['libplaceholder.so'] + if ((native_libs or native_lib_placeholders) + and not secondary_native_libs + and not secondary_native_lib_placeholders): + secondary_native_lib_placeholders += ['libplaceholder.so'] + + # Add placeholder libs. + for name in sorted(native_lib_placeholders): + # Note: Empty libs files are ignored by md5check (can cause issues + # with stale builds when the only change is adding/removing + # placeholders). + apk_path = 'lib/%s/%s' % (options.android_abi, name) + add_to_zip(apk_path, '', alignment=0x1000) + + for name in sorted(secondary_native_lib_placeholders): + # Note: Empty libs files are ignored by md5check (can cause issues + # with stale builds when the only change is adding/removing + # placeholders). + apk_path = 'lib/%s/%s' % (options.secondary_android_abi, name) + add_to_zip(apk_path, '', alignment=0x1000) + + # 5. Resources + logging.debug('Adding res/') + for info in sorted(resource_infos, key=lambda i: i.filename): + if info.filename != 'AndroidManifest.xml': + copy_resource(info) + + # 6. Java resources that should be accessible via + # Class.getResourceAsStream(), in particular parts of Emma jar. + # Prebuilt jars may contain class files which we shouldn't include. 
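# For example (illustrative jar contents): given a java_resources jar
# holding META-INF/MANIFEST.MF, com/foo/Bar.class and com/foo/data.txt,
# only com/foo/data.txt is copied into the APK by the loop below.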
+ logging.debug('Adding Java resources') + for java_resource in options.java_resources: + with zipfile.ZipFile(java_resource, 'r') as java_resource_jar: + for apk_path in sorted(java_resource_jar.namelist()): + apk_path_lower = apk_path.lower() + + if apk_path_lower.startswith('meta-inf/'): + continue + if apk_path_lower.endswith('/'): + continue + if apk_path_lower.endswith('.class'): + continue + + add_to_zip(apk_root_dir + apk_path, + java_resource_jar.read(apk_path)) + + if options.format == 'apk' and options.key_path: + zipalign_path = None if fast_align else options.zipalign_path + finalize_apk.FinalizeApk(options.apksigner_jar, + zipalign_path, + f.name, + f.name, + options.key_path, + options.key_passwd, + options.key_name, + int(options.min_sdk_version), + warnings_as_errors=options.warnings_as_errors) + logging.debug('Moving file into place') + + if options.depfile: + action_helpers.write_depfile(options.depfile, + options.output_apk, + inputs=depfile_deps) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/android/gyp/apkbuilder.pydeps b/android/gyp/apkbuilder.pydeps new file mode 100644 index 000000000000..28dfdb035496 --- /dev/null +++ b/android/gyp/apkbuilder.pydeps @@ -0,0 +1,10 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/apkbuilder.pydeps build/android/gyp/apkbuilder.py +../../action_helpers.py +../../gn_helpers.py +../../zip_helpers.py +apkbuilder.py +finalize_apk.py +util/__init__.py +util/build_utils.py +util/diff_utils.py diff --git a/android/gyp/assert_static_initializers.py b/android/gyp/assert_static_initializers.py new file mode 100755 index 000000000000..fd0bb02ac80b --- /dev/null +++ b/android/gyp/assert_static_initializers.py @@ -0,0 +1,169 @@ +#!/usr/bin/env python3 +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Checks the number of static initializers in an APK's library.""" + + +import argparse +import os +import re +import subprocess +import sys +import tempfile +import zipfile + +from util import build_utils + +_DUMP_STATIC_INITIALIZERS_PATH = os.path.join(build_utils.DIR_SOURCE_ROOT, + 'tools', 'linux', + 'dump-static-initializers.py') + + +def _RunReadelf(so_path, options, tool_prefix=''): + return subprocess.check_output( + [tool_prefix + 'readobj', '--elf-output-style=GNU'] + options + + [so_path]).decode('utf8') + + +def _ParseLibBuildId(so_path, tool_prefix): + """Returns the Build ID of the given native library.""" + stdout = _RunReadelf(so_path, ['-n'], tool_prefix) + match = re.search(r'Build ID: (\w+)', stdout) + return match.group(1) if match else None + + +def _VerifyLibBuildIdsMatch(tool_prefix, *so_files): + if len(set(_ParseLibBuildId(f, tool_prefix) for f in so_files)) > 1: + raise Exception('Found differing build ids in output directory and apk. ' + 'Your output directory is likely stale.') + + +def _DumpStaticInitializers(apk_so_name, unzipped_so, out_dir, tool_prefix): + so_with_symbols_path = os.path.join(out_dir, 'lib.unstripped', + os.path.basename(apk_so_name)) + if not os.path.exists(so_with_symbols_path): + raise Exception('Unstripped .so not found. 
Looked here: %s' % + so_with_symbols_path) + _VerifyLibBuildIdsMatch(tool_prefix, unzipped_so, so_with_symbols_path) + subprocess.check_call([_DUMP_STATIC_INITIALIZERS_PATH, so_with_symbols_path]) + + +def _ReadInitArray(so_path, tool_prefix, expect_no_initializers): + stdout = _RunReadelf(so_path, ['-SW'], tool_prefix) + # Matches: .init_array INIT_ARRAY 000000000516add0 5169dd0 000010 00 WA 0 0 8 + match = re.search(r'\.init_array.*$', stdout, re.MULTILINE) + if expect_no_initializers: + if match: + raise Exception( + 'Expected no initializers for %s, yet some were found' % so_path) + return 0 + if not match: + raise Exception('Did not find section: .init_array in {}:\n{}'.format( + so_path, stdout)) + size_str = re.split(r'\W+', match.group(0))[5] + return int(size_str, 16) + + +def _CountStaticInitializers(so_path, tool_prefix, expect_no_initializers): + # Find the number of files with at least one static initializer. + # First determine if we're 32 or 64 bit + stdout = _RunReadelf(so_path, ['-h'], tool_prefix) + elf_class_line = re.search('Class:.*$', stdout, re.MULTILINE).group(0) + elf_class = re.split(r'\W+', elf_class_line)[1] + if elf_class == 'ELF32': + word_size = 4 + else: + word_size = 8 + + # Then find the number of files with global static initializers. + # NOTE: this is very implementation-specific and makes assumptions + # about how compiler and linker implement global static initializers. + init_array_size = _ReadInitArray(so_path, tool_prefix, expect_no_initializers) + assert init_array_size % word_size == 0 + return init_array_size // word_size + + +def _AnalyzeStaticInitializers(apk_or_aab, tool_prefix, dump_sis, out_dir, + ignored_libs, no_initializers_libs): + with zipfile.ZipFile(apk_or_aab) as z: + so_files = [ + f for f in z.infolist() if f.filename.endswith('.so') + and f.file_size > 0 and os.path.basename(f.filename) not in ignored_libs + ] + # Skip checking static initializers for secondary abi libs. They will be + # checked by 32-bit bots. This avoids the complexity of finding 32 bit .so + # files in the output directory in 64 bit builds. + has_64 = any('64' in f.filename for f in so_files) + files_to_check = [f for f in so_files if not has_64 or '64' in f.filename] + + # Do not check partitioned libs. They have no ".init_array" section since + # all SIs are considered "roots" by the linker, and so end up in the base + # module. + files_to_check = [ + f for f in files_to_check if not f.filename.endswith('_partition.so') + ] + + si_count = 0 + for f in files_to_check: + lib_basename = os.path.basename(f.filename) + expect_no_initializers = lib_basename in no_initializers_libs + with tempfile.NamedTemporaryFile(prefix=lib_basename) as temp: + temp.write(z.read(f)) + temp.flush() + si_count += _CountStaticInitializers(temp.name, tool_prefix, + expect_no_initializers) + if dump_sis: + _DumpStaticInitializers(f.filename, temp.name, out_dir, tool_prefix) + return si_count + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--touch', help='File to touch upon success') + parser.add_argument('--tool-prefix', required=True, + help='Prefix for nm and friends') + parser.add_argument('--expected-count', required=True, type=int, + help='Fail if number of static initializers is not ' + 'equal to this value.') + parser.add_argument('apk_or_aab', help='Path to .apk or .aab file.') + args = parser.parse_args() + + # TODO(crbug.com/838414): add support for files included via loadable_modules. 
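# Worked example for the helpers above (illustrative numbers): a 64-bit
# library whose .init_array section is 0x10 bytes long yields
# 16 / 8 = 2 static initializers from _CountStaticInitializers().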
+ ignored_libs = { + 'libarcore_sdk_c.so', 'libcrashpad_handler_trampoline.so', + 'libsketchology_native.so' + } + # The chromium linker doesn't have static initializers, which makes the + # regular check throw. It should not have any. + no_initializers_libs = ['libchromium_android_linker.so'] + + si_count = _AnalyzeStaticInitializers(args.apk_or_aab, args.tool_prefix, + False, '.', ignored_libs, + no_initializers_libs) + if si_count != args.expected_count: + print('Expected {} static initializers, but found {}.'.format( + args.expected_count, si_count)) + if args.expected_count > si_count: + print('You have removed one or more static initializers. Thanks!') + print('To fix the build, update the expectation in:') + print(' //chrome/android/static_initializers.gni') + print() + + print('Dumping static initializers via dump-static-initializers.py:') + sys.stdout.flush() + _AnalyzeStaticInitializers(args.apk_or_aab, args.tool_prefix, True, '.', + ignored_libs, no_initializers_libs) + print() + print('For more information:') + print(' https://chromium.googlesource.com/chromium/src/+/main/docs/' + 'static_initializers.md') + sys.exit(1) + + if args.touch: + open(args.touch, 'w') + + +if __name__ == '__main__': + main() diff --git a/android/gyp/assert_static_initializers.pydeps b/android/gyp/assert_static_initializers.pydeps new file mode 100644 index 000000000000..b574d817a1a7 --- /dev/null +++ b/android/gyp/assert_static_initializers.pydeps @@ -0,0 +1,6 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/assert_static_initializers.pydeps build/android/gyp/assert_static_initializers.py +../../gn_helpers.py +assert_static_initializers.py +util/__init__.py +util/build_utils.py diff --git a/android/gyp/binary_baseline_profile.py b/android/gyp/binary_baseline_profile.py new file mode 100755 index 000000000000..40498050a61d --- /dev/null +++ b/android/gyp/binary_baseline_profile.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python3 +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""Creates a binary profile from an HRF + dex + mapping.""" + +import argparse +import sys + +from util import build_utils +import action_helpers + + +def main(args): + parser = argparse.ArgumentParser(description=__doc__) + action_helpers.add_depfile_arg(parser) + parser.add_argument('--output-profile', + required=True, + help='Path to output binary profile.') + parser.add_argument('--output-metadata', + required=True, + help='Path to output binary profile metadata.') + parser.add_argument('--profgen', + required=True, + help='Path to profgen binary.') + parser.add_argument('--dex', + required=True, + help='Path to a zip containing release dex files.') + parser.add_argument('--proguard-mapping', + required=True, + help='Path to proguard mapping for release dex.') + parser.add_argument('--input-profile-path', + required=True, + help='Path to HRF baseline profile to apply.') + options = parser.parse_args(build_utils.ExpandFileArgs(args)) + + cmd = [ + options.profgen, + 'bin', + options.input_profile_path, + '-o', + options.output_profile, + '-om', + options.output_metadata, + '-a', + options.dex, + '-m', + options.proguard_mapping, + ] + build_utils.CheckOutput(cmd, env={'JAVA_HOME': build_utils.JAVA_HOME}) + action_helpers.write_depfile(options.depfile, + options.output_profile, + inputs=[options.dex]) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/android/gyp/binary_baseline_profile.pydeps b/android/gyp/binary_baseline_profile.pydeps new file mode 100644 index 000000000000..944f6abed8a8 --- /dev/null +++ b/android/gyp/binary_baseline_profile.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/binary_baseline_profile.pydeps build/android/gyp/binary_baseline_profile.py +../../action_helpers.py +../../gn_helpers.py +binary_baseline_profile.py +util/__init__.py +util/build_utils.py diff --git a/android/gyp/bundletool.py b/android/gyp/bundletool.py new file mode 100755 index 000000000000..79151335ce37 --- /dev/null +++ b/android/gyp/bundletool.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python3 +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Simple wrapper around the bundletool tool. + +Bundletool is distributed as a versioned jar file. This script abstracts the +location and version of this jar file, as well as the JVM invokation.""" + +# Warning: Check if still being run as python2: https://crbug.com/1322618 + +import logging +import os +import sys + +from util import build_utils + +# Assume this is stored under build/android/gyp/ +BUNDLETOOL_DIR = os.path.abspath(os.path.join( + __file__, '..', '..', '..', '..', 'third_party', 'android_build_tools', + 'bundletool')) + +BUNDLETOOL_JAR_PATH = os.path.join(BUNDLETOOL_DIR, 'bundletool.jar') + + +def RunBundleTool(args, print_stdout=False): + # ASAN builds failed with the default of 1GB (crbug.com/1120202). 
+ # Bug for bundletool: https://issuetracker.google.com/issues/165911616 + cmd = build_utils.JavaCmd(xmx='4G') + cmd += ['-jar', BUNDLETOOL_JAR_PATH] + cmd += args + logging.debug(' '.join(cmd)) + return build_utils.CheckOutput( + cmd, + print_stdout=print_stdout, + print_stderr=True, + fail_on_output=False, + stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings) + + +if __name__ == '__main__': + RunBundleTool(sys.argv[1:], print_stdout=True) diff --git a/android/gyp/bytecode_processor.py b/android/gyp/bytecode_processor.py new file mode 100755 index 000000000000..f6065dbe7154 --- /dev/null +++ b/android/gyp/bytecode_processor.py @@ -0,0 +1,95 @@ +#!/usr/bin/env python3 +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Wraps bin/helper/bytecode_processor and expands @FileArgs.""" + +import argparse +import sys + +import javac_output_processor +from util import build_utils +from util import server_utils +import action_helpers # build_utils adds //build to sys.path. + + +def _AddSwitch(parser, val): + parser.add_argument( + val, action='store_const', default='--disabled', const=val) + + +def main(argv): + argv = build_utils.ExpandFileArgs(argv[1:]) + parser = argparse.ArgumentParser() + parser.add_argument('--target-name', help='Fully qualified GN target name.') + parser.add_argument('--use-build-server', + action='store_true', + help='Always use the build server.') + parser.add_argument('--script', required=True, + help='Path to the java binary wrapper script.') + parser.add_argument('--gn-target', required=True) + parser.add_argument('--input-jar', required=True) + parser.add_argument('--direct-classpath-jars') + parser.add_argument('--sdk-classpath-jars') + parser.add_argument('--full-classpath-jars') + parser.add_argument('--full-classpath-gn-targets') + parser.add_argument('--stamp') + parser.add_argument('-v', '--verbose', action='store_true') + parser.add_argument('--missing-classes-allowlist') + parser.add_argument('--warnings-as-errors', + action='store_true', + help='Treat all warnings as errors.') + _AddSwitch(parser, '--is-prebuilt') + args = parser.parse_args(argv) + + if server_utils.MaybeRunCommand(name=args.target_name, + argv=sys.argv, + stamp_file=args.stamp, + force=args.use_build_server): + return + + args.sdk_classpath_jars = action_helpers.parse_gn_list( + args.sdk_classpath_jars) + args.direct_classpath_jars = action_helpers.parse_gn_list( + args.direct_classpath_jars) + args.full_classpath_jars = action_helpers.parse_gn_list( + args.full_classpath_jars) + args.full_classpath_gn_targets = action_helpers.parse_gn_list( + args.full_classpath_gn_targets) + args.missing_classes_allowlist = action_helpers.parse_gn_list( + args.missing_classes_allowlist) + + verbose = '--verbose' if args.verbose else '--not-verbose' + + cmd = [args.script, args.gn_target, args.input_jar, verbose, args.is_prebuilt] + cmd += [str(len(args.missing_classes_allowlist))] + cmd += args.missing_classes_allowlist + cmd += [str(len(args.sdk_classpath_jars))] + cmd += args.sdk_classpath_jars + cmd += [str(len(args.direct_classpath_jars))] + cmd += args.direct_classpath_jars + cmd += [str(len(args.full_classpath_jars))] + cmd += args.full_classpath_jars + cmd += [str(len(args.full_classpath_gn_targets))] + cmd += [ + javac_output_processor.ReplaceGmsPackageIfNeeded(t) + for t in args.full_classpath_gn_targets + ] + try: + build_utils.CheckOutput(cmd, + print_stdout=True, + fail_func=None, + 
fail_on_output=args.warnings_as_errors) + except build_utils.CalledProcessError as e: + # Do not output command line because it is massive and makes the actual + # error message hard to find. + sys.stderr.write(e.output) + sys.exit(1) + + if args.stamp: + build_utils.Touch(args.stamp) + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/android/gyp/bytecode_processor.pydeps b/android/gyp/bytecode_processor.pydeps new file mode 100644 index 000000000000..e7f1d98bdfdb --- /dev/null +++ b/android/gyp/bytecode_processor.pydeps @@ -0,0 +1,28 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/bytecode_processor.pydeps build/android/gyp/bytecode_processor.py +../../../third_party/catapult/devil/devil/__init__.py +../../../third_party/catapult/devil/devil/android/__init__.py +../../../third_party/catapult/devil/devil/android/constants/__init__.py +../../../third_party/catapult/devil/devil/android/constants/chrome.py +../../../third_party/catapult/devil/devil/android/sdk/__init__.py +../../../third_party/catapult/devil/devil/android/sdk/keyevent.py +../../../third_party/catapult/devil/devil/android/sdk/version_codes.py +../../../third_party/catapult/devil/devil/constants/__init__.py +../../../third_party/catapult/devil/devil/constants/exit_codes.py +../../../third_party/colorama/src/colorama/__init__.py +../../../third_party/colorama/src/colorama/ansi.py +../../../third_party/colorama/src/colorama/ansitowin32.py +../../../third_party/colorama/src/colorama/initialise.py +../../../third_party/colorama/src/colorama/win32.py +../../../third_party/colorama/src/colorama/winterm.py +../../../tools/android/modularization/convenience/lookup_dep.py +../../action_helpers.py +../../gn_helpers.py +../list_java_targets.py +../pylib/__init__.py +../pylib/constants/__init__.py +bytecode_processor.py +javac_output_processor.py +util/__init__.py +util/build_utils.py +util/server_utils.py diff --git a/android/gyp/bytecode_rewriter.py b/android/gyp/bytecode_rewriter.py new file mode 100755 index 000000000000..d16fee5237e4 --- /dev/null +++ b/android/gyp/bytecode_rewriter.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python3 +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Wrapper script around ByteCodeRewriter subclass scripts.""" + +import argparse +import sys + +from util import build_utils +import action_helpers # build_utils adds //build to sys.path. 
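# Note on the depfile written in main() below (a sketch, assuming the
# Make-style depfile format that GN/Ninja consume via
# action_helpers.write_depfile()):
#
#   obj/Foo.rewritten.jar: ../a.jar ../b.jar
#
# i.e. the output jar is re-generated whenever a classpath jar changes.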
+ + +def main(argv): + argv = build_utils.ExpandFileArgs(argv[1:]) + parser = argparse.ArgumentParser() + action_helpers.add_depfile_arg(parser) + parser.add_argument('--script', + required=True, + help='Path to the java binary wrapper script.') + parser.add_argument('--classpath', action='append', nargs='+') + parser.add_argument('--input-jar', required=True) + parser.add_argument('--output-jar', required=True) + args = parser.parse_args(argv) + + classpath = action_helpers.parse_gn_list(args.classpath) + action_helpers.write_depfile(args.depfile, args.output_jar, inputs=classpath) + + classpath.append(args.input_jar) + cmd = [ + args.script, '--classpath', ':'.join(classpath), args.input_jar, + args.output_jar + ] + build_utils.CheckOutput(cmd, print_stdout=True) + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/android/gyp/bytecode_rewriter.pydeps b/android/gyp/bytecode_rewriter.pydeps new file mode 100644 index 000000000000..b0a656036738 --- /dev/null +++ b/android/gyp/bytecode_rewriter.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/bytecode_rewriter.pydeps build/android/gyp/bytecode_rewriter.py +../../action_helpers.py +../../gn_helpers.py +bytecode_rewriter.py +util/__init__.py +util/build_utils.py diff --git a/android/gyp/check_flag_expectations.py b/android/gyp/check_flag_expectations.py new file mode 100755 index 000000000000..97be53d864ee --- /dev/null +++ b/android/gyp/check_flag_expectations.py @@ -0,0 +1,132 @@ +#!/usr/bin/env python3 +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse + +from util import build_utils +from util import diff_utils + +IGNORE_FLAG_PREFIXES = [ + # For cflags. + '-DANDROID_NDK_VERSION_ROLL', + '-DCR_LIBCXX_REVISION', + '-I', + '-g', + '-fcrash-diagnostics-dir=', + '-fprofile', + '--no-system-header-prefix', + '--system-header-prefix', + '-isystem', + '-iquote', + '-fmodule-map', + '-frandom-seed', + '-c ', + '-o ', + '-fmodule-name=', + '--sysroot=', + '-fcolor-diagnostics', + '-MF ', + '-MD', + + # For ldflags. + '-Wl,--thinlto-cache-dir', + '-Wl,--thinlto-cache-policy', + '-Wl,--thinlto-jobs', + '-Wl,--start-lib', + '-Wl,--end-lib', + '-Wl,-whole-archive', + '-Wl,-no-whole-archive', + '-l', + '-L', + '-Wl,-soname', + '-Wl,-version-script', + '-Wl,--version-script', + '-fdiagnostics-color', + '-Wl,--color-diagnostics', + '-B', + '-Wl,--dynamic-linker', + '-DCR_CLANG_REVISION=', +] + +FLAGS_WITH_PARAMS = ( + '-Xclang', + '-mllvm', + '-Xclang -fdebug-compilation-dir', + '-Xclang -add-plugin', +) + + +def KeepFlag(flag): + return not any(flag.startswith(prefix) for prefix in IGNORE_FLAG_PREFIXES) + + +def MergeFlags(flags): + flags = _MergeFlagsHelper(flags) + # For double params eg: -Xclang -fdebug-compilation-dir + flags = _MergeFlagsHelper(flags) + return flags + + +def _MergeFlagsHelper(flags): + merged_flags = [] + while flags: + current_flag = flags.pop(0) + if flags: + next_flag = flags[0] + else: + next_flag = None + merge_flags = False + + # Special case some flags that always come with params. + if current_flag in FLAGS_WITH_PARAMS: + merge_flags = True + # Assume flags without '-' are a param. + if next_flag and not next_flag.startswith('-'): + merge_flags = True + # Special case -plugin-arg prefix because it has the plugin name. 
+ if current_flag.startswith('-Xclang -plugin-arg'): + merge_flags = True + if merge_flags: + merged_flag = '{} {}'.format(current_flag, next_flag) + merged_flags.append(merged_flag) + flags.pop(0) + else: + merged_flags.append(current_flag) + return merged_flags + + +def ParseFlags(flag_file_path): + flags = [] + with open(flag_file_path) as f: + for flag in f.read().splitlines(): + if KeepFlag(flag): + flags.append(flag) + return flags + + +def main(): + """Compare the flags with the checked in list.""" + parser = argparse.ArgumentParser() + diff_utils.AddCommandLineFlags(parser) + parser.add_argument('--current-flags', + help='Path to flags to check against expectations.') + options = parser.parse_args() + + flags = ParseFlags(options.current_flags) + flags = MergeFlags(flags) + + msg = """ +This expectation file is meant to inform the build team about changes to +flags used when building native libraries in chrome (most importantly any +that relate to security). This is to ensure the flags are replicated when +building native libraries outside of the repo. Please update the .expected +files and a WATCHLIST entry will alert the build team to your change.""" + diff_utils.CheckExpectations('\n'.join(sorted(flags)), + options, + custom_msg=msg) + + +if __name__ == '__main__': + main() diff --git a/android/gyp/check_flag_expectations.pydeps b/android/gyp/check_flag_expectations.pydeps new file mode 100644 index 000000000000..6bade9490b5c --- /dev/null +++ b/android/gyp/check_flag_expectations.pydeps @@ -0,0 +1,8 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/check_flag_expectations.pydeps build/android/gyp/check_flag_expectations.py +../../action_helpers.py +../../gn_helpers.py +check_flag_expectations.py +util/__init__.py +util/build_utils.py +util/diff_utils.py diff --git a/android/gyp/compile_java.py b/android/gyp/compile_java.py new file mode 100755 index 000000000000..5fee0d77c3ef --- /dev/null +++ b/android/gyp/compile_java.py @@ -0,0 +1,865 @@ +#!/usr/bin/env python3 +# +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import functools +import logging +import multiprocessing +import optparse +import os +import re +import shutil +import sys +import time +import zipfile +import pathlib + +import javac_output_processor +from util import build_utils +from util import md5_check +from util import jar_info_utils +from util import server_utils +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers + +_JAVAC_EXTRACTOR = os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', + 'android_prebuilts', 'build_tools', 'common', + 'framework', 'javac_extractor.jar') + +# Add a check here to cause the suggested fix to be applied while compiling. +# Use this when trying to enable more checks. +ERRORPRONE_CHECKS_TO_APPLY = [] + +# Full list of checks: https://errorprone.info/bugpatterns +ERRORPRONE_WARNINGS_TO_DISABLE = [ + # Temporarily disabling to roll doubledown. + # TODO(wnwen): Re-enable this upstream. + 'InlineMeInliner', + # The following are super useful, but existing issues need to be fixed first + # before they can start failing the build on new errors. 
+ 'InvalidParam', + 'InvalidLink', + 'InvalidInlineTag', + 'EmptyBlockTag', + 'PublicConstructorForAbstractClass', + 'InvalidBlockTag', + 'StaticAssignmentInConstructor', + 'MutablePublicArray', + 'UnescapedEntity', + 'NonCanonicalType', + 'AlmostJavadoc', + 'ReturnValueIgnored', + # The following are added for errorprone update: https://crbug.com/1216032 + 'InlineMeSuggester', + 'DoNotClaimAnnotations', + 'JavaUtilDate', + 'IdentityHashMapUsage', + 'UnnecessaryMethodReference', + 'LongFloatConversion', + 'CharacterGetNumericValue', + 'ErroneousThreadPoolConstructorChecker', + 'StaticMockMember', + 'MissingSuperCall', + 'ToStringReturnsNull', + # If possible, this should be automatically fixed if turned on: + 'MalformedInlineTag', + # TODO(crbug.com/834807): Follow steps in bug + 'DoubleBraceInitialization', + # TODO(crbug.com/834790): Follow steps in bug. + 'CatchAndPrintStackTrace', + # TODO(crbug.com/801210): Follow steps in bug. + 'SynchronizeOnNonFinalField', + # TODO(crbug.com/802073): Follow steps in bug. + 'TypeParameterUnusedInFormals', + # TODO(crbug.com/803484): Follow steps in bug. + 'CatchFail', + # TODO(crbug.com/803485): Follow steps in bug. + 'JUnitAmbiguousTestClass', + # Android platform default is always UTF-8. + # https://developer.android.com/reference/java/nio/charset/Charset.html#defaultCharset() + 'DefaultCharset', + # Low priority since there are lots of tags that don't fit this check. + 'UnrecognisedJavadocTag', + # Low priority since the alternatives still work. + 'JdkObsolete', + # We don't use that many lambdas. + 'FunctionalInterfaceClash', + # There are lots of times when we just want to post a task. + 'FutureReturnValueIgnored', + # Nice to be explicit about operators, but not necessary. + 'OperatorPrecedence', + # Just false positives in our code. + 'ThreadJoinLoop', + # Low priority corner cases with String.split. + # Linking Guava and using Splitter was rejected + # in the https://chromium-review.googlesource.com/c/chromium/src/+/871630. + 'StringSplitter', + # Preferred to use another method since it propagates exceptions better. + 'ClassNewInstance', + # Nice to have static inner classes but not necessary. + 'ClassCanBeStatic', + # Explicit is better than implicit. + 'FloatCast', + # Results in false positives. + 'ThreadLocalUsage', + # Also just false positives. + 'Finally', + # False positives for Chromium. + 'FragmentNotInstantiable', + # Low priority to fix. + 'HidingField', + # Low priority. + 'IntLongMath', + # Low priority. + 'BadComparable', + # Low priority. + 'EqualsHashCode', + # Nice to fix but low priority. + 'TypeParameterShadowing', + # Good to have immutable enums, also low priority. + 'ImmutableEnumChecker', + # False positives for testing. + 'InputStreamSlowMultibyteRead', + # Nice to have better primitives. + 'BoxedPrimitiveConstructor', + # Not necessary for tests. + 'OverrideThrowableToString', + # Nice to have better type safety. + 'CollectionToArraySafeParameter', + # Makes logcat debugging more difficult, and does not provide obvious + # benefits in the Chromium codebase. + 'ObjectToString', + # Triggers on private methods that are @CalledByNative. + 'UnusedMethod', + # Triggers on generated R.java files. + 'UnusedVariable', + # Not that useful. + 'UnsafeReflectiveConstructionCast', + # Not that useful. + 'MixedMutabilityReturnType', + # Nice to have. + 'EqualsGetClass', + # A lot of false-positives from CharSequence.equals(). + 'UndefinedEquals', + # Nice to have. + 'ExtendingJUnitAssert', + # Nice to have. 
+ 'SystemExitOutsideMain', + # Nice to have. + 'TypeParameterNaming', + # Nice to have. + 'UnusedException', + # Nice to have. + 'UngroupedOverloads', + # Nice to have. + 'FunctionalInterfaceClash', + # Nice to have. + 'InconsistentOverloads', + # Dagger generated code triggers this. + 'SameNameButDifferent', + # Nice to have. + 'UnnecessaryLambda', + # Nice to have. + 'UnnecessaryAnonymousClass', + # Nice to have. + 'LiteProtoToString', + # Nice to have. + 'MissingSummary', + # Nice to have. + 'ReturnFromVoid', + # Nice to have. + 'EmptyCatch', + # Nice to have. + 'BadImport', + # Nice to have. + 'UseCorrectAssertInTests', + # Nice to have. + 'InlineFormatString', + # Nice to have. + 'DefaultPackage', + # Must be off since we are now passing in annotation processor generated + # code as a source jar (deduplicating work with turbine). + 'RefersToDaggerCodegen', + # We already have presubmit checks for this. Not necessary to warn on + # every build. + 'RemoveUnusedImports', + # We do not care about unnecessary parenthesis enough to check for them. + 'UnnecessaryParentheses', + # The only time we trigger this is when it is better to be explicit in a + # list of unicode characters, e.g. FindAddress.java + 'UnicodeEscape', + # Nice to have. + 'AlreadyChecked', +] + +# Full list of checks: https://errorprone.info/bugpatterns +# Only those marked as "experimental" need to be listed here in order to be +# enabled. +ERRORPRONE_WARNINGS_TO_ENABLE = [ + 'BinderIdentityRestoredDangerously', + 'EmptyIf', + 'EqualsBrokenForNull', + 'InvalidThrows', + 'LongLiteralLowerCaseSuffix', + 'MultiVariableDeclaration', + 'RedundantOverride', + 'StaticQualifiedUsingExpression', + 'StringEquality', + 'TimeUnitMismatch', + 'UnnecessaryStaticImport', + 'UseBinds', + 'WildcardImport', +] + + +def ProcessJavacOutput(output, target_name): + # These warnings cannot be suppressed even for third party code. Deprecation + # warnings especially do not help since we must support older Android versions. + deprecated_re = re.compile( + r'(Note: .* uses? or overrides? a deprecated API.)$') + unchecked_re = re.compile( + r'(Note: .* uses? unchecked or unsafe operations.)$') + recompile_re = re.compile(r'(Note: Recompile with -Xlint:.* for details.)$') + + activity_re = re.compile(r'^(?P<prefix>\s*location: )class Activity$') + + def ApplyFilters(line): + return not (deprecated_re.match(line) or unchecked_re.match(line) + or recompile_re.match(line)) + + def Elaborate(line): + if activity_re.match(line): + prefix = ' ' * activity_re.match(line).end('prefix') + return '{}\n{}Expecting a FragmentActivity? See {}'.format( + line, prefix, 'docs/ui/android/bytecode_rewriting.md') + return line + + output = build_utils.FilterReflectiveAccessJavaWarnings(output) + + # Warning currently cannot be silenced via javac flag.
+ if 'Unsafe is internal proprietary API' in output: + # Example: + # HiddenApiBypass.java:69: warning: Unsafe is internal proprietary API and + # may be removed in a future release + # import sun.misc.Unsafe; + # ^ + output = re.sub(r'.*?Unsafe is internal proprietary API[\s\S]*?\^\n', '', + output) + output = re.sub(r'\d+ warnings\n', '', output) + + lines = (l for l in output.split('\n') if ApplyFilters(l)) + lines = (Elaborate(l) for l in lines) + + output_processor = javac_output_processor.JavacOutputProcessor(target_name) + lines = output_processor.Process(lines) + + return '\n'.join(lines) + + +def CreateJarFile(jar_path, + classes_dir, + service_provider_configuration_dir=None, + additional_jar_files=None, + extra_classes_jar=None): + """Zips files from compilation into a single jar.""" + logging.info('Start creating jar file: %s', jar_path) + with action_helpers.atomic_output(jar_path) as f: + with zipfile.ZipFile(f.name, 'w') as z: + zip_helpers.zip_directory(z, classes_dir) + if service_provider_configuration_dir: + config_files = build_utils.FindInDirectory( + service_provider_configuration_dir) + for config_file in config_files: + zip_path = os.path.relpath(config_file, + service_provider_configuration_dir) + zip_helpers.add_to_zip_hermetic(z, zip_path, src_path=config_file) + + if additional_jar_files: + for src_path, zip_path in additional_jar_files: + zip_helpers.add_to_zip_hermetic(z, zip_path, src_path=src_path) + if extra_classes_jar: + path_transform = lambda p: p if p.endswith('.class') else None + zip_helpers.merge_zips(z, [extra_classes_jar], + path_transform=path_transform) + logging.info('Completed jar file: %s', jar_path) + + +def _ParsePackageAndClassNames(source_file): + """This should support both Java and Kotlin files.""" + package_name = '' + class_names = [] + with open(source_file) as f: + for l in f: + # Strip unindented comments. + # Considers a leading * as a continuation of a multi-line comment (our + # linter doesn't enforce a space before it like there should be). + l = re.sub(r'^(?://.*|/?\*.*?(?:\*/\s*|$))', '', l) + # Stripping things between double quotes (strings), so if the word "class" + # shows up in a string this doesn't trigger. This isn't strictly correct + # (with escaped quotes) but covers a very large percentage of cases. + l = re.sub('(?:".*?")', '', l) + + # Java lines end in semicolon, whereas Kotlin lines do not. + m = re.match(r'package\s+(.*?)(;|\s*$)', l) + if m and not package_name: + package_name = m.group(1) + + # Not exactly a proper parser, but works for sources that Chrome uses. + # In order to not match nested classes, it just checks for lack of indent. + m = re.match(r'(?:\S.*?)?(?:class|@?interface|enum)\s+(.+?)\b', l) + if m: + class_names.append(m.group(1)) + return package_name, class_names + + +def _ProcessSourceFileForInfo(source_file): + package_name, class_names = _ParsePackageAndClassNames(source_file) + return source_file, package_name, class_names + + +class _InfoFileContext: + """Manages the creation of the class->source file .info file.""" + + def __init__(self, chromium_code, excluded_globs): + self._chromium_code = chromium_code + self._excluded_globs = excluded_globs + # Map of .java path -> .srcjar/nested/path.java. + self._srcjar_files = {} + # List of generators from pool.imap_unordered(). + self._results = [] + # Lazily created multiprocessing.Pool. 
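+    # (Created lazily on the first SubmitFiles() call and shut down again in
+    # Close(), so targets that submit no sources never spawn a subprocess.)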
+    self._pool = None
+
+  def AddSrcJarSources(self, srcjar_path, extracted_paths, parent_dir):
+    for path in extracted_paths:
+      # We want the path inside the srcjar so the viewer can have a tree
+      # structure.
+      self._srcjar_files[path] = '{}/{}'.format(
+          srcjar_path, os.path.relpath(path, parent_dir))
+
+  def SubmitFiles(self, source_files):
+    if not source_files:
+      return
+    if self._pool is None:
+      # Restrict to just one process so that we do not steal CPU from the
+      # concurrently running compile; compiling is always the slower step.
+      self._pool = multiprocessing.Pool(1)
+    logging.info('Submitting %d files for info', len(source_files))
+    self._results.append(
+        self._pool.imap_unordered(_ProcessSourceFileForInfo,
+                                  source_files,
+                                  chunksize=1000))
+
+  def _CheckPathMatchesClassName(self, source_file, package_name, class_name):
+    if source_file.endswith('.java'):
+      parts = package_name.split('.') + [class_name + '.java']
+    else:
+      parts = package_name.split('.') + [class_name + '.kt']
+    expected_suffix = os.path.sep.join(parts)
+    if not source_file.endswith(expected_suffix):
+      raise Exception(('Source package+class name do not match its path.\n'
+                       'Actual path: %s\nExpected path: %s') %
+                      (source_file, expected_suffix))
+
+  def _ProcessInfo(self, java_file, package_name, class_names, source):
+    for class_name in class_names:
+      yield '{}.{}'.format(package_name, class_name)
+      # Skip aidl srcjars since they don't indent code correctly.
+      if '_aidl.srcjar' in source:
+        continue
+      assert not self._chromium_code or len(class_names) == 1, (
+          'Chromium java files must only have one class: {}'.format(source))
+      if self._chromium_code:
+        # This check is not strictly necessary, but it is nice to enforce it
+        # somewhere.
+        self._CheckPathMatchesClassName(java_file, package_name,
+                                        class_names[0])
+
+  def _ShouldIncludeInJarInfo(self, fully_qualified_name):
+    name_as_class_glob = fully_qualified_name.replace('.', '/') + '.class'
+    return not build_utils.MatchesGlob(name_as_class_glob,
+                                       self._excluded_globs)
+
+  def _Collect(self):
+    if self._pool is None:
+      return {}
+    ret = {}
+    for result in self._results:
+      for java_file, package_name, class_names in result:
+        source = self._srcjar_files.get(java_file, java_file)
+        for fully_qualified_name in self._ProcessInfo(java_file, package_name,
+                                                      class_names, source):
+          if self._ShouldIncludeInJarInfo(fully_qualified_name):
+            ret[fully_qualified_name] = java_file
+    return ret
+
+  def Close(self):
+    # Workaround for a Python 2.x bug with multiprocessing and daemon threads:
+    # https://bugs.python.org/issue4106
+    if self._pool is not None:
+      logging.info('Joining multiprocessing.Pool')
+      self._pool.terminate()
+      self._pool.join()
+      logging.info('Done.')
+
+  def Commit(self, output_path):
+    """Writes a .jar.info file.
+
+    Maps fully qualified names for classes to either the java file that they
+    are defined in or the path of the srcjar that they came from.
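+
+    For example (hypothetical entries), a class compiled from a checked-in
+    file maps to that file, while a class extracted from a srcjar maps to a
+    path inside the srcjar:
+      org.chromium.base.Foo -> base/android/java/org/chromium/base/Foo.java
+      org.chromium.base.Bar -> gen/bar.srcjar/org/chromium/base/Bar.java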
+ """ + logging.info('Collecting info file entries') + entries = self._Collect() + + logging.info('Writing info file: %s', output_path) + with action_helpers.atomic_output(output_path, mode='wb') as f: + jar_info_utils.WriteJarInfoFile(f, entries, self._srcjar_files) + logging.info('Completed info file: %s', output_path) + + +def _OnStaleMd5(changes, options, javac_cmd, javac_args, java_files, kt_files): + logging.info('Starting _OnStaleMd5') + if options.enable_kythe_annotations: + # Kythe requires those env variables to be set and compile_java.py does the + # same + if not os.environ.get('KYTHE_ROOT_DIRECTORY') or \ + not os.environ.get('KYTHE_OUTPUT_DIRECTORY'): + raise Exception('--enable-kythe-annotations requires ' + 'KYTHE_ROOT_DIRECTORY and KYTHE_OUTPUT_DIRECTORY ' + 'environment variables to be set.') + javac_extractor_cmd = build_utils.JavaCmd() + [ + '--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED', + '--add-exports=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED', + '--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED', + '--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED', + '--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED', + '-jar', + _JAVAC_EXTRACTOR, + ] + try: + # _RunCompiler()'s partial javac implementation does not support + # generating outputs in $KYTHE_OUTPUT_DIRECTORY. + _RunCompiler(changes, + options, + javac_extractor_cmd + javac_args, + java_files, + options.jar_path + '.javac_extractor', + enable_partial_javac=False) + except build_utils.CalledProcessError as e: + # Having no index for particular target is better than failing entire + # codesearch. Log and error and move on. + logging.error('Could not generate kzip: %s', e) + + intermediates_out_dir = None + jar_info_path = None + if not options.enable_errorprone: + # Delete any stale files in the generated directory. The purpose of + # options.generated_dir is for codesearch. + shutil.rmtree(options.generated_dir, True) + intermediates_out_dir = options.generated_dir + + jar_info_path = options.jar_path + '.info' + + # Compiles with Error Prone take twice as long to run as pure javac. Thus GN + # rules run both in parallel, with Error Prone only used for checks. + try: + _RunCompiler(changes, + options, + javac_cmd + javac_args, + java_files, + options.jar_path, + kt_files=kt_files, + jar_info_path=jar_info_path, + intermediates_out_dir=intermediates_out_dir, + enable_partial_javac=True) + except build_utils.CalledProcessError as e: + # Do not output stacktrace as it takes up space on gerrit UI, forcing + # you to click though to find the actual compilation error. It's never + # interesting to see the Python stacktrace for a Java compilation error. + sys.stderr.write(e.output) + sys.exit(1) + + logging.info('Completed all steps in _OnStaleMd5') + + +def _RunCompiler(changes, + options, + javac_cmd, + java_files, + jar_path, + kt_files=None, + jar_info_path=None, + intermediates_out_dir=None, + enable_partial_javac=False): + """Runs java compiler. + + Args: + changes: md5_check.Changes object. + options: Object with command line flags. + javac_cmd: Command to execute. + java_files: List of java files passed from command line. + jar_path: Path of output jar file. + kt_files: List of Kotlin files passed from command line if any. + jar_info_path: Path of the .info file to generate. + If None, .info file will not be generated. + intermediates_out_dir: Directory for saving intermediate outputs. + If None a temporary directory is used. 
+    enable_partial_javac: Enables compiling only the Java files which have
+        changed, in the special case that no method signatures have changed.
+        This is useful for large GN targets.
+        Not supported if compiling generates outputs other than |jar_path|
+        and |jar_info_path|.
+  """
+  logging.info('Starting _RunCompiler')
+
+  java_files = java_files.copy()
+  java_srcjars = options.java_srcjars
+  save_info_file = jar_info_path is not None
+
+  # Use jar_path's directory to ensure paths are relative (needed for goma).
+  temp_dir = jar_path + '.staging'
+  build_utils.DeleteDirectory(temp_dir)
+  os.makedirs(temp_dir)
+  info_file_context = None
+  try:
+    classes_dir = os.path.join(temp_dir, 'classes')
+    service_provider_configuration = os.path.join(
+        temp_dir, 'service_provider_configuration')
+
+    if java_files:
+      os.makedirs(classes_dir)
+
+      if enable_partial_javac:
+        all_changed_paths_are_java = all(
+            p.endswith(".java") for p in changes.IterChangedPaths())
+        if (all_changed_paths_are_java and not changes.HasStringChanges()
+            and os.path.exists(jar_path)
+            and (jar_info_path is None or os.path.exists(jar_info_path))):
+          # Log message is used by tests to determine whether partial javac
+          # optimization was used.
+          logging.info('Using partial javac optimization for %s compile' %
+                       (jar_path))
+
+          # Header jar corresponding to |java_files| did not change.
+          # As a build speed optimization (crbug.com/1170778), re-compile only
+          # java files which have changed. Re-use the old jar .info file.
+          java_files = list(changes.IterChangedPaths())
+          java_srcjars = None
+
+          # Reuse old .info file.
+          save_info_file = False
+
+          build_utils.ExtractAll(jar_path, classes_dir, pattern='*.class')
+
+    if save_info_file:
+      info_file_context = _InfoFileContext(options.chromium_code,
+                                           options.jar_info_exclude_globs)
+
+    if intermediates_out_dir is None:
+      intermediates_out_dir = temp_dir
+
+    input_srcjars_dir = os.path.join(intermediates_out_dir, 'input_srcjars')
+
+    if java_srcjars:
+      logging.info('Extracting srcjars to %s', input_srcjars_dir)
+      build_utils.MakeDirectory(input_srcjars_dir)
+      for srcjar in options.java_srcjars:
+        extracted_files = build_utils.ExtractAll(
+            srcjar, no_clobber=True, path=input_srcjars_dir, pattern='*.java')
+        java_files.extend(extracted_files)
+        if save_info_file:
+          info_file_context.AddSrcJarSources(srcjar, extracted_files,
+                                             input_srcjars_dir)
+      logging.info('Done extracting srcjars')
+
+    if options.header_jar:
+      logging.info('Extracting service provider configs')
+      # Extract META-INF/services/* so that it can be copied into the output
+      # .jar.
+      build_utils.ExtractAll(options.header_jar,
+                             no_clobber=True,
+                             path=service_provider_configuration,
+                             pattern='META-INF/services/*')
+      logging.info('Done extracting service provider configs')
+
+    if save_info_file and java_files:
+      info_file_context.SubmitFiles(java_files)
+      info_file_context.SubmitFiles(kt_files)
+
+    if java_files:
+      # Don't include the output directory in the initial set of args since
+      # it being in a temp dir makes it unstable (breaks md5 stamping).
+      cmd = list(javac_cmd)
+      cmd += ['-d', classes_dir]
+
+      if options.classpath:
+        cmd += ['-classpath', ':'.join(options.classpath)]
+
+      # Pass source paths as response files to avoid extremely long command
+      # lines that are tedious to debug.
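+      # javac expands the '@<path>' argument itself, reading the
+      # whitespace-separated paths from the file, so the argv stays short no
+      # matter how many sources the target has.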
+      java_files_rsp_path = os.path.join(temp_dir, 'files_list.txt')
+      with open(java_files_rsp_path, 'w') as f:
+        f.write(' '.join(java_files))
+      cmd += ['@' + java_files_rsp_path]
+
+      process_javac_output_partial = functools.partial(
+          ProcessJavacOutput, target_name=options.target_name)
+
+      logging.debug('Build command %s', cmd)
+      start = time.time()
+      build_utils.CheckOutput(cmd,
+                              print_stdout=options.chromium_code,
+                              stdout_filter=process_javac_output_partial,
+                              stderr_filter=process_javac_output_partial,
+                              fail_on_output=options.warnings_as_errors)
+      end = time.time() - start
+      logging.info('Java compilation took %ss', end)
+
+    CreateJarFile(jar_path, classes_dir, service_provider_configuration,
+                  options.additional_jar_files, options.kotlin_jar_path)
+
+    if save_info_file:
+      info_file_context.Commit(jar_info_path)
+
+    logging.info('Completed all steps in _RunCompiler')
+  finally:
+    if info_file_context:
+      info_file_context.Close()
+    shutil.rmtree(temp_dir)
+
+
+def _ParseOptions(argv):
+  parser = optparse.OptionParser()
+  action_helpers.add_depfile_arg(parser)
+
+  parser.add_option('--target-name', help='Fully qualified GN target name.')
+  parser.add_option('--skip-build-server',
+                    action='store_true',
+                    help='Avoid using the build server.')
+  parser.add_option('--use-build-server',
+                    action='store_true',
+                    help='Always use the build server.')
+  parser.add_option(
+      '--java-srcjars',
+      action='append',
+      default=[],
+      help='List of srcjars to include in compilation.')
+  parser.add_option(
+      '--generated-dir',
+      help='Subdirectory within target_gen_dir to place extracted srcjars and '
+      'annotation processor output for codesearch to find.')
+  parser.add_option('--classpath', action='append', help='Classpath to use.')
+  parser.add_option(
+      '--processorpath',
+      action='append',
+      help='GN list of jars that comprise the classpath used for Annotation '
+      'Processors.')
+  parser.add_option(
+      '--processor-arg',
+      dest='processor_args',
+      action='append',
+      help='key=value arguments for the annotation processors.')
+  parser.add_option(
+      '--additional-jar-file',
+      dest='additional_jar_files',
+      action='append',
+      help='Additional files to package into jar. By default, only Java '
+      '.class files are packaged into the jar. Files should be specified in '
+      'format <filename>:<path to be placed in jar>.')
+  parser.add_option(
+      '--jar-info-exclude-globs',
+      help='GN list of exclude globs to filter from generated .info files.')
+  parser.add_option(
+      '--chromium-code',
+      type='int',
+      help='Whether code being compiled should be built with stricter '
+      'warnings for chromium code.')
+  parser.add_option(
+      '--gomacc-path', help='When set, prefix javac command with gomacc')
+  parser.add_option(
+      '--errorprone-path', help='Use the Errorprone compiler at this path.')
+  parser.add_option(
+      '--enable-errorprone',
+      action='store_true',
+      help='Enable errorprone checks.')
+  parser.add_option(
+      '--warnings-as-errors',
+      action='store_true',
+      help='Treat all warnings as errors.')
+  parser.add_option('--jar-path', help='Jar output path.')
+  parser.add_option(
+      '--javac-arg',
+      action='append',
+      default=[],
+      help='Additional arguments to pass to javac.')
+  parser.add_option(
+      '--enable-kythe-annotations',
+      action='store_true',
+      help='Enable generation of Kythe kzip, used for codesearch. 
Ensure ' + 'proper environment variables are set before using this flag.') + parser.add_option( + '--header-jar', + help='This is the header jar for the current target that contains ' + 'META-INF/services/* files to be included in the output jar.') + parser.add_option( + '--kotlin-jar-path', + help='Kotlin jar to be merged into the output jar. This contains the ' + ".class files from this target's .kt files.") + + options, args = parser.parse_args(argv) + build_utils.CheckOptions(options, parser, required=('jar_path', )) + + options.classpath = action_helpers.parse_gn_list(options.classpath) + options.processorpath = action_helpers.parse_gn_list(options.processorpath) + options.java_srcjars = action_helpers.parse_gn_list(options.java_srcjars) + options.jar_info_exclude_globs = action_helpers.parse_gn_list( + options.jar_info_exclude_globs) + + additional_jar_files = [] + for arg in options.additional_jar_files or []: + filepath, jar_filepath = arg.split(':') + additional_jar_files.append((filepath, jar_filepath)) + options.additional_jar_files = additional_jar_files + + files = [] + for arg in args: + # Interpret a path prefixed with @ as a file containing a list of sources. + if arg.startswith('@'): + files.extend(build_utils.ReadSourcesList(arg[1:])) + else: + files.append(arg) + + # The target's .sources file contains both Java and Kotlin files. We use + # compile_kt.py to compile the Kotlin files to .class and header jars. Javac + # is run only on .java files. + java_files = [f for f in files if f.endswith('.java')] + # Kotlin files are needed to populate the info file and attribute size in + # supersize back to the appropriate Kotlin file. + kt_files = [f for f in files if f.endswith('.kt')] + + return options, java_files, kt_files + + +def main(argv): + build_utils.InitLogging('JAVAC_DEBUG') + argv = build_utils.ExpandFileArgs(argv) + options, java_files, kt_files = _ParseOptions(argv) + + # Only use the build server for errorprone runs. + if (options.enable_errorprone and not options.skip_build_server + and server_utils.MaybeRunCommand(name=options.target_name, + argv=sys.argv, + stamp_file=options.jar_path, + force=options.use_build_server)): + return + + javac_cmd = [] + if options.gomacc_path: + javac_cmd.append(options.gomacc_path) + javac_cmd.append(build_utils.JAVAC_PATH) + + javac_args = [ + '-g', + # We currently target JDK 11 everywhere, since Mockito is broken by JDK17. + # See crbug.com/1409661 for more details. + '--release', + '11', + # Chromium only allows UTF8 source files. Being explicit avoids + # javac pulling a default encoding from the user's environment. + '-encoding', + 'UTF-8', + # Prevent compiler from compiling .java files not listed as inputs. + # See: http://blog.ltgt.net/most-build-tools-misuse-javac/ + '-sourcepath', + ':', + # protobuf-generated files fail this check (javadoc has @deprecated, + # but method missing @Deprecated annotation). + '-Xlint:-dep-ann', + ] + + if options.enable_errorprone: + # All errorprone args are passed space-separated in a single arg. + errorprone_flags = ['-Xplugin:ErrorProne'] + # Make everything a warning so that when treat_warnings_as_errors is false, + # they do not fail the build. + errorprone_flags += ['-XepAllErrorsAsWarnings'] + # Don't check generated files. 
+    errorprone_flags += ['-XepDisableWarningsInGeneratedCode']
+    errorprone_flags.extend('-Xep:{}:OFF'.format(x)
+                            for x in ERRORPRONE_WARNINGS_TO_DISABLE)
+    errorprone_flags.extend('-Xep:{}:WARN'.format(x)
+                            for x in ERRORPRONE_WARNINGS_TO_ENABLE)
+
+    if ERRORPRONE_CHECKS_TO_APPLY:
+      errorprone_flags += [
+          '-XepPatchLocation:IN_PLACE',
+          '-XepPatchChecks:,' + ','.join(ERRORPRONE_CHECKS_TO_APPLY)
+      ]
+
+    # These are required to use JDK 16, and are taken directly from
+    # https://errorprone.info/docs/installation
+    javac_args += [
+        '-J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED',
+        '-J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED',
+        '-J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED',
+        '-J--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED',
+        '-J--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED',
+        '-J--add-exports=jdk.compiler/com.sun.tools.javac.processing='
+        'ALL-UNNAMED',
+        '-J--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED',
+        '-J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED',
+        '-J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED',
+        '-J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED',
+    ]
+
+    javac_args += ['-XDcompilePolicy=simple', ' '.join(errorprone_flags)]
+
+    # This flag makes errorprone quit after its checks, before code
+    # generation. Since we do not need errorprone's outputs, this speeds up
+    # errorprone by 4 seconds for chrome_java.
+    if not ERRORPRONE_CHECKS_TO_APPLY:
+      javac_args += ['-XDshould-stop.ifNoError=FLOW']
+
+  # This effectively disables all annotation processors, even including
+  # annotation processors in service provider configuration files named
+  # META-INF/. See the following link for reference:
+  # https://docs.oracle.com/en/java/javase/11/tools/javac.html
+  javac_args.extend(['-proc:none'])
+
+  if options.processorpath:
+    javac_args.extend(['-processorpath', ':'.join(options.processorpath)])
+  if options.processor_args:
+    for arg in options.processor_args:
+      javac_args.extend(['-A%s' % arg])
+
+  javac_args.extend(options.javac_arg)
+
+  classpath_inputs = options.classpath + options.processorpath
+
+  depfile_deps = classpath_inputs
+  # Files that are already inputs in GN should go in input_paths.
+  input_paths = depfile_deps + options.java_srcjars + java_files + kt_files
+  if options.header_jar:
+    input_paths.append(options.header_jar)
+  input_paths += [x[0] for x in options.additional_jar_files]
+
+  output_paths = [options.jar_path]
+  if not options.enable_errorprone:
+    output_paths += [options.jar_path + '.info']
+
+  input_strings = (javac_cmd + javac_args + options.classpath + java_files +
+                   kt_files +
+                   [options.warnings_as_errors, options.jar_info_exclude_globs])
+
+  # Use md5_check for |pass_changes| feature.
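+  # CallAndWriteDepfileIfStale only invokes the callback when an input file
+  # or an entry in |input_strings| has changed, and |pass_changes=True| hands
+  # the callback the set of changed paths, which is what makes the partial
+  # javac optimization in _RunCompiler() possible.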
+ md5_check.CallAndWriteDepfileIfStale(lambda changes: _OnStaleMd5( + changes, options, javac_cmd, javac_args, java_files, kt_files), + options, + depfile_deps=depfile_deps, + input_paths=input_paths, + input_strings=input_strings, + output_paths=output_paths, + pass_changes=True) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/android/gyp/compile_java.pydeps b/android/gyp/compile_java.pydeps new file mode 100644 index 000000000000..45617b15075d --- /dev/null +++ b/android/gyp/compile_java.pydeps @@ -0,0 +1,32 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/compile_java.pydeps build/android/gyp/compile_java.py +../../../third_party/catapult/devil/devil/__init__.py +../../../third_party/catapult/devil/devil/android/__init__.py +../../../third_party/catapult/devil/devil/android/constants/__init__.py +../../../third_party/catapult/devil/devil/android/constants/chrome.py +../../../third_party/catapult/devil/devil/android/sdk/__init__.py +../../../third_party/catapult/devil/devil/android/sdk/keyevent.py +../../../third_party/catapult/devil/devil/android/sdk/version_codes.py +../../../third_party/catapult/devil/devil/constants/__init__.py +../../../third_party/catapult/devil/devil/constants/exit_codes.py +../../../third_party/colorama/src/colorama/__init__.py +../../../third_party/colorama/src/colorama/ansi.py +../../../third_party/colorama/src/colorama/ansitowin32.py +../../../third_party/colorama/src/colorama/initialise.py +../../../third_party/colorama/src/colorama/win32.py +../../../third_party/colorama/src/colorama/winterm.py +../../../tools/android/modularization/convenience/lookup_dep.py +../../action_helpers.py +../../gn_helpers.py +../../print_python_deps.py +../../zip_helpers.py +../list_java_targets.py +../pylib/__init__.py +../pylib/constants/__init__.py +compile_java.py +javac_output_processor.py +util/__init__.py +util/build_utils.py +util/jar_info_utils.py +util/md5_check.py +util/server_utils.py diff --git a/android/gyp/compile_kt.py b/android/gyp/compile_kt.py new file mode 100755 index 000000000000..4c7eb6ff8432 --- /dev/null +++ b/android/gyp/compile_kt.py @@ -0,0 +1,182 @@ +#!/usr/bin/env python3 +# +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import logging +import os +import shutil +import sys +import time + +import compile_java + +from util import build_utils +import action_helpers # build_utils adds //build to sys.path. + + +def _RunCompiler(args, + kotlinc_cmd, + source_files, + jar_path, + intermediates_out_dir=None): + """Runs the Kotlin compiler.""" + logging.info('Starting _RunCompiler') + + source_files = source_files.copy() + kt_files = [f for f in source_files if f.endswith('.kt')] + assert len(kt_files) > 0, 'At least one .kt file must be passed in.' + + java_srcjars = args.java_srcjars + + # Use jar_path's directory to ensure paths are relative (needed for goma). 
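+  # e.g. a jar_path of 'obj/foo/bar.jar' stages compiled classes under
+  # 'obj/foo/bar.jar.staging/', which is deleted in the finally block below.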
+  temp_dir = jar_path + '.staging'
+  build_utils.DeleteDirectory(temp_dir)
+  os.makedirs(temp_dir)
+  try:
+    classes_dir = os.path.join(temp_dir, 'classes')
+    os.makedirs(classes_dir)
+
+    input_srcjars_dir = os.path.join(intermediates_out_dir or temp_dir,
+                                     'input_srcjars')
+
+    if java_srcjars:
+      logging.info('Extracting srcjars to %s', input_srcjars_dir)
+      build_utils.MakeDirectory(input_srcjars_dir)
+      for srcjar in args.java_srcjars:
+        source_files += build_utils.ExtractAll(srcjar,
+                                               no_clobber=True,
+                                               path=input_srcjars_dir,
+                                               pattern='*.java')
+      logging.info('Done extracting srcjars')
+
+    # Don't include the output directory in the initial set of args since it
+    # being in a temp dir makes it unstable (breaks md5 stamping).
+    cmd = list(kotlinc_cmd)
+    cmd += ['-d', classes_dir]
+
+    if args.classpath:
+      cmd += ['-classpath', ':'.join(args.classpath)]
+
+    # This is a kotlinc plugin to generate header files for .kt files,
+    # similar to turbine for .java files.
+    jvm_abi_path = os.path.join(build_utils.KOTLIN_HOME, 'lib',
+                                'jvm-abi-gen.jar')
+    cmd += [
+        f'-Xplugin={jvm_abi_path}', '-P',
+        'plugin:org.jetbrains.kotlin.jvm.abi:outputDir=' +
+        args.interface_jar_path
+    ]
+
+    # Pass source paths as response files to avoid extremely long command
+    # lines that are tedious to debug.
+    source_files_rsp_path = os.path.join(temp_dir, 'files_list.txt')
+    with open(source_files_rsp_path, 'w') as f:
+      f.write(' '.join(source_files))
+    cmd += ['@' + source_files_rsp_path]
+
+    # Explicitly set JAVA_HOME since some bots do not have this already set.
+    env = os.environ.copy()
+    env['JAVA_HOME'] = build_utils.JAVA_HOME
+
+    logging.debug('Build command %s', cmd)
+    start = time.time()
+    build_utils.CheckOutput(cmd,
+                            env=env,
+                            print_stdout=args.chromium_code,
+                            fail_on_output=args.warnings_as_errors)
+    logging.info('Kotlin compilation took %ss', time.time() - start)
+
+    compile_java.CreateJarFile(jar_path, classes_dir)
+
+    logging.info('Completed all steps in _RunCompiler')
+  finally:
+    shutil.rmtree(temp_dir)
+
+
+def _ParseOptions(argv):
+  parser = argparse.ArgumentParser()
+  action_helpers.add_depfile_arg(parser)
+
+  parser.add_argument('--java-srcjars',
+                      action='append',
+                      default=[],
+                      help='List of srcjars to include in compilation.')
+  parser.add_argument(
+      '--generated-dir',
+      help='Subdirectory within target_gen_dir to place extracted srcjars and '
+      'annotation processor output for codesearch to find.')
+  parser.add_argument('--classpath', action='append', help='Classpath to use.')
+  parser.add_argument(
+      '--chromium-code',
+      action='store_true',
+      help='Whether code being compiled should be built with stricter '
+      'warnings for chromium code.')
+  parser.add_argument('--gomacc-path',
+                      help='When set, prefix kotlinc command with gomacc')
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+  parser.add_argument('--jar-path', help='Jar output path.', required=True)
+  parser.add_argument('--interface-jar-path',
+                      help='Interface jar output path.',
+                      required=True)
+
+  args, extra_args = parser.parse_known_args(argv)
+
+  args.classpath = action_helpers.parse_gn_list(args.classpath)
+  args.java_srcjars = action_helpers.parse_gn_list(args.java_srcjars)
+
+  source_files = []
+  for arg in extra_args:
+    # Interpret a path prefixed with @ as a file containing a list of sources.
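+    # e.g. '@gen/foo.sources' appends every path listed in gen/foo.sources.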
+ if arg.startswith('@'): + source_files.extend(build_utils.ReadSourcesList(arg[1:])) + else: + assert not arg.startswith('--'), f'Undefined option {arg}' + source_files.append(arg) + + return args, source_files + + +def main(argv): + build_utils.InitLogging('KOTLINC_DEBUG') + argv = build_utils.ExpandFileArgs(argv) + args, source_files = _ParseOptions(argv) + + kotlinc_cmd = [] + if args.gomacc_path: + kotlinc_cmd.append(args.gomacc_path) + kotlinc_cmd.append(build_utils.KOTLINC_PATH) + + kotlinc_cmd += [ + '-no-jdk', # Avoid depending on the bundled JDK. + # Avoid depending on the bundled Kotlin stdlib. This may have a version + # skew with the one in //third_party/android_deps (which is the one we + # prefer to use). + '-no-stdlib', + # Avoid depending on the bundled Kotlin reflect libs. + '-no-reflect', + ] + + if args.generated_dir: + # Delete any stale files in the generated directory. The purpose of + # args.generated_dir is for codesearch. + shutil.rmtree(args.generated_dir, True) + + _RunCompiler(args, + kotlinc_cmd, + source_files, + args.jar_path, + intermediates_out_dir=args.generated_dir) + + if args.depfile: + # GN already knows of the source files, so avoid listing individual files + # in the depfile. + action_helpers.write_depfile(args.depfile, args.jar_path, args.classpath) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/android/gyp/compile_kt.pydeps b/android/gyp/compile_kt.pydeps new file mode 100644 index 000000000000..818bca802ed6 --- /dev/null +++ b/android/gyp/compile_kt.pydeps @@ -0,0 +1,33 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/compile_kt.pydeps build/android/gyp/compile_kt.py +../../../third_party/catapult/devil/devil/__init__.py +../../../third_party/catapult/devil/devil/android/__init__.py +../../../third_party/catapult/devil/devil/android/constants/__init__.py +../../../third_party/catapult/devil/devil/android/constants/chrome.py +../../../third_party/catapult/devil/devil/android/sdk/__init__.py +../../../third_party/catapult/devil/devil/android/sdk/keyevent.py +../../../third_party/catapult/devil/devil/android/sdk/version_codes.py +../../../third_party/catapult/devil/devil/constants/__init__.py +../../../third_party/catapult/devil/devil/constants/exit_codes.py +../../../third_party/colorama/src/colorama/__init__.py +../../../third_party/colorama/src/colorama/ansi.py +../../../third_party/colorama/src/colorama/ansitowin32.py +../../../third_party/colorama/src/colorama/initialise.py +../../../third_party/colorama/src/colorama/win32.py +../../../third_party/colorama/src/colorama/winterm.py +../../../tools/android/modularization/convenience/lookup_dep.py +../../action_helpers.py +../../gn_helpers.py +../../print_python_deps.py +../../zip_helpers.py +../list_java_targets.py +../pylib/__init__.py +../pylib/constants/__init__.py +compile_java.py +compile_kt.py +javac_output_processor.py +util/__init__.py +util/build_utils.py +util/jar_info_utils.py +util/md5_check.py +util/server_utils.py diff --git a/android/gyp/compile_resources.py b/android/gyp/compile_resources.py new file mode 100755 index 000000000000..3b1fe7300488 --- /dev/null +++ b/android/gyp/compile_resources.py @@ -0,0 +1,1014 @@ +#!/usr/bin/env python3 +# +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Compile Android resources into an intermediate APK. 
+ +This can also generate an R.txt, and an .srcjar file containing the proper +final R.java class for all resource packages the APK depends on. + +This will crunch images with aapt2. +""" + +import argparse +import collections +import contextlib +import filecmp +import hashlib +import logging +import os +import pathlib +import re +import shutil +import subprocess +import sys +import textwrap +from xml.etree import ElementTree + +from util import build_utils +from util import diff_utils +from util import manifest_utils +from util import parallel +from util import protoresources +from util import resource_utils +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers + + +# Pngs that we shouldn't convert to webp. Please add rationale when updating. +_PNG_WEBP_EXCLUSION_PATTERN = re.compile('|'.join([ + # Crashes on Galaxy S5 running L (https://crbug.com/807059). + r'.*star_gray\.png', + # Android requires pngs for 9-patch images. + r'.*\.9\.png', + # Daydream requires pngs for icon files. + r'.*daydream_icon_.*\.png' +])) + + +def _ParseArgs(args): + """Parses command line options. + + Returns: + An options object as from argparse.ArgumentParser.parse_args() + """ + parser = argparse.ArgumentParser(description=__doc__) + + input_opts = parser.add_argument_group('Input options') + output_opts = parser.add_argument_group('Output options') + + input_opts.add_argument('--include-resources', + action='append', + required=True, + help='Paths to arsc resource files used to link ' + 'against. Can be specified multiple times.') + input_opts.add_argument( + '--dependencies-res-zips', + default=[], + help='Resources zip archives from dependents. Required to ' + 'resolve @type/foo references into dependent libraries.') + input_opts.add_argument( + '--extra-res-packages', + help='Additional package names to generate R.java files for.') + input_opts.add_argument( + '--aapt2-path', required=True, help='Path to the Android aapt2 tool.') + input_opts.add_argument( + '--android-manifest', required=True, help='AndroidManifest.xml path.') + input_opts.add_argument( + '--r-java-root-package-name', + default='base', + help='Short package name for this target\'s root R java file (ex. ' + 'input of "base" would become gen.base_module). Defaults to "base".') + group = input_opts.add_mutually_exclusive_group() + group.add_argument( + '--shared-resources', + action='store_true', + help='Make all resources in R.java non-final and allow the resource IDs ' + 'to be reset to a different package index when the apk is loaded by ' + 'another application at runtime.') + group.add_argument( + '--app-as-shared-lib', + action='store_true', + help='Same as --shared-resources, but also ensures all resource IDs are ' + 'directly usable from the APK loaded as an application.') + input_opts.add_argument( + '--package-id', + type=int, + help='Decimal integer representing custom package ID for resources ' + '(instead of 127==0x7f). Cannot be used with --shared-resources.') + input_opts.add_argument( + '--package-name', + help='Package name that will be used to create R class.') + input_opts.add_argument( + '--rename-manifest-package', help='Package name to force AAPT to use.') + input_opts.add_argument( + '--arsc-package-name', + help='Package name to set in manifest of resources.arsc file. 
This is ' + 'only used for apks under test.') + input_opts.add_argument( + '--shared-resources-allowlist', + help='An R.txt file acting as a allowlist for resources that should be ' + 'non-final and have their package ID changed at runtime in R.java. ' + 'Implies and overrides --shared-resources.') + input_opts.add_argument( + '--shared-resources-allowlist-locales', + default='[]', + help='Optional GN-list of locales. If provided, all strings corresponding' + ' to this locale list will be kept in the final output for the ' + 'resources identified through --shared-resources-allowlist, even ' + 'if --locale-allowlist is being used.') + input_opts.add_argument( + '--use-resource-ids-path', + help='Use resource IDs generated by aapt --emit-ids.') + input_opts.add_argument( + '--debuggable', + action='store_true', + help='Whether to add android:debuggable="true".') + input_opts.add_argument('--version-code', help='Version code for apk.') + input_opts.add_argument('--version-name', help='Version name for apk.') + input_opts.add_argument( + '--min-sdk-version', required=True, help='android:minSdkVersion for APK.') + input_opts.add_argument( + '--target-sdk-version', + required=True, + help="android:targetSdkVersion for APK.") + input_opts.add_argument( + '--max-sdk-version', + help="android:maxSdkVersion expected in AndroidManifest.xml.") + input_opts.add_argument( + '--manifest-package', help='Package name of the AndroidManifest.xml.') + input_opts.add_argument( + '--locale-allowlist', + default='[]', + help='GN list of languages to include. All other language configs will ' + 'be stripped out. List may include a combination of Android locales ' + 'or Chrome locales.') + input_opts.add_argument( + '--resource-exclusion-regex', + default='', + help='File-based filter for resources (applied before compiling)') + input_opts.add_argument( + '--resource-exclusion-exceptions', + default='[]', + help='GN list of globs that say which files to include even ' + 'when --resource-exclusion-regex is set.') + input_opts.add_argument( + '--dependencies-res-zip-overlays', + help='GN list with subset of --dependencies-res-zips to use overlay ' + 'semantics for.') + input_opts.add_argument( + '--values-filter-rules', + help='GN list of source_glob:regex for filtering resources after they ' + 'are compiled. 
Use this to filter out entries within values/ files.') + input_opts.add_argument('--png-to-webp', action='store_true', + help='Convert png files to webp format.') + + input_opts.add_argument('--webp-binary', default='', + help='Path to the cwebp binary.') + input_opts.add_argument( + '--webp-cache-dir', help='The directory to store webp image cache.') + input_opts.add_argument( + '--is-bundle-module', + action='store_true', + help='Whether resources are being generated for a bundle module.') + input_opts.add_argument( + '--uses-split', + help='Value to set uses-split to in the AndroidManifest.xml.') + input_opts.add_argument( + '--verification-version-code-offset', + help='Subtract this from versionCode for expectation files') + input_opts.add_argument( + '--verification-library-version-offset', + help='Subtract this from static-library version for expectation files') + + action_helpers.add_depfile_arg(output_opts) + output_opts.add_argument('--arsc-path', help='Apk output for arsc format.') + output_opts.add_argument('--proto-path', help='Apk output for proto format.') + output_opts.add_argument( + '--info-path', help='Path to output info file for the partial apk.') + output_opts.add_argument( + '--srcjar-out', + help='Path to srcjar to contain generated R.java.') + output_opts.add_argument('--r-text-out', + help='Path to store the generated R.txt file.') + output_opts.add_argument( + '--proguard-file', help='Path to proguard.txt generated file.') + output_opts.add_argument( + '--proguard-file-main-dex', + help='Path to proguard.txt generated file for main dex.') + output_opts.add_argument( + '--emit-ids-out', help='Path to file produced by aapt2 --emit-ids.') + + diff_utils.AddCommandLineFlags(parser) + options = parser.parse_args(args) + + options.include_resources = action_helpers.parse_gn_list( + options.include_resources) + options.dependencies_res_zips = action_helpers.parse_gn_list( + options.dependencies_res_zips) + options.extra_res_packages = action_helpers.parse_gn_list( + options.extra_res_packages) + options.locale_allowlist = action_helpers.parse_gn_list( + options.locale_allowlist) + options.shared_resources_allowlist_locales = action_helpers.parse_gn_list( + options.shared_resources_allowlist_locales) + options.resource_exclusion_exceptions = action_helpers.parse_gn_list( + options.resource_exclusion_exceptions) + options.dependencies_res_zip_overlays = action_helpers.parse_gn_list( + options.dependencies_res_zip_overlays) + options.values_filter_rules = action_helpers.parse_gn_list( + options.values_filter_rules) + + if not options.arsc_path and not options.proto_path: + parser.error('One of --arsc-path or --proto-path is required.') + + if options.package_id and options.shared_resources: + parser.error('--package-id and --shared-resources are mutually exclusive') + + return options + + +def _IterFiles(root_dir): + for root, _, files in os.walk(root_dir): + for f in files: + yield os.path.join(root, f) + + +def _RenameLocaleResourceDirs(resource_dirs, path_info): + """Rename locale resource directories into standard names when necessary. + + This is necessary to deal with the fact that older Android releases only + support ISO 639-1 two-letter codes, and sometimes even obsolete versions + of them. + + In practice it means: + * 3-letter ISO 639-2 qualifiers are renamed under a corresponding + 2-letter one. E.g. for Filipino, strings under values-fil/ will be moved + to a new corresponding values-tl/ sub-directory. 
+
+  * Modern ISO 639-1 codes will be renamed to their obsolete variant
+    for Indonesian, Hebrew and Yiddish (e.g. 'values-id/' -> 'values-in/').
+
+  * Norwegian macrolanguage strings will be renamed to Bokmal (main
+    Norway language). See http://crbug.com/920960. In practice this
+    means that 'values-no/' -> 'values-nb/' unless 'values-nb/' already
+    exists.
+
+  * BCP 47 language tags will be renamed to an equivalent ISO 639-1
+    locale qualifier if possible (e.g. 'values-b+en+US/' -> 'values-en-rUS/').
+
+  Args:
+    resource_dirs: list of top-level resource directories.
+    path_info: ResourceInfoFile instance in which renames are recorded.
+  """
+  for resource_dir in resource_dirs:
+    ignore_dirs = {}
+    for path in _IterFiles(resource_dir):
+      locale = resource_utils.FindLocaleInStringResourceFilePath(path)
+      if not locale:
+        continue
+      cr_locale = resource_utils.ToChromiumLocaleName(locale)
+      if not cr_locale:
+        continue  # Unsupported Android locale qualifier!?
+      locale2 = resource_utils.ToAndroidLocaleName(cr_locale)
+      if locale != locale2:
+        path2 = path.replace('/values-%s/' % locale, '/values-%s/' % locale2)
+        if path == path2:
+          raise Exception('Could not substitute locale %s for %s in %s' %
+                          (locale, locale2, path))
+
+        # Ignore rather than rename when the destination resources config
+        # already exists.
+        # e.g. some libraries provide both values-nb/ and values-no/.
+        # e.g. material design provides:
+        # * res/values-rUS/values-rUS.xml
+        # * res/values-b+es+419/values-b+es+419.xml
+        config_dir = os.path.dirname(path2)
+        already_has_renamed_config = ignore_dirs.get(config_dir)
+        if already_has_renamed_config is None:
+          # Cache the result of the first time the directory is encountered
+          # since subsequent encounters will find the directory already exists
+          # (due to the rename).
+          already_has_renamed_config = os.path.exists(config_dir)
+          ignore_dirs[config_dir] = already_has_renamed_config
+        if already_has_renamed_config:
+          continue
+
+        build_utils.MakeDirectory(os.path.dirname(path2))
+        shutil.move(path, path2)
+        path_info.RegisterRename(
+            os.path.relpath(path, resource_dir),
+            os.path.relpath(path2, resource_dir))
+
+
+def _ToAndroidLocales(locale_allowlist):
+  """Converts the list of Chrome locales to Android config locale qualifiers.
+
+  Args:
+    locale_allowlist: A list of Chromium locale names.
+  Returns:
+    A set of matching Android config locale qualifier names.
+  """
+  ret = set()
+  for locale in locale_allowlist:
+    locale = resource_utils.ToAndroidLocaleName(locale)
+    if locale is None or ('-' in locale and '-r' not in locale):
+      raise Exception('Unsupported Chromium locale name: %s' % locale)
+    ret.add(locale)
+    # Always keep non-regional fall-backs.
+    language = locale.split('-')[0]
+    ret.add(language)
+
+  return ret
+
+
+def _MoveImagesToNonMdpiFolders(res_root, path_info):
+  """Move images from drawable-*-mdpi-* folders to drawable-* folders.
+
+  Why? http://crbug.com/289843
+  """
+  for src_dir_name in os.listdir(res_root):
+    src_components = src_dir_name.split('-')
+    if src_components[0] != 'drawable' or 'mdpi' not in src_components:
+      continue
+    src_dir = os.path.join(res_root, src_dir_name)
+    if not os.path.isdir(src_dir):
+      continue
+    dst_components = [c for c in src_components if c != 'mdpi']
+    assert dst_components != src_components
+    dst_dir_name = '-'.join(dst_components)
+    dst_dir = os.path.join(res_root, dst_dir_name)
+    build_utils.MakeDirectory(dst_dir)
+    for src_file_name in os.listdir(src_dir):
+      if not os.path.splitext(src_file_name)[1] in ('.png', '.webp', ''):
+        continue
+      src_file = os.path.join(src_dir, src_file_name)
+      dst_file = os.path.join(dst_dir, src_file_name)
+      assert not os.path.lexists(dst_file)
+      shutil.move(src_file, dst_file)
+      path_info.RegisterRename(
+          os.path.relpath(src_file, res_root),
+          os.path.relpath(dst_file, res_root))
+
+
+def _DeterminePlatformVersion(aapt2_path, jar_candidates):
+  def maybe_extract_version(j):
+    try:
+      return resource_utils.ExtractBinaryManifestValues(aapt2_path, j)
+    except build_utils.CalledProcessError:
+      return None
+
+  def is_sdk_jar(jar_name):
+    if jar_name in ('android.jar', 'android_system.jar'):
+      return True
+    # The Robolectric jar looks a bit different.
+    return 'android-all' in jar_name and 'robolectric' in jar_name
+
+  android_sdk_jars = [
+      j for j in jar_candidates if is_sdk_jar(os.path.basename(j))
+  ]
+  extract_all = [maybe_extract_version(j) for j in android_sdk_jars]
+  extract_all = [x for x in extract_all if x]
+  if len(extract_all) == 0:
+    raise Exception('Unable to find android SDK jar among candidates: %s' %
+                    ', '.join(android_sdk_jars))
+  if len(extract_all) > 1:
+    raise Exception('Found multiple android SDK jars among candidates: %s' %
+                    ', '.join(android_sdk_jars))
+  platform_version_code, platform_version_name = extract_all.pop()[:2]
+  return platform_version_code, platform_version_name
+
+
+def _FixManifest(options, temp_dir):
+  """Fix the APK's AndroidManifest.xml.
+
+  This adds any missing namespaces for 'android' and 'tools', and
+  sets certain elements like 'platformBuildVersionCode' or
+  'android:debuggable' depending on the content of |options|.
+
+  Args:
+    options: The command-line arguments tuple.
+    temp_dir: A temporary directory where the fixed manifest will be
+        written to.
+  Returns:
+    Tuple of:
+     * Manifest path within |temp_dir|.
+     * Original package_name.
+     * Manifest package name.
+  """
+  doc, manifest_node, app_node = manifest_utils.ParseManifest(
+      options.android_manifest)
+
+  # merge_manifest.py also sets package & <uses-sdk>. We may want to ensure
+  # the manifest merger is always enabled and remove these command-line
+  # arguments.
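+  # (SetUsesSdk rewrites the manifest's <uses-sdk> element to carry the
+  # min/target/max SDK versions passed on the command line.)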
+  manifest_utils.SetUsesSdk(manifest_node, options.target_sdk_version,
+                            options.min_sdk_version, options.max_sdk_version)
+  orig_package = manifest_node.get('package') or options.manifest_package
+  fixed_package = (options.arsc_package_name or options.manifest_package
+                   or orig_package)
+  manifest_node.set('package', fixed_package)
+
+  platform_version_code, platform_version_name = _DeterminePlatformVersion(
+      options.aapt2_path, options.include_resources)
+  manifest_node.set('platformBuildVersionCode', platform_version_code)
+  manifest_node.set('platformBuildVersionName', platform_version_name)
+  if options.version_code:
+    manifest_utils.NamespacedSet(manifest_node, 'versionCode',
+                                 options.version_code)
+  if options.version_name:
+    manifest_utils.NamespacedSet(manifest_node, 'versionName',
+                                 options.version_name)
+  if options.debuggable:
+    manifest_utils.NamespacedSet(app_node, 'debuggable', 'true')
+
+  if options.uses_split:
+    uses_split = ElementTree.SubElement(manifest_node, 'uses-split')
+    manifest_utils.NamespacedSet(uses_split, 'name', options.uses_split)
+
+  # Make sure the min-sdk condition is not less than the min-sdk of the
+  # bundle.
+  for min_sdk_node in manifest_node.iter('{%s}min-sdk' %
+                                         manifest_utils.DIST_NAMESPACE):
+    dist_value = '{%s}value' % manifest_utils.DIST_NAMESPACE
+    if int(min_sdk_node.get(dist_value)) < int(options.min_sdk_version):
+      min_sdk_node.set(dist_value, options.min_sdk_version)
+
+  debug_manifest_path = os.path.join(temp_dir, 'AndroidManifest.xml')
+  manifest_utils.SaveManifest(doc, debug_manifest_path)
+  return debug_manifest_path, orig_package, fixed_package
+
+
+def _CreateKeepPredicate(resource_exclusion_regex,
+                         resource_exclusion_exceptions):
+  """Return a predicate lambda to determine which resource files to keep.
+
+  Args:
+    resource_exclusion_regex: A regular expression describing all resources
+        to exclude, except if they are mip-maps, or if they are listed
+        in |resource_exclusion_exceptions|.
+    resource_exclusion_exceptions: A list of glob patterns corresponding
+        to exceptions to the |resource_exclusion_regex|.
+  Returns:
+    A lambda that takes a path, and returns true if the corresponding file
+    must be kept.
+  """
+  predicate = lambda path: os.path.basename(path)[0] != '.'
+  if resource_exclusion_regex == '':
+    # Do not extract dotfiles (e.g. ".gitkeep"). aapt ignores them anyway.
+    return predicate
+
+  # A simple predicate that removes (returns False for) paths covered by the
+  # exclusion regex, unless they are also listed as exceptions.
+  return lambda path: (
+      not re.search(resource_exclusion_regex, path) or
+      build_utils.MatchesGlob(path, resource_exclusion_exceptions))
+
+
+def _ComputeSha1(path):
+  with open(path, 'rb') as f:
+    data = f.read()
+  return hashlib.sha1(data).hexdigest()
+
+
+def _ConvertToWebPSingle(png_path, cwebp_binary, cwebp_version, webp_cache_dir):
+  sha1_hash = _ComputeSha1(png_path)
+
+  # The set of arguments that will appear in the cache key.
+  quality_args = ['-m', '6', '-q', '100', '-lossless']
+
+  webp_cache_path = os.path.join(
+      webp_cache_dir, '{}-{}-{}'.format(sha1_hash, cwebp_version,
+                                        ''.join(quality_args)))
+  # No need to add .webp. Android can load images fine without them.
+  webp_path = os.path.splitext(png_path)[0]
+
+  cache_hit = os.path.exists(webp_cache_path)
+  if cache_hit:
+    os.link(webp_cache_path, webp_path)
+  else:
+    # We place the generated webp image at webp_path instead of in
+    # webp_cache_dir to avoid concurrency issues.
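+    # The cache key built above combines the png's sha1, the cwebp version,
+    # and the quality args, so changing any of them invalidates old entries.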
+    args = [cwebp_binary, png_path, '-o', webp_path, '-quiet'] + quality_args
+    subprocess.check_call(args)
+
+    try:
+      os.link(webp_path, webp_cache_path)
+    except OSError:
+      # Because of concurrent runs, a webp image may already exist in
+      # webp_cache_path.
+      pass
+
+  os.remove(png_path)
+  original_dir = os.path.dirname(os.path.dirname(png_path))
+  rename_tuple = (os.path.relpath(png_path, original_dir),
+                  os.path.relpath(webp_path, original_dir))
+  return rename_tuple, cache_hit
+
+
+def _ConvertToWebP(cwebp_binary, png_paths, path_info, webp_cache_dir):
+  cwebp_version = subprocess.check_output([cwebp_binary, '-version']).rstrip()
+  shard_args = [(f, ) for f in png_paths
+                if not _PNG_WEBP_EXCLUSION_PATTERN.match(f)]
+
+  build_utils.MakeDirectory(webp_cache_dir)
+  results = parallel.BulkForkAndCall(_ConvertToWebPSingle,
+                                     shard_args,
+                                     cwebp_binary=cwebp_binary,
+                                     cwebp_version=cwebp_version,
+                                     webp_cache_dir=webp_cache_dir)
+  total_cache_hits = 0
+  for rename_tuple, cache_hit in results:
+    path_info.RegisterRename(*rename_tuple)
+    total_cache_hits += int(cache_hit)
+
+  logging.debug('png->webp cache: %d/%d', total_cache_hits, len(shard_args))
+
+
+def _RemoveImageExtensions(directory, path_info):
+  """Remove extensions from image files in the passed directory.
+
+  This reduces binary size but does not affect Android's ability to load the
+  images.
+  """
+  for f in _IterFiles(directory):
+    if (f.endswith('.png') or f.endswith('.webp')) and not f.endswith('.9.png'):
+      path_with_extension = f
+      path_no_extension = os.path.splitext(path_with_extension)[0]
+      if path_no_extension != path_with_extension:
+        shutil.move(path_with_extension, path_no_extension)
+        path_info.RegisterRename(
+            os.path.relpath(path_with_extension, directory),
+            os.path.relpath(path_no_extension, directory))
+
+
+def _CompileSingleDep(index, dep_subdir, keep_predicate, aapt2_path,
+                      partials_dir):
+  unique_name = '{}_{}'.format(index, os.path.basename(dep_subdir))
+  partial_path = os.path.join(partials_dir, '{}.zip'.format(unique_name))
+
+  compile_command = [
+      aapt2_path,
+      'compile',
+      # TODO(wnwen): Turn this on once aapt2 forces 9-patch to be crunched.
+      # '--no-crunch',
+      '--dir',
+      dep_subdir,
+      '-o',
+      partial_path
+  ]
+
+  # There are resources targeting API versions lower than our minapi. For
+  # various reasons it is easier to let aapt2 ignore these than to remove
+  # them from our build (e.g. when they come from a third-party library).
+  build_utils.CheckOutput(
+      compile_command,
+      stderr_filter=lambda output: build_utils.FilterLines(
+          output, r'ignoring configuration .* for (styleable|attribute)'))
+
+  # Filtering these files is expensive, so only apply filters to the partials
+  # that have been explicitly targeted.
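+  # |keep_predicate| is non-None only when a --values-filter-rules glob
+  # matched this dep_subdir (see _CreateValuesKeepPredicate below).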
+ if keep_predicate: + logging.debug('Applying .arsc filtering to %s', dep_subdir) + protoresources.StripUnwantedResources(partial_path, keep_predicate) + return partial_path + + +def _CreateValuesKeepPredicate(exclusion_rules, dep_subdir): + patterns = [ + x[1] for x in exclusion_rules + if build_utils.MatchesGlob(dep_subdir, [x[0]]) + ] + if not patterns: + return None + + regexes = [re.compile(p) for p in patterns] + return lambda x: not any(r.search(x) for r in regexes) + + +def _CompileDeps(aapt2_path, dep_subdirs, dep_subdir_overlay_set, temp_dir, + exclusion_rules): + partials_dir = os.path.join(temp_dir, 'partials') + build_utils.MakeDirectory(partials_dir) + + job_params = [(i, dep_subdir, + _CreateValuesKeepPredicate(exclusion_rules, dep_subdir)) + for i, dep_subdir in enumerate(dep_subdirs)] + + # Filtering is slow, so ensure jobs with keep_predicate are started first. + job_params.sort(key=lambda x: not x[2]) + partials = list( + parallel.BulkForkAndCall(_CompileSingleDep, + job_params, + aapt2_path=aapt2_path, + partials_dir=partials_dir)) + + partials_cmd = list() + for i, partial in enumerate(partials): + dep_subdir = job_params[i][1] + if dep_subdir in dep_subdir_overlay_set: + partials_cmd += ['-R'] + partials_cmd += [partial] + return partials_cmd + + +def _CreateResourceInfoFile(path_info, info_path, dependencies_res_zips): + for zip_file in dependencies_res_zips: + zip_info_file_path = zip_file + '.info' + if os.path.exists(zip_info_file_path): + path_info.MergeInfoFile(zip_info_file_path) + path_info.Write(info_path) + + +def _RemoveUnwantedLocalizedStrings(dep_subdirs, options): + """Remove localized strings that should not go into the final output. + + Args: + dep_subdirs: List of resource dependency directories. + options: Command-line options namespace. + """ + # Collect locale and file paths from the existing subdirs. + # The following variable maps Android locale names to + # sets of corresponding xml file paths. + locale_to_files_map = collections.defaultdict(set) + for directory in dep_subdirs: + for f in _IterFiles(directory): + locale = resource_utils.FindLocaleInStringResourceFilePath(f) + if locale: + locale_to_files_map[locale].add(f) + + all_locales = set(locale_to_files_map) + + # Set A: wanted locales, either all of them or the + # list provided by --locale-allowlist. + wanted_locales = all_locales + if options.locale_allowlist: + wanted_locales = _ToAndroidLocales(options.locale_allowlist) + + # Set B: shared resources locales, which is either set A + # or the list provided by --shared-resources-allowlist-locales + shared_resources_locales = wanted_locales + shared_names_allowlist = set() + if options.shared_resources_allowlist_locales: + shared_names_allowlist = set( + resource_utils.GetRTxtStringResourceNames( + options.shared_resources_allowlist)) + + shared_resources_locales = _ToAndroidLocales( + options.shared_resources_allowlist_locales) + + # Remove any file that belongs to a locale not covered by + # either A or B. + removable_locales = (all_locales - wanted_locales - shared_resources_locales) + for locale in removable_locales: + for path in locale_to_files_map[locale]: + os.remove(path) + + # For any locale in B but not in A, only keep the shared + # resource strings in each file. 
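+  # Worked example (hypothetical sets): with wanted A = {en, fr} and shared
+  # B = {en, de}, files for 'de' keep only allowlisted strings, files for
+  # 'fr' drop the allowlisted strings, and every other locale's files were
+  # already removed above.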
+  for locale in shared_resources_locales - wanted_locales:
+    for path in locale_to_files_map[locale]:
+      resource_utils.FilterAndroidResourceStringsXml(
+          path, lambda x: x in shared_names_allowlist)
+
+  # For any locale in A but not in B, only keep the strings
+  # that are _not_ from shared resources in the file.
+  for locale in wanted_locales - shared_resources_locales:
+    for path in locale_to_files_map[locale]:
+      resource_utils.FilterAndroidResourceStringsXml(
+          path, lambda x: x not in shared_names_allowlist)
+
+
+def _FilterResourceFiles(dep_subdirs, keep_predicate):
+  # Remove any resource file that does not pass |keep_predicate|, and collect
+  # the paths of the .png files that survive (they may be converted to webp
+  # later).
+  png_paths = []
+  for directory in dep_subdirs:
+    for f in _IterFiles(directory):
+      if not keep_predicate(f):
+        os.remove(f)
+      elif f.endswith('.png'):
+        png_paths.append(f)
+
+  return png_paths
+
+
+def _PackageApk(options, build):
+  """Compile and link resources with aapt2.
+
+  Args:
+    options: The command-line options.
+    build: BuildContext object.
+  Returns:
+    The manifest package name for the APK.
+  """
+  logging.debug('Extracting resource .zips')
+  dep_subdirs = []
+  dep_subdir_overlay_set = set()
+  for dependency_res_zip in options.dependencies_res_zips:
+    extracted_dep_subdirs = resource_utils.ExtractDeps([dependency_res_zip],
+                                                       build.deps_dir)
+    dep_subdirs += extracted_dep_subdirs
+    if dependency_res_zip in options.dependencies_res_zip_overlays:
+      dep_subdir_overlay_set.update(extracted_dep_subdirs)
+
+  logging.debug('Applying locale transformations')
+  path_info = resource_utils.ResourceInfoFile()
+  _RenameLocaleResourceDirs(dep_subdirs, path_info)
+
+  logging.debug('Applying file-based exclusions')
+  keep_predicate = _CreateKeepPredicate(options.resource_exclusion_regex,
+                                        options.resource_exclusion_exceptions)
+  png_paths = _FilterResourceFiles(dep_subdirs, keep_predicate)
+
+  if options.locale_allowlist or options.shared_resources_allowlist_locales:
+    logging.debug('Applying locale-based string exclusions')
+    _RemoveUnwantedLocalizedStrings(dep_subdirs, options)
+
+  if png_paths and options.png_to_webp:
+    logging.debug('Converting png->webp')
+    _ConvertToWebP(options.webp_binary, png_paths, path_info,
+                   options.webp_cache_dir)
+  logging.debug('Applying drawable transformations')
+  for directory in dep_subdirs:
+    _MoveImagesToNonMdpiFolders(directory, path_info)
+    _RemoveImageExtensions(directory, path_info)
+
+  logging.debug('Running aapt2 compile')
+  exclusion_rules = [x.split(':', 1) for x in options.values_filter_rules]
+  partials = _CompileDeps(options.aapt2_path, dep_subdirs,
+                          dep_subdir_overlay_set, build.temp_dir,
+                          exclusion_rules)
+
+  link_command = [
+      options.aapt2_path,
+      'link',
+      '--auto-add-overlay',
+      '--no-version-vectors',
+      '--output-text-symbols',
+      build.r_txt_path,
+  ]
+
+  for j in options.include_resources:
+    link_command += ['-I', j]
+  if options.proguard_file:
+    link_command += ['--proguard', build.proguard_path]
+    link_command += ['--proguard-minimal-keep-rules']
+  if options.proguard_file_main_dex:
+    link_command += ['--proguard-main-dex', build.proguard_main_dex_path]
+  if options.emit_ids_out:
+    link_command += ['--emit-ids', build.emit_ids_path]
+
+  # Note: only one of --proto-format, --shared-lib or --app-as-shared-lib
+  # can be used with recent versions of aapt2.
+  if options.shared_resources:
+    link_command.append('--shared-lib')
+
+  if int(options.min_sdk_version) > 21:
+    link_command.append('--no-xml-namespaces')
+
+  if options.package_id:
+    link_command += [
+        '--package-id',
+        '0x%02x' % options.package_id,
+        '--allow-reserved-package-id',
+    ]
+
+  fixed_manifest, desired_manifest_package_name, fixed_manifest_package = (
+      _FixManifest(options, build.temp_dir))
+  if options.rename_manifest_package:
+    desired_manifest_package_name = options.rename_manifest_package
+
+  link_command += [
+      '--manifest', fixed_manifest, '--rename-manifest-package',
+      desired_manifest_package_name
+  ]
+
+  if options.package_id is not None:
+    package_id = options.package_id
+  elif options.shared_resources:
+    package_id = 0
+  else:
+    package_id = 0x7f
+  _CreateStableIdsFile(options.use_resource_ids_path, build.stable_ids_path,
+                       fixed_manifest_package, package_id)
+  link_command += ['--stable-ids', build.stable_ids_path]
+
+  link_command += partials
+
+  # We always create a binary arsc file first, then convert to proto, so flags
+  # such as --shared-lib can be supported.
+  link_command += ['-o', build.arsc_path]
+
+  logging.debug('Starting: aapt2 link')
+  link_proc = subprocess.Popen(link_command)
+
+  # Create .res.info file in parallel.
+  if options.info_path:
+    logging.debug('Creating .res.info file')
+    _CreateResourceInfoFile(path_info, build.info_path,
+                            options.dependencies_res_zips)
+
+  exit_code = link_proc.wait()
+  assert exit_code == 0, f'aapt2 link cmd failed with {exit_code=}'
+  logging.debug('Finished: aapt2 link')
+
+  if options.shared_resources:
+    logging.debug('Resolving styleables in R.txt')
+    # Need to resolve references because the unused resource removal tool does
+    # not support references in R.txt files.
+    resource_utils.ResolveStyleableReferences(build.r_txt_path)
+
+  if exit_code:
+    raise subprocess.CalledProcessError(exit_code, link_command)
+
+  if options.proguard_file and (options.shared_resources
+                                or options.app_as_shared_lib):
+    # Make sure the R class associated with the manifest package does not have
+    # its onResourcesLoaded method obfuscated or removed, so that the framework
+    # can call it in the case where the APK is being loaded as a library.
+    with open(build.proguard_path, 'a') as proguard_file:
+      keep_rule = '''
+                  -keep,allowoptimization class {package}.R {{
+                    public static void onResourcesLoaded(int);
+                  }}
+                  '''.format(package=desired_manifest_package_name)
+      proguard_file.write(textwrap.dedent(keep_rule))
+
+  logging.debug('Running aapt2 convert')
+  build_utils.CheckOutput([
+      options.aapt2_path, 'convert', '--output-format', 'proto', '-o',
+      build.proto_path, build.arsc_path
+  ])
+
+  # Workaround for b/147674078. This is only needed for WebLayer and does not
+  # affect WebView usage, since WebView does not use dynamic attributes.
+  if options.shared_resources:
+    logging.debug('Hardcoding dynamic attributes')
+    protoresources.HardcodeSharedLibraryDynamicAttributes(
+        build.proto_path, options.is_bundle_module,
+        options.shared_resources_allowlist)
+
+  build_utils.CheckOutput([
+      options.aapt2_path, 'convert', '--output-format', 'binary', '-o',
+      build.arsc_path, build.proto_path
+  ])
+
+  # Sanity check that the created resources have the expected package ID.
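+  # (package_id was chosen above: 0x7f for a regular apk, 0x00 for
+  # --shared-resources, or the explicit --package-id value.)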
+ logging.debug('Performing sanity check') + _, actual_package_id = resource_utils.ExtractArscPackage( + options.aapt2_path, + build.arsc_path if options.arsc_path else build.proto_path) + # When there are no resources, ExtractArscPackage returns (None, None), in + # this case there is no need to check for matching package ID. + if actual_package_id is not None and actual_package_id != package_id: + raise Exception('Invalid package ID 0x%x (expected 0x%x)' % + (actual_package_id, package_id)) + + return desired_manifest_package_name + + +def _CreateStableIdsFile(in_path, out_path, package_name, package_id): + """Transforms a file generated by --emit-ids from another package. + + --stable-ids is generally meant to be used by different versions of the same + package. To make it work for other packages, we need to transform the package + name references to match the package that resources are being generated for. + """ + if in_path: + data = pathlib.Path(in_path).read_text() + else: + # Force IDs to use 0x01 for the type byte in order to ensure they are + # different from IDs generated by other apps. https://crbug.com/1293336 + data = 'pkg:id/fake_resource_id = 0x7f010000\n' + # Replace "pkg:" with correct package name. + data = re.sub(r'^.*?:', package_name + ':', data, flags=re.MULTILINE) + # Replace "0x7f" with correct package id. + data = re.sub(r'0x..', '0x%02x' % package_id, data) + pathlib.Path(out_path).write_text(data) + + +def _WriteOutputs(options, build): + possible_outputs = [ + (options.srcjar_out, build.srcjar_path), + (options.r_text_out, build.r_txt_path), + (options.arsc_path, build.arsc_path), + (options.proto_path, build.proto_path), + (options.proguard_file, build.proguard_path), + (options.proguard_file_main_dex, build.proguard_main_dex_path), + (options.emit_ids_out, build.emit_ids_path), + (options.info_path, build.info_path), + ] + + for final, temp in possible_outputs: + # Write file only if it's changed. + if final and not (os.path.exists(final) and filecmp.cmp(final, temp)): + shutil.move(temp, final) + + +def _CreateNormalizedManifestForVerification(options): + with build_utils.TempDir() as tempdir: + fixed_manifest, _, _ = _FixManifest(options, tempdir) + with open(fixed_manifest) as f: + return manifest_utils.NormalizeManifest( + f.read(), options.verification_version_code_offset, + options.verification_library_version_offset) + + +def main(args): + build_utils.InitLogging('RESOURCE_DEBUG') + args = build_utils.ExpandFileArgs(args) + options = _ParseArgs(args) + + if options.expected_file: + actual_data = _CreateNormalizedManifestForVerification(options) + diff_utils.CheckExpectations(actual_data, options) + if options.only_verify_expectations: + return + + path = options.arsc_path or options.proto_path + debug_temp_resources_dir = os.environ.get('TEMP_RESOURCES_DIR') + if debug_temp_resources_dir: + path = os.path.join(debug_temp_resources_dir, os.path.basename(path)) + else: + # Use a deterministic temp directory since .pb files embed the absolute + # path of resources: crbug.com/939984 + path = path + '.tmpdir' + build_utils.DeleteDirectory(path) + + with resource_utils.BuildContext( + temp_dir=path, keep_files=bool(debug_temp_resources_dir)) as build: + + manifest_package_name = _PackageApk(options, build) + + # If --shared-resources-allowlist is used, all the resources listed in the + # corresponding R.txt file will be non-final, and an onResourcesLoaded() + # will be generated to adjust them at runtime. 
+    #
+    # Otherwise, if --shared-resources is used, all resources will be
+    # non-final, and an onResourcesLoaded() method will be generated too.
+    #
+    # Otherwise, all resources will be final, and no method will be
+    # generated.
+    #
+    rjava_build_options = resource_utils.RJavaBuildOptions()
+    if options.shared_resources_allowlist:
+      rjava_build_options.ExportSomeResources(
+          options.shared_resources_allowlist)
+      rjava_build_options.GenerateOnResourcesLoaded()
+      if options.shared_resources:
+        # The final resources will only be used in WebLayer, so hardcode the
+        # package ID to be what WebLayer expects.
+        rjava_build_options.SetFinalPackageId(
+            protoresources.SHARED_LIBRARY_HARDCODED_ID)
+    elif options.shared_resources or options.app_as_shared_lib:
+      rjava_build_options.ExportAllResources()
+      rjava_build_options.GenerateOnResourcesLoaded()
+
+    custom_root_package_name = options.r_java_root_package_name
+    grandparent_custom_package_name = None
+
+    # Always generate an R.java file for the package listed in
+    # AndroidManifest.xml because this is where the Android framework looks
+    # to find onResourcesLoaded() for shared library apks. While not actually
+    # necessary for application apks, it also doesn't hurt.
+    apk_package_name = manifest_package_name
+
+    if options.package_name and not options.arsc_package_name:
+      # Feature modules have their own custom root package name and should
+      # inherit from the appropriate base module package. This behaviour
+      # should not be present for test apks with an apk under test. Thus,
+      # arsc_package_name is used as it is only defined for test apks with an
+      # apk under test.
+      custom_root_package_name = options.package_name
+      grandparent_custom_package_name = options.r_java_root_package_name
+      # Feature modules have the same manifest package as the base module,
+      # but they should not create an R.java for said manifest package
+      # because it will be created in the base module.
+      apk_package_name = None
+
+    if options.srcjar_out:
+      logging.debug('Creating R.srcjar')
+      resource_utils.CreateRJavaFiles(build.srcjar_dir, apk_package_name,
+                                      build.r_txt_path,
+                                      options.extra_res_packages,
+                                      rjava_build_options, options.srcjar_out,
+                                      custom_root_package_name,
+                                      grandparent_custom_package_name)
+      with action_helpers.atomic_output(build.srcjar_path) as f:
+        zip_helpers.zip_directory(f, build.srcjar_dir)
+
+    logging.debug('Copying outputs')
+    _WriteOutputs(options, build)
+
+    if options.depfile:
+      assert options.srcjar_out, 'Update first output below and remove assert.'
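+      # The depfile lists every input that feeds this action so ninja re-runs
+      # it when any of them changes. A sketch of the emitted Makefile-style
+      # line (hypothetical paths):
+      #   gen/foo/R.srcjar: a.resources.zip b.resources.zip android.jar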
+ depfile_deps = (options.dependencies_res_zips + + options.dependencies_res_zip_overlays + + options.include_resources) + action_helpers.write_depfile(options.depfile, options.srcjar_out, + depfile_deps) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/android/gyp/compile_resources.pydeps b/android/gyp/compile_resources.pydeps new file mode 100644 index 000000000000..458a772c319f --- /dev/null +++ b/android/gyp/compile_resources.pydeps @@ -0,0 +1,38 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/compile_resources.pydeps build/android/gyp/compile_resources.py +../../../third_party/jinja2/__init__.py +../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py +../../../third_party/jinja2/bccache.py +../../../third_party/jinja2/compiler.py +../../../third_party/jinja2/defaults.py +../../../third_party/jinja2/environment.py +../../../third_party/jinja2/exceptions.py +../../../third_party/jinja2/filters.py +../../../third_party/jinja2/idtracking.py +../../../third_party/jinja2/lexer.py +../../../third_party/jinja2/loaders.py +../../../third_party/jinja2/nodes.py +../../../third_party/jinja2/optimizer.py +../../../third_party/jinja2/parser.py +../../../third_party/jinja2/runtime.py +../../../third_party/jinja2/tests.py +../../../third_party/jinja2/utils.py +../../../third_party/jinja2/visitor.py +../../../third_party/markupsafe/__init__.py +../../../third_party/markupsafe/_compat.py +../../../third_party/markupsafe/_native.py +../../action_helpers.py +../../gn_helpers.py +../../zip_helpers.py +compile_resources.py +proto/Configuration_pb2.py +proto/Resources_pb2.py +proto/__init__.py +util/__init__.py +util/build_utils.py +util/diff_utils.py +util/manifest_utils.py +util/parallel.py +util/protoresources.py +util/resource_utils.py diff --git a/android/gyp/copy_ex.py b/android/gyp/copy_ex.py new file mode 100755 index 000000000000..542a08ca1ba5 --- /dev/null +++ b/android/gyp/copy_ex.py @@ -0,0 +1,131 @@ +#!/usr/bin/env python3 +# +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Copies files to a directory.""" + + +import filecmp +import itertools +import optparse +import os +import shutil +import sys + +from util import build_utils +import action_helpers # build_utils adds //build to sys.path. + + +def _get_all_files(base): + """Returns a list of all the files in |base|. Each entry is relative to the + last path entry of |base|.""" + result = [] + dirname = os.path.dirname(base) + for root, _, files in os.walk(base): + result.extend([os.path.join(root[len(dirname):], f) for f in files]) + return result + +def CopyFile(f, dest, deps): + """Copy file or directory and update deps.""" + if os.path.isdir(f): + shutil.copytree(f, os.path.join(dest, os.path.basename(f))) + deps.extend(_get_all_files(f)) + else: + if os.path.isfile(os.path.join(dest, os.path.basename(f))): + dest = os.path.join(dest, os.path.basename(f)) + + deps.append(f) + + if os.path.isfile(dest): + if filecmp.cmp(dest, f, shallow=False): + return + # The shutil.copy() below would fail if the file does not have write + # permissions. Deleting the file has similar costs to modifying the + # permissions. 
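+      # (For example, a destination previously copied from a read-only
+      # checkout keeps its 0444 mode, and shutil.copy() over it would raise
+      # PermissionError; unlink-then-copy avoids that.)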
+      os.unlink(dest)
+
+  shutil.copy(f, dest)
+
+def DoCopy(options, deps):
+  """Copy files or directories given in options.files and update deps."""
+  files = list(
+      itertools.chain.from_iterable(
+          action_helpers.parse_gn_list(f) for f in options.files))
+
+  for f in files:
+    if os.path.isdir(f) and not options.clear:
+      print('To avoid stale files you must use --clear when copying '
+            'directories')
+      sys.exit(-1)
+    CopyFile(f, options.dest, deps)
+
+def DoRenaming(options, deps):
+  """Copy and rename files given in options.renaming_sources and update deps."""
+  src_files = list(
+      itertools.chain.from_iterable(
+          action_helpers.parse_gn_list(f) for f in options.renaming_sources))
+
+  dest_files = list(
+      itertools.chain.from_iterable(
+          action_helpers.parse_gn_list(f)
+          for f in options.renaming_destinations))
+
+  if len(src_files) != len(dest_files):
+    print('The number of renaming source and destination files must match.')
+    sys.exit(-1)
+
+  for src, dest in zip(src_files, dest_files):
+    if os.path.isdir(src):
+      print('Renaming a directory is not supported.')
+      sys.exit(-1)
+    else:
+      CopyFile(src, os.path.join(options.dest, dest), deps)
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+
+  parser = optparse.OptionParser()
+  action_helpers.add_depfile_arg(parser)
+
+  parser.add_option('--dest', help='Directory to copy files to.')
+  parser.add_option('--files', action='append',
+                    help='List of files to copy.')
+  parser.add_option('--clear', action='store_true',
+                    help='If set, the destination directory will be deleted '
+                    'before copying files to it. This is highly recommended '
+                    'to ensure that no stale files are left in the '
+                    'directory.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--renaming-sources',
+                    action='append',
+                    help='List of files to be renamed while being copied to '
+                    'the dest directory.')
+  parser.add_option('--renaming-destinations',
+                    action='append',
+                    help='List of destination file names without paths; the '
+                    'number of elements must match --renaming-sources.')
+
+  options, _ = parser.parse_args(args)
+
+  if options.clear:
+    build_utils.DeleteDirectory(options.dest)
+    build_utils.MakeDirectory(options.dest)
+
+  deps = []
+
+  if options.files:
+    DoCopy(options, deps)
+
+  if options.renaming_sources:
+    DoRenaming(options, deps)
+
+  if options.depfile:
+    action_helpers.write_depfile(options.depfile, options.stamp, deps)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/android/gyp/copy_ex.pydeps b/android/gyp/copy_ex.pydeps
new file mode 100644
index 000000000000..5d75f9a3965b
--- /dev/null
+++ b/android/gyp/copy_ex.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/copy_ex.pydeps build/android/gyp/copy_ex.py
+../../action_helpers.py
+../../gn_helpers.py
+copy_ex.py
+util/__init__.py
+util/build_utils.py
diff --git a/android/gyp/create_apk_operations_script.py b/android/gyp/create_apk_operations_script.py
new file mode 100755
index 000000000000..1d1cb5d1ab72
--- /dev/null
+++ b/android/gyp/create_apk_operations_script.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python3
+# Copyright 2017 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+ + +import argparse +import os +import string +import sys + +from util import build_utils + +SCRIPT_TEMPLATE = string.Template("""\ +#!/usr/bin/env python3 +# +# This file was generated by build/android/gyp/create_apk_operations_script.py + +import os +import sys + +def main(): + script_directory = os.path.dirname(__file__) + resolve = lambda p: p if p is None else os.path.abspath(os.path.join( + script_directory, p)) + sys.path.append(resolve(${APK_OPERATIONS_DIR})) + import apk_operations + output_dir = resolve(${OUTPUT_DIR}) + apk_operations.Run( + output_dir, + resolve(${APK_PATH}), + [resolve(p) for p in ${ADDITIONAL_APK_PATHS}], + resolve(${INC_JSON_PATH}), + ${FLAGS_FILE}, + ${TARGET_CPU}, + resolve(${MAPPING_PATH})) + + +if __name__ == '__main__': + sys.exit(main()) +""") + + +def main(args): + args = build_utils.ExpandFileArgs(args) + parser = argparse.ArgumentParser() + parser.add_argument('--script-output-path', + help='Output path for executable script.') + parser.add_argument('--apk-path') + parser.add_argument('--incremental-install-json-path') + parser.add_argument('--command-line-flags-file') + parser.add_argument('--target-cpu') + parser.add_argument( + '--additional-apk-path', + action='append', + dest='additional_apk_paths', + default=[], + help='Paths to APKs to be installed prior to --apk-path.') + parser.add_argument('--proguard-mapping-path') + args = parser.parse_args(args) + + def relativize(path): + """Returns the path relative to the output script directory.""" + if path is None: + return path + return os.path.relpath(path, os.path.dirname(args.script_output_path)) + apk_operations_dir = os.path.join(os.path.dirname(__file__), os.path.pardir) + apk_operations_dir = relativize(apk_operations_dir) + + with open(args.script_output_path, 'w') as script: + script_dict = { + 'APK_OPERATIONS_DIR': repr(apk_operations_dir), + 'OUTPUT_DIR': repr(relativize('.')), + 'APK_PATH': repr(relativize(args.apk_path)), + 'ADDITIONAL_APK_PATHS': + [relativize(p) for p in args.additional_apk_paths], + 'INC_JSON_PATH': repr(relativize(args.incremental_install_json_path)), + 'MAPPING_PATH': repr(relativize(args.proguard_mapping_path)), + 'FLAGS_FILE': repr(args.command_line_flags_file), + 'TARGET_CPU': repr(args.target_cpu), + } + script.write(SCRIPT_TEMPLATE.substitute(script_dict)) + os.chmod(args.script_output_path, 0o750) + return 0 + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/android/gyp/create_apk_operations_script.pydeps b/android/gyp/create_apk_operations_script.pydeps new file mode 100644 index 000000000000..e09bb7244c35 --- /dev/null +++ b/android/gyp/create_apk_operations_script.pydeps @@ -0,0 +1,6 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_apk_operations_script.pydeps build/android/gyp/create_apk_operations_script.py +../../gn_helpers.py +create_apk_operations_script.py +util/__init__.py +util/build_utils.py diff --git a/android/gyp/create_app_bundle.py b/android/gyp/create_app_bundle.py new file mode 100755 index 000000000000..128260868a16 --- /dev/null +++ b/android/gyp/create_app_bundle.py @@ -0,0 +1,605 @@ +#!/usr/bin/env python3 +# +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+
+"""Create an Android application bundle from one or more bundle modules."""
+
+import argparse
+import concurrent.futures
+import json
+import logging
+import os
+import posixpath
+import shutil
+import sys
+from xml.etree import ElementTree
+import zipfile
+
+sys.path.append(
+    os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from pylib.utils import dexdump
+
+import bundletool
+from util import build_utils
+from util import manifest_utils
+from util import resource_utils
+import action_helpers  # build_utils adds //build to sys.path.
+import zip_helpers
+
+
+# Location of language-based assets in bundle modules.
+_LOCALES_SUBDIR = 'assets/locales/'
+
+# The fallback locale should always have its .pak file included in
+# the base apk, i.e. not use language-based asset targeting. This ensures
+# that Chrome won't crash on startup if its bundle is installed on a device
+# with an unsupported system locale (e.g. fur-rIT).
+_FALLBACK_LOCALE = 'en-US'
+
+# List of split dimensions recognized by this tool.
+_ALL_SPLIT_DIMENSIONS = [ 'ABI', 'SCREEN_DENSITY', 'LANGUAGE' ]
+
+# For historical reasons, certain languages identified by Chromium with a
+# 3-letter ISO 639-2 code are mapped to a nearly equivalent 2-letter
+# ISO 639-1 code instead (older Android releases only supported the latter
+# when matching resources).
+#
+# This performs the same conversion as for Java resources.
+_SHORTEN_LANGUAGE_CODE_MAP = {
+  'fil': 'tl',  # Filipino to Tagalog.
+}
+
+# A list of extensions corresponding to files that should never be compressed
+# in the bundle. This used to be handled by bundletool automatically until
+# release 0.8.0, which required that this be passed to the BundleConfig
+# file instead.
+#
+# This is the original list, which was taken from aapt2, with 'webp' added to
+# it (which curiously was missing from the list).
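+#
+# These extensions become '**.<ext>' uncompressedGlob entries in
+# _GenerateBundleConfigJson() below (e.g. '**.png'), so matching files are
+# stored uncompressed in every module.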
+_UNCOMPRESSED_FILE_EXTS = [
+    '3g2', '3gp', '3gpp', '3gpp2', 'aac', 'amr', 'awb', 'gif', 'imy', 'jet',
+    'jpeg', 'jpg', 'm4a', 'm4v', 'mid', 'midi', 'mkv', 'mp2', 'mp3', 'mp4',
+    'mpeg', 'mpg', 'ogg', 'png', 'rtttl', 'smf', 'wav', 'webm', 'webp', 'wmv',
+    'xmf'
+]
+
+_COMPONENT_TYPES = ('activity', 'provider', 'receiver', 'service')
+_DEDUPE_ENTRY_TYPES = _COMPONENT_TYPES + ('activity-alias', 'meta-data')
+
+_ROTATION_METADATA_KEY = 'com.google.play.apps.signing/RotationConfig.textproto'
+
+
+def _ParseArgs(args):
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--out-bundle', required=True,
+                      help='Output bundle zip archive.')
+  parser.add_argument('--module-zips', required=True,
+                      help='GN-list of module zip archives.')
+  parser.add_argument(
+      '--pathmap-in-paths',
+      action='append',
+      help='List of module pathmap files.')
+  parser.add_argument(
+      '--module-name',
+      action='append',
+      dest='module_names',
+      help='List of module names.')
+  parser.add_argument(
+      '--pathmap-out-path', help='Path to combined pathmap file for bundle.')
+  parser.add_argument(
+      '--rtxt-in-paths', action='append', help='GN-list of module R.txt files.')
+  parser.add_argument(
+      '--rtxt-out-path', help='Path to combined R.txt file for bundle.')
+  parser.add_argument('--uncompressed-assets', action='append',
+                      help='GN-list of uncompressed assets.')
+  parser.add_argument(
+      '--compress-shared-libraries',
+      action='store_true',
+      help='Whether to store native libraries compressed.')
+  parser.add_argument('--compress-dex',
+                      action='store_true',
+                      help='Compress .dex files.')
+  parser.add_argument('--split-dimensions',
+                      help="GN-list of split dimensions to support.")
+  parser.add_argument(
+      '--base-module-rtxt-path',
+      help='Optional path to the base module\'s R.txt file, only used with '
+      'language split dimension.')
+  parser.add_argument(
+      '--base-allowlist-rtxt-path',
+      help='Optional path to an R.txt file, string resources '
+      'listed there _and_ in --base-module-rtxt-path will '
+      'be kept in the base bundle module, even if language'
+      ' splitting is enabled.')
+  parser.add_argument('--rotation-config',
+                      help='Path to a RotationConfig.textproto.')
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+
+  parser.add_argument(
+      '--validate-services',
+      action='store_true',
+      help='Check if services are in base module if isolatedSplits is enabled.')
+
+  options = parser.parse_args(args)
+  options.module_zips = action_helpers.parse_gn_list(options.module_zips)
+
+  if len(options.module_zips) == 0:
+    parser.error('The module zip list cannot be empty.')
+  if len(options.module_zips) != len(options.module_names):
+    parser.error('# module zips != # names.')
+  if 'base' not in options.module_names:
+    parser.error('Missing base module.')
+
+  # Sort modules for more stable outputs.
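+  # ('base' always sorts first, then the rest alphabetically; e.g.
+  # ['lang', 'base', 'feature'] -> ['base', 'feature', 'lang'].)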
+  per_module_values = list(
+      zip(options.module_names, options.module_zips,
+          options.uncompressed_assets, options.rtxt_in_paths,
+          options.pathmap_in_paths))
+  per_module_values.sort(key=lambda x: (x[0] != 'base', x[0]))
+  options.module_names = [x[0] for x in per_module_values]
+  options.module_zips = [x[1] for x in per_module_values]
+  options.uncompressed_assets = [x[2] for x in per_module_values]
+  options.rtxt_in_paths = [x[3] for x in per_module_values]
+  options.pathmap_in_paths = [x[4] for x in per_module_values]
+
+  options.rtxt_in_paths = action_helpers.parse_gn_list(options.rtxt_in_paths)
+  options.pathmap_in_paths = action_helpers.parse_gn_list(
+      options.pathmap_in_paths)
+
+  # Merge all uncompressed assets into a set.
+  uncompressed_list = []
+  for entry in action_helpers.parse_gn_list(options.uncompressed_assets):
+    # Each entry has the following format: 'zipPath' or 'srcPath:zipPath'
+    pos = entry.find(':')
+    if pos >= 0:
+      uncompressed_list.append(entry[pos + 1:])
+    else:
+      uncompressed_list.append(entry)
+
+  options.uncompressed_assets = set(uncompressed_list)
+
+  # Check that all split dimensions are valid.
+  if options.split_dimensions:
+    options.split_dimensions = action_helpers.parse_gn_list(
+        options.split_dimensions)
+    for dim in options.split_dimensions:
+      if dim.upper() not in _ALL_SPLIT_DIMENSIONS:
+        parser.error('Invalid split dimension "%s" (expected one of: %s)' % (
+            dim, ', '.join(x.lower() for x in _ALL_SPLIT_DIMENSIONS)))
+
+  # As a special case, --base-allowlist-rtxt-path can be empty to indicate
+  # that the module doesn't need such an allowlist. That's because it is
+  # easier to check this condition here than through GN rules :-(
+  if options.base_allowlist_rtxt_path == '':
+    options.base_module_rtxt_path = None
+
+  # Check --base-module-rtxt-path and --base-allowlist-rtxt-path usage.
+  if options.base_module_rtxt_path:
+    if not options.base_allowlist_rtxt_path:
+      parser.error(
+          '--base-module-rtxt-path requires --base-allowlist-rtxt-path')
+    if 'language' not in options.split_dimensions:
+      parser.error('--base-module-rtxt-path is only valid with '
+                   'language-based splits.')
+
+  return options
+
+
+def _MakeSplitDimension(value, enabled):
+  """Return a dict modelling a BundleConfig splitDimension entry."""
+  return {'value': value, 'negate': not enabled}
+
+
+def _GenerateBundleConfigJson(uncompressed_assets, compress_dex,
+                              compress_shared_libraries, split_dimensions,
+                              base_master_resource_ids):
+  """Generate the contents of a JSON BundleConfig file.
+
+  Args:
+    uncompressed_assets: A list or set of file paths under assets/ that should
+      always be stored uncompressed.
+    compress_dex: Boolean, whether to compress .dex files.
+    compress_shared_libraries: Boolean, whether to compress native libs.
+    split_dimensions: list of split dimensions.
+    base_master_resource_ids: Optional list of 32-bit resource IDs to keep
+      inside the base module, even when split dimensions are enabled.
+  Returns:
+    A JSON string that can be written to a BundleConfig file.
+  """
+  # Compute the splitsConfig list. Each item is a dictionary that can have
+  # the following keys:
+  #    'value': One of ['LANGUAGE', 'DENSITY', 'ABI']
+  #    'negate': Boolean, True to indicate that the bundle should *not* be
+  #              split (unused at the moment by this script).
+
+  split_dimensions = [ _MakeSplitDimension(dim, dim in split_dimensions)
+                       for dim in _ALL_SPLIT_DIMENSIONS ]
+
+  # Locale-specific pak files stored in bundle splits need not be compressed.
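+  # (These match the rewritten paths produced by _RewriteLanguageAssetPath(),
+  # e.g. assets/locales#lang_sv/sv.pak and assets/fallback-locales/en-US.pak.)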
+  uncompressed_globs = [
+      'assets/locales#lang_*/*.pak', 'assets/fallback-locales/*.pak'
+  ]
+  # normpath to allow for ../ prefix.
+  uncompressed_globs.extend(
+      posixpath.normpath('assets/' + x) for x in uncompressed_assets)
+  # NOTE: Use '**' instead of '*' to work through directories!
+  uncompressed_globs.extend('**.' + ext for ext in _UNCOMPRESSED_FILE_EXTS)
+  if not compress_dex:
+    # Explicit glob required only when using bundletool to create .apks files.
+    # Play Store looks for and respects "uncompressDexFiles" set below.
+    # b/176198991
+    # This is added as a placeholder entry in order to have no effect unless
+    # processed with app_bundle_utils.GenerateBundleApks().
+    uncompressed_globs.append('classesX.dex')
+
+  data = {
+      'optimizations': {
+          'splitsConfig': {
+              'splitDimension': split_dimensions,
+          },
+          'uncompressNativeLibraries': {
+              'enabled': not compress_shared_libraries,
+          },
+          'uncompressDexFiles': {
+              'enabled': True,  # Applies only for P+.
+          }
+      },
+      'compression': {
+          'uncompressedGlob': sorted(uncompressed_globs),
+      },
+  }
+
+  if base_master_resource_ids:
+    data['master_resources'] = {
+        'resource_ids': list(base_master_resource_ids),
+    }
+
+  return json.dumps(data, indent=2)
+
+
+def _RewriteLanguageAssetPath(src_path):
+  """Rewrite the destination path of a locale asset for language-based splits.
+
+  Should only be used when generating bundles with language-based splits.
+  This will rewrite paths that look like locales/<locale>.pak into
+  locales#lang_<language>/<locale>.pak, where <language> is the language code
+  from the locale.
+
+  Returns the new path.
+  """
+  if not src_path.startswith(_LOCALES_SUBDIR) or not src_path.endswith('.pak'):
+    return [src_path]
+
+  locale = src_path[len(_LOCALES_SUBDIR):-4]
+  android_locale = resource_utils.ToAndroidLocaleName(locale)
+
+  # The locale format is <lang>-<region>, <lang>, or BCP-47 (e.g. b+sr+Latn).
+  # Extract the language.
+  pos = android_locale.find('-')
+  if android_locale.startswith('b+'):
+    # If the locale is in BCP-47, the language is the second tag
+    # (e.g. b+sr+Latn).
+    android_language = android_locale.split('+')[1]
+  elif pos >= 0:
+    android_language = android_locale[:pos]
+  else:
+    android_language = android_locale
+
+  if locale == _FALLBACK_LOCALE:
+    # Fallback locale .pak files must be placed in a different directory
+    # to ensure they are always stored in the base module.
+    result_path = 'assets/fallback-locales/%s.pak' % locale
+  else:
+    # Other language .pak files go into a language-specific asset directory
+    # that bundletool will store in separate split APKs.
+    result_path = 'assets/locales#lang_%s/%s.pak' % (android_language, locale)
+
+  return result_path
+
+
+def _SplitModuleForAssetTargeting(src_module_zip, tmp_dir, split_dimensions):
+  """Splits assets in a module if needed.
+
+  Args:
+    src_module_zip: input zip module path.
+    tmp_dir: Path to temporary directory, where the new output module might
+      be written to.
+    split_dimensions: list of split dimensions.
+
+  Returns:
+    If the module doesn't need asset targeting, does nothing and returns
+    src_module_zip. Otherwise, creates a new module zip archive under tmp_dir
+    with the same file name, but which contains assets paths targeting the
+    proper dimensions.
+  """
+  split_language = 'LANGUAGE' in split_dimensions
+  if not split_language:
+    # Nothing to target, so return original module path.
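+    # (When LANGUAGE splitting is enabled, each assets/locales/<locale>.pak
+    # entry is instead rewritten below via _RewriteLanguageAssetPath(), e.g.
+    # assets/locales/sv.pak -> assets/locales#lang_sv/sv.pak.)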
+    return src_module_zip
+
+  with zipfile.ZipFile(src_module_zip, 'r') as src_zip:
+    language_files = [
+        f for f in src_zip.namelist() if f.startswith(_LOCALES_SUBDIR)]
+
+    if not language_files:
+      # No language-based assets to split in this module.
+      return src_module_zip
+
+    tmp_zip = os.path.join(tmp_dir, os.path.basename(src_module_zip))
+    with zipfile.ZipFile(tmp_zip, 'w') as dst_zip:
+      for info in src_zip.infolist():
+        src_path = info.filename
+        is_compressed = info.compress_type != zipfile.ZIP_STORED
+
+        dst_path = src_path
+        if src_path in language_files:
+          dst_path = _RewriteLanguageAssetPath(src_path)
+
+        zip_helpers.add_to_zip_hermetic(dst_zip,
+                                        dst_path,
+                                        data=src_zip.read(src_path),
+                                        compress=is_compressed)
+
+    return tmp_zip
+
+
+def _GenerateBaseResourcesAllowList(base_module_rtxt_path,
+                                    base_allowlist_rtxt_path):
+  """Generate an allowlist of base master resource ids.
+
+  Args:
+    base_module_rtxt_path: Path to base module R.txt file.
+    base_allowlist_rtxt_path: Path to base allowlist R.txt file.
+  Returns:
+    list of resource ids.
+  """
+  ids_map = resource_utils.GenerateStringResourcesAllowList(
+      base_module_rtxt_path, base_allowlist_rtxt_path)
+  return ids_map.keys()
+
+
+def _ConcatTextFiles(in_paths, out_path):
+  """Concatenate the contents of multiple text files into one.
+
+  Each file's contents is preceded by a line containing the original filename.
+
+  Args:
+    in_paths: List of input file paths.
+    out_path: Path to output file.
+  """
+  with open(out_path, 'w') as out_file:
+    for in_path in in_paths:
+      if not os.path.exists(in_path):
+        continue
+      with open(in_path, 'r') as in_file:
+        out_file.write('-- Contents of {}\n'.format(os.path.basename(in_path)))
+        out_file.write(in_file.read())
+
+
+def _LoadPathmap(pathmap_path):
+  """Load the pathmap of obfuscated resource paths.
+
+  Returns: A dict mapping from obfuscated paths to original paths, or an
+    empty dict if passed a None |pathmap_path|.
+  """
+  if pathmap_path is None:
+    return {}
+
+  pathmap = {}
+  with open(pathmap_path, 'r') as f:
+    for line in f:
+      line = line.strip()
+      if line.startswith('--') or line == '':
+        continue
+      original, renamed = line.split(' -> ')
+      pathmap[renamed] = original
+  return pathmap
+
+
+def _WriteBundlePathmap(module_pathmap_paths, module_names,
+                        bundle_pathmap_path):
+  """Combine the contents of module pathmaps into a bundle pathmap.
+
+  This rebases the resource paths inside the module pathmap before adding
+  them to the bundle pathmap. So res/a.xml inside the base module pathmap
+  would be base/res/a.xml in the bundle pathmap.
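+
+  For example (hypothetical paths), a base-module line
+  'res/drawable/icon.xml -> res/a.xml' would be written out as
+  'base/res/drawable/icon.xml -> base/res/a.xml'.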
+ """ + with open(bundle_pathmap_path, 'w') as bundle_pathmap_file: + for module_pathmap_path, module_name in zip(module_pathmap_paths, + module_names): + if not os.path.exists(module_pathmap_path): + continue + module_pathmap = _LoadPathmap(module_pathmap_path) + for short_path, long_path in module_pathmap.items(): + rebased_long_path = '{}/{}'.format(module_name, long_path) + rebased_short_path = '{}/{}'.format(module_name, short_path) + line = '{} -> {}\n'.format(rebased_long_path, rebased_short_path) + bundle_pathmap_file.write(line) + + +def _GetManifestForModule(bundle_path, module_name): + data = bundletool.RunBundleTool( + ['dump', 'manifest', '--bundle', bundle_path, '--module', module_name]) + try: + return ElementTree.fromstring(data) + except ElementTree.ParseError: + sys.stderr.write('Failed to parse:\n') + sys.stderr.write(data) + raise + + +def _GetComponentNames(manifest, tag_name): + android_name = '{%s}name' % manifest_utils.ANDROID_NAMESPACE + return [s.attrib.get(android_name) for s in manifest.iter(tag_name)] + + +def _ClassesFromZip(module_zip): + classes = set() + for package in dexdump.Dump(module_zip): + for java_package, package_dict in package.items(): + java_package += '.' if java_package else '' + classes.update(java_package + c for c in package_dict['classes']) + return classes + + +def _ValidateSplits(bundle_path, module_zips): + logging.info('Reading manifests and running dexdump') + base_zip = next(p for p in module_zips if os.path.basename(p) == 'base.zip') + module_names = sorted(os.path.basename(p)[:-len('.zip')] for p in module_zips) + # Using threads makes these step go from 7s -> 1s on my machine. + with concurrent.futures.ThreadPoolExecutor() as executor: + # Create list of classes from the base module's dex. + classes_future = executor.submit(_ClassesFromZip, base_zip) + + # Create xmltrees of all module manifests. + manifest_futures = [ + executor.submit(_GetManifestForModule, bundle_path, n) + for n in module_names + ] + manifests_by_name = dict( + zip(module_names, (f.result() for f in manifest_futures))) + base_classes = classes_future.result() + + # Collect service names from all split manifests. + logging.info('Performing checks') + errors = [] + + # Ensure there are no components defined in multiple splits. + splits_by_component = {} + for module_name, cur_manifest in manifests_by_name.items(): + for kind in _DEDUPE_ENTRY_TYPES: + for component in _GetComponentNames(cur_manifest, kind): + owner_module_name = splits_by_component.setdefault((kind, component), + module_name) + # Allow services that exist only to keep out of + # ApplicationInfo. + if (owner_module_name != module_name + and not component.endswith('HolderService')): + errors.append(f'The {kind} "{component}" appeared in both ' + f'{owner_module_name} and {module_name}.') + + # Ensure components defined in base manifest exist in base dex. + for (kind, component), module_name in splits_by_component.items(): + if module_name == 'base' and kind in _COMPONENT_TYPES: + if component not in base_classes: + errors.append(f"{component} is defined in the base manfiest, " + f"but the class does not exist in the base splits' dex") + + # Remaining checks apply only when isolatedSplits="true". + isolated_splits = manifests_by_name['base'].get( + f'{manifest_utils.ANDROID_NAMESPACE}isolatedSplits') + if isolated_splits != 'true': + return errors + + # Ensure all providers are present in base module. 
We enforce this because + # providers are loaded early in startup, and keeping them in the base module + # gives more time for the chrome split to load. + for module_name, cur_manifest in manifests_by_name.items(): + if module_name == 'base': + continue + provider_names = _GetComponentNames(cur_manifest, 'provider') + if provider_names: + errors.append('Providers should all be declared in the base manifest.' + ' "%s" module declared: %s' % (module_name, provider_names)) + + # Ensure all services are present in base module because service classes are + # not found if they are not present in the base module. b/169196314 + # It is fine if they are defined in split manifests though. + for cur_manifest in manifests_by_name.values(): + for service_name in _GetComponentNames(cur_manifest, 'service'): + if service_name not in base_classes: + errors.append("Service %s should be present in the base module's dex." + " See b/169196314 for more details." % service_name) + + return errors + + +def main(args): + build_utils.InitLogging('AAB_DEBUG') + args = build_utils.ExpandFileArgs(args) + options = _ParseArgs(args) + + split_dimensions = [] + if options.split_dimensions: + split_dimensions = [x.upper() for x in options.split_dimensions] + + + with build_utils.TempDir() as tmp_dir: + logging.info('Splitting locale assets') + module_zips = [ + _SplitModuleForAssetTargeting(module, tmp_dir, split_dimensions) \ + for module in options.module_zips] + + base_master_resource_ids = None + if options.base_module_rtxt_path: + logging.info('Creating R.txt allowlist') + base_master_resource_ids = _GenerateBaseResourcesAllowList( + options.base_module_rtxt_path, options.base_allowlist_rtxt_path) + + logging.info('Creating BundleConfig.pb.json') + bundle_config = _GenerateBundleConfigJson(options.uncompressed_assets, + options.compress_dex, + options.compress_shared_libraries, + split_dimensions, + base_master_resource_ids) + + tmp_bundle = os.path.join(tmp_dir, 'tmp_bundle') + + # Important: bundletool requires that the bundle config file is + # named with a .pb.json extension. + tmp_bundle_config = tmp_bundle + '.BundleConfig.pb.json' + + with open(tmp_bundle_config, 'w') as f: + f.write(bundle_config) + + logging.info('Running bundletool') + cmd_args = build_utils.JavaCmd() + [ + '-jar', + bundletool.BUNDLETOOL_JAR_PATH, + 'build-bundle', + '--modules=' + ','.join(module_zips), + '--output=' + tmp_bundle, + '--config=' + tmp_bundle_config, + ] + + if options.rotation_config: + cmd_args += [ + f'--metadata-file={_ROTATION_METADATA_KEY}:{options.rotation_config}' + ] + + build_utils.CheckOutput( + cmd_args, + print_stdout=True, + print_stderr=True, + stderr_filter=build_utils.FilterReflectiveAccessJavaWarnings, + fail_on_output=options.warnings_as_errors) + + if options.validate_services: + # TODO(crbug.com/1126301): This step takes 0.4s locally for bundles with + # isolated splits disabled and 2s for bundles with isolated splits + # enabled. Consider making this run in parallel or move into a separate + # step before enabling isolated splits by default. 
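+      # _ValidateSplits() returns a list of error strings covering: components
+      # declared in more than one split, base-manifest components missing from
+      # the base dex, and (with isolatedSplits enabled) providers or services
+      # that live outside the base module.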
+ logging.info('Validating isolated split manifests') + errors = _ValidateSplits(tmp_bundle, module_zips) + if errors: + sys.stderr.write('Bundle failed sanity checks:\n ') + sys.stderr.write('\n '.join(errors)) + sys.stderr.write('\n') + sys.exit(1) + + logging.info('Writing final output artifacts') + shutil.move(tmp_bundle, options.out_bundle) + + if options.rtxt_out_path: + _ConcatTextFiles(options.rtxt_in_paths, options.rtxt_out_path) + + if options.pathmap_out_path: + _WriteBundlePathmap(options.pathmap_in_paths, options.module_names, + options.pathmap_out_path) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/android/gyp/create_app_bundle.pydeps b/android/gyp/create_app_bundle.pydeps new file mode 100644 index 000000000000..5e7a79f6387f --- /dev/null +++ b/android/gyp/create_app_bundle.pydeps @@ -0,0 +1,49 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_app_bundle.pydeps build/android/gyp/create_app_bundle.py +../../../third_party/catapult/devil/devil/__init__.py +../../../third_party/catapult/devil/devil/android/__init__.py +../../../third_party/catapult/devil/devil/android/constants/__init__.py +../../../third_party/catapult/devil/devil/android/constants/chrome.py +../../../third_party/catapult/devil/devil/android/sdk/__init__.py +../../../third_party/catapult/devil/devil/android/sdk/keyevent.py +../../../third_party/catapult/devil/devil/android/sdk/version_codes.py +../../../third_party/catapult/devil/devil/base_error.py +../../../third_party/catapult/devil/devil/constants/__init__.py +../../../third_party/catapult/devil/devil/constants/exit_codes.py +../../../third_party/catapult/devil/devil/utils/__init__.py +../../../third_party/catapult/devil/devil/utils/cmd_helper.py +../../../third_party/jinja2/__init__.py +../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py +../../../third_party/jinja2/bccache.py +../../../third_party/jinja2/compiler.py +../../../third_party/jinja2/defaults.py +../../../third_party/jinja2/environment.py +../../../third_party/jinja2/exceptions.py +../../../third_party/jinja2/filters.py +../../../third_party/jinja2/idtracking.py +../../../third_party/jinja2/lexer.py +../../../third_party/jinja2/loaders.py +../../../third_party/jinja2/nodes.py +../../../third_party/jinja2/optimizer.py +../../../third_party/jinja2/parser.py +../../../third_party/jinja2/runtime.py +../../../third_party/jinja2/tests.py +../../../third_party/jinja2/utils.py +../../../third_party/jinja2/visitor.py +../../../third_party/markupsafe/__init__.py +../../../third_party/markupsafe/_compat.py +../../../third_party/markupsafe/_native.py +../../action_helpers.py +../../gn_helpers.py +../../zip_helpers.py +../pylib/__init__.py +../pylib/constants/__init__.py +../pylib/utils/__init__.py +../pylib/utils/dexdump.py +bundletool.py +create_app_bundle.py +util/__init__.py +util/build_utils.py +util/manifest_utils.py +util/resource_utils.py diff --git a/android/gyp/create_app_bundle_apks.py b/android/gyp/create_app_bundle_apks.py new file mode 100755 index 000000000000..2f0dc51d9500 --- /dev/null +++ b/android/gyp/create_app_bundle_apks.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python3 +# +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""Creates an .apks from an .aab.""" + +import argparse +import os +import sys + +sys.path.append( + os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))) +from pylib.utils import app_bundle_utils + + +def main(): + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument( + '--bundle', required=True, help='Path to input .aab file.') + parser.add_argument( + '--output', required=True, help='Path to output .apks file.') + parser.add_argument('--aapt2-path', required=True, help='Path to aapt2.') + parser.add_argument( + '--keystore-path', required=True, help='Path to keystore.') + parser.add_argument( + '--keystore-password', required=True, help='Keystore password.') + parser.add_argument( + '--keystore-name', required=True, help='Key name within keystore') + parser.add_argument( + '--minimal', + action='store_true', + help='Create APKs archive with minimal language support.') + parser.add_argument('--local-testing', + action='store_true', + help='Create APKs archive with local testing support.') + + args = parser.parse_args() + + app_bundle_utils.GenerateBundleApks(args.bundle, + args.output, + args.aapt2_path, + args.keystore_path, + args.keystore_password, + args.keystore_name, + local_testing=args.local_testing, + minimal=args.minimal, + check_for_noop=False) + + +if __name__ == '__main__': + main() diff --git a/android/gyp/create_app_bundle_apks.pydeps b/android/gyp/create_app_bundle_apks.pydeps new file mode 100644 index 000000000000..65810c3eb0c1 --- /dev/null +++ b/android/gyp/create_app_bundle_apks.pydeps @@ -0,0 +1,36 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_app_bundle_apks.pydeps build/android/gyp/create_app_bundle_apks.py +../../../third_party/jinja2/__init__.py +../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py +../../../third_party/jinja2/bccache.py +../../../third_party/jinja2/compiler.py +../../../third_party/jinja2/defaults.py +../../../third_party/jinja2/environment.py +../../../third_party/jinja2/exceptions.py +../../../third_party/jinja2/filters.py +../../../third_party/jinja2/idtracking.py +../../../third_party/jinja2/lexer.py +../../../third_party/jinja2/loaders.py +../../../third_party/jinja2/nodes.py +../../../third_party/jinja2/optimizer.py +../../../third_party/jinja2/parser.py +../../../third_party/jinja2/runtime.py +../../../third_party/jinja2/tests.py +../../../third_party/jinja2/utils.py +../../../third_party/jinja2/visitor.py +../../../third_party/markupsafe/__init__.py +../../../third_party/markupsafe/_compat.py +../../../third_party/markupsafe/_native.py +../../action_helpers.py +../../gn_helpers.py +../../print_python_deps.py +../pylib/__init__.py +../pylib/utils/__init__.py +../pylib/utils/app_bundle_utils.py +bundletool.py +create_app_bundle_apks.py +util/__init__.py +util/build_utils.py +util/md5_check.py +util/resource_utils.py diff --git a/android/gyp/create_bundle_wrapper_script.py b/android/gyp/create_bundle_wrapper_script.py new file mode 100755 index 000000000000..a3870bf89512 --- /dev/null +++ b/android/gyp/create_bundle_wrapper_script.py @@ -0,0 +1,123 @@ +#!/usr/bin/env python3 +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""Create a wrapper script to manage an Android App Bundle.""" + +import argparse +import os +import string +import sys + +from util import build_utils +import action_helpers # build_utils adds //build to sys.path. + +SCRIPT_TEMPLATE = string.Template("""\ +#!/usr/bin/env python3 +# +# This file was generated by build/android/gyp/create_bundle_wrapper_script.py + +import os +import sys + +def main(): + script_directory = os.path.dirname(__file__) + resolve = lambda p: p if p is None else os.path.abspath(os.path.join( + script_directory, p)) + sys.path.append(resolve(${WRAPPED_SCRIPT_DIR})) + import apk_operations + + additional_apk_paths = [resolve(p) for p in ${ADDITIONAL_APK_PATHS}] + apk_operations.RunForBundle(output_directory=resolve(${OUTPUT_DIR}), + bundle_path=resolve(${BUNDLE_PATH}), + bundle_apks_path=resolve(${BUNDLE_APKS_PATH}), + additional_apk_paths=additional_apk_paths, + aapt2_path=resolve(${AAPT2_PATH}), + keystore_path=resolve(${KEYSTORE_PATH}), + keystore_password=${KEYSTORE_PASSWORD}, + keystore_alias=${KEY_NAME}, + package_name=${PACKAGE_NAME}, + command_line_flags_file=${FLAGS_FILE}, + proguard_mapping_path=resolve(${MAPPING_PATH}), + target_cpu=${TARGET_CPU}, + system_image_locales=${SYSTEM_IMAGE_LOCALES}, + default_modules=${DEFAULT_MODULES}) + +if __name__ == '__main__': + sys.exit(main()) +""") + +def main(args): + args = build_utils.ExpandFileArgs(args) + parser = argparse.ArgumentParser() + parser.add_argument('--script-output-path', required=True, + help='Output path for executable script.') + parser.add_argument('--bundle-path', required=True) + parser.add_argument('--bundle-apks-path', required=True) + parser.add_argument( + '--additional-apk-path', + action='append', + dest='additional_apk_paths', + default=[], + help='Paths to APKs to be installed prior to --apk-path.') + parser.add_argument('--package-name', required=True) + parser.add_argument('--aapt2-path', required=True) + parser.add_argument('--keystore-path', required=True) + parser.add_argument('--keystore-password', required=True) + parser.add_argument('--key-name', required=True) + parser.add_argument('--command-line-flags-file') + parser.add_argument('--proguard-mapping-path') + parser.add_argument('--target-cpu') + parser.add_argument('--system-image-locales') + parser.add_argument('--default-modules', nargs='*', default=[]) + args = parser.parse_args(args) + + def relativize(path): + """Returns the path relative to the output script directory.""" + if path is None: + return path + return os.path.relpath(path, os.path.dirname(args.script_output_path)) + + wrapped_script_dir = os.path.join(os.path.dirname(__file__), os.path.pardir) + wrapped_script_dir = relativize(wrapped_script_dir) + with open(args.script_output_path, 'w') as script: + script_dict = { + 'WRAPPED_SCRIPT_DIR': + repr(wrapped_script_dir), + 'OUTPUT_DIR': + repr(relativize('.')), + 'BUNDLE_PATH': + repr(relativize(args.bundle_path)), + 'BUNDLE_APKS_PATH': + repr(relativize(args.bundle_apks_path)), + 'ADDITIONAL_APK_PATHS': + [relativize(p) for p in args.additional_apk_paths], + 'PACKAGE_NAME': + repr(args.package_name), + 'AAPT2_PATH': + repr(relativize(args.aapt2_path)), + 'KEYSTORE_PATH': + repr(relativize(args.keystore_path)), + 'KEYSTORE_PASSWORD': + repr(args.keystore_password), + 'KEY_NAME': + repr(args.key_name), + 'MAPPING_PATH': + repr(relativize(args.proguard_mapping_path)), + 'FLAGS_FILE': + repr(args.command_line_flags_file), + 'TARGET_CPU': + repr(args.target_cpu), + 'SYSTEM_IMAGE_LOCALES': + 
repr(action_helpers.parse_gn_list(args.system_image_locales)),
+          'DEFAULT_MODULES':
+              repr(args.default_modules),
+      }
+      script.write(SCRIPT_TEMPLATE.substitute(script_dict))
+    os.chmod(args.script_output_path, 0o750)
+    return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/android/gyp/create_bundle_wrapper_script.pydeps b/android/gyp/create_bundle_wrapper_script.pydeps
new file mode 100644
index 000000000000..51d912c837c3
--- /dev/null
+++ b/android/gyp/create_bundle_wrapper_script.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_bundle_wrapper_script.pydeps build/android/gyp/create_bundle_wrapper_script.py
+../../action_helpers.py
+../../gn_helpers.py
+create_bundle_wrapper_script.py
+util/__init__.py
+util/build_utils.py
diff --git a/android/gyp/create_java_binary_script.py b/android/gyp/create_java_binary_script.py
new file mode 100755
index 000000000000..f9e665f4e3d0
--- /dev/null
+++ b/android/gyp/create_java_binary_script.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a simple script to run a java "binary".
+
+This creates a script that sets up the java command line for running a java
+jar. This includes correctly setting the classpath and the main class.
+"""
+
+import argparse
+import os
+import sys
+
+from util import build_utils
+import action_helpers  # build_utils adds //build to sys.path.
+
+# The java command must be executed in the current directory because there may
+# be user-supplied paths in the args. The script receives the classpath
+# relative to the directory that the script is written in and then, when run,
+# must recalculate the paths relative to the current directory.
+script_template = """\
+#!/usr/bin/env python3
+#
+# This file was generated by build/android/gyp/create_java_binary_script.py
+
+import argparse
+import os
+import sys
+
+self_dir = os.path.dirname(__file__)
+classpath = [{classpath}]
+extra_program_args = {extra_program_args}
+java_path = {java_path}
+if os.getcwd() != self_dir:
+  offset = os.path.relpath(self_dir, os.getcwd())
+  fix_path = lambda p: os.path.normpath(os.path.join(offset, p))
+  classpath = [fix_path(p) for p in classpath]
+  java_path = fix_path(java_path)
+java_cmd = [java_path]
+# This is a simple argparser for jvm, jar, and classpath arguments.
+parser = argparse.ArgumentParser(add_help=False)
+parser.add_argument('--jar-args')
+parser.add_argument('--jvm-args')
+parser.add_argument('--classpath')
+# Test_runner parses the classpath for sharding junit tests.
+parser.add_argument('--print-classpath', action='store_true',
+                    help='Prints the classpath. 
Used by test_runner.') +known_args, unknown_args = parser.parse_known_args(sys.argv[1:]) + +if known_args.print_classpath: + sys.stdout.write(':'.join(classpath)) + sys.exit(0) + +if known_args.jvm_args: + jvm_arguments = known_args.jvm_args.strip('"').split() + java_cmd.extend(jvm_arguments) +if known_args.jar_args: + jar_arguments = known_args.jar_args.strip('"').split() + if unknown_args: + raise Exception('There are unknown arguments') +else: + jar_arguments = unknown_args + +if known_args.classpath: + classpath += [known_args.classpath] + +{extra_flags} +java_cmd.extend( + ['-classpath', ':'.join(classpath), '-enableassertions', \"{main_class}\"]) +java_cmd.extend(extra_program_args) +java_cmd.extend(jar_arguments) +os.execvp(java_cmd[0], java_cmd) +""" + +def main(argv): + argv = build_utils.ExpandFileArgs(argv) + parser = argparse.ArgumentParser() + parser.add_argument('--output', + required=True, + help='Output path for executable script.') + parser.add_argument( + '--main-class', + required=True, + help='Name of the java class with the "main" entry point.') + parser.add_argument('--max-heap-size', + required=True, + help='Argument for -Xmx') + parser.add_argument('--classpath', + action='append', + default=[], + help='Classpath for running the jar.') + parser.add_argument('--tiered-stop-at-level-one', + action='store_true', + help='JVM flag: -XX:TieredStopAtLevel=1.') + parser.add_argument('--use-jdk-11', + action='store_true', + help='Use older JDK11 instead of modern JDK.') + parser.add_argument('extra_program_args', + nargs='*', + help='This captures all ' + 'args after "--" to pass as extra args to the java cmd.') + + args = parser.parse_args(argv) + + extra_flags = [f'java_cmd.append("-Xmx{args.max_heap_size}")'] + if args.tiered_stop_at_level_one: + extra_flags.append('java_cmd.append("-XX:TieredStopAtLevel=1")') + + classpath = [] + for cp_arg in args.classpath: + classpath += action_helpers.parse_gn_list(cp_arg) + + run_dir = os.path.dirname(args.output) + classpath = [os.path.relpath(p, run_dir) for p in classpath] + + if args.use_jdk_11: + java_home = build_utils.JAVA_11_HOME_DEPRECATED + else: + java_home = build_utils.JAVA_HOME + java_path = os.path.relpath(os.path.join(java_home, 'bin', 'java'), run_dir) + + with action_helpers.atomic_output(args.output, mode='w') as script: + script.write( + script_template.format(classpath=('"%s"' % '", "'.join(classpath)), + java_path=repr(java_path), + main_class=args.main_class, + extra_program_args=repr(args.extra_program_args), + extra_flags='\n'.join(extra_flags))) + + os.chmod(args.output, 0o750) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/android/gyp/create_java_binary_script.pydeps b/android/gyp/create_java_binary_script.pydeps new file mode 100644 index 000000000000..a0a740dec9f1 --- /dev/null +++ b/android/gyp/create_java_binary_script.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_java_binary_script.pydeps build/android/gyp/create_java_binary_script.py +../../action_helpers.py +../../gn_helpers.py +create_java_binary_script.py +util/__init__.py +util/build_utils.py diff --git a/android/gyp/create_r_java.py b/android/gyp/create_r_java.py new file mode 100755 index 000000000000..b662a39695fb --- /dev/null +++ b/android/gyp/create_r_java.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python3 +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in 
the LICENSE file. +"""Writes a dummy R.java file from a list of R.txt files.""" + +import argparse +import sys + +from util import build_utils +from util import resource_utils +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers + + +def _ConcatRTxts(rtxt_in_paths, combined_out_path): + all_lines = set() + for rtxt_in_path in rtxt_in_paths: + with open(rtxt_in_path) as rtxt_in: + all_lines.update(rtxt_in.read().splitlines()) + with open(combined_out_path, 'w') as combined_out: + combined_out.write('\n'.join(sorted(all_lines))) + + +def _CreateRJava(rtxts, package_name, srcjar_out): + with resource_utils.BuildContext() as build: + _ConcatRTxts(rtxts, build.r_txt_path) + rjava_build_options = resource_utils.RJavaBuildOptions() + rjava_build_options.ExportAllResources() + rjava_build_options.ExportAllStyleables() + rjava_build_options.GenerateOnResourcesLoaded(fake=True) + resource_utils.CreateRJavaFiles(build.srcjar_dir, + package_name, + build.r_txt_path, + extra_res_packages=[], + rjava_build_options=rjava_build_options, + srcjar_out=srcjar_out, + ignore_mismatched_values=True) + with action_helpers.atomic_output(srcjar_out) as f: + zip_helpers.zip_directory(f, build.srcjar_dir) + + +def main(args): + parser = argparse.ArgumentParser(description='Create an R.java srcjar.') + action_helpers.add_depfile_arg(parser) + parser.add_argument('--srcjar-out', + required=True, + help='Path to output srcjar.') + parser.add_argument('--deps-rtxts', + required=True, + help='List of rtxts of resource dependencies.') + parser.add_argument('--r-package', + required=True, + help='R.java package to use.') + options = parser.parse_args(build_utils.ExpandFileArgs(args)) + options.deps_rtxts = action_helpers.parse_gn_list(options.deps_rtxts) + + _CreateRJava(options.deps_rtxts, options.r_package, options.srcjar_out) + action_helpers.write_depfile(options.depfile, + options.srcjar_out, + inputs=options.deps_rtxts) + + +if __name__ == "__main__": + sys.exit(main(sys.argv[1:])) diff --git a/android/gyp/create_r_java.pydeps b/android/gyp/create_r_java.pydeps new file mode 100644 index 000000000000..20fd1f8bd4af --- /dev/null +++ b/android/gyp/create_r_java.pydeps @@ -0,0 +1,31 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_r_java.pydeps build/android/gyp/create_r_java.py +../../../third_party/jinja2/__init__.py +../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py +../../../third_party/jinja2/bccache.py +../../../third_party/jinja2/compiler.py +../../../third_party/jinja2/defaults.py +../../../third_party/jinja2/environment.py +../../../third_party/jinja2/exceptions.py +../../../third_party/jinja2/filters.py +../../../third_party/jinja2/idtracking.py +../../../third_party/jinja2/lexer.py +../../../third_party/jinja2/loaders.py +../../../third_party/jinja2/nodes.py +../../../third_party/jinja2/optimizer.py +../../../third_party/jinja2/parser.py +../../../third_party/jinja2/runtime.py +../../../third_party/jinja2/tests.py +../../../third_party/jinja2/utils.py +../../../third_party/jinja2/visitor.py +../../../third_party/markupsafe/__init__.py +../../../third_party/markupsafe/_compat.py +../../../third_party/markupsafe/_native.py +../../action_helpers.py +../../gn_helpers.py +../../zip_helpers.py +create_r_java.py +util/__init__.py +util/build_utils.py +util/resource_utils.py diff --git a/android/gyp/create_r_txt.py b/android/gyp/create_r_txt.py new file mode 100755 index 
000000000000..429f62f06f42 --- /dev/null +++ b/android/gyp/create_r_txt.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python3 +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Writes a dummy R.txt file from a resource zip.""" + +import argparse +import sys + +from util import build_utils +from util import resource_utils +from util import resources_parser + + +def main(args): + parser = argparse.ArgumentParser( + description='Create an R.txt from resources.') + parser.add_argument('--resources-zip-path', + required=True, + help='Path to input resources zip.') + parser.add_argument('--rtxt-path', + required=True, + help='Path to output R.txt file.') + options = parser.parse_args(build_utils.ExpandFileArgs(args)) + with build_utils.TempDir() as temp: + dep_subdirs = resource_utils.ExtractDeps([options.resources_zip_path], temp) + resources_parser.RTxtGenerator(dep_subdirs).WriteRTxtFile(options.rtxt_path) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/android/gyp/create_r_txt.pydeps b/android/gyp/create_r_txt.pydeps new file mode 100644 index 000000000000..65378f038aa2 --- /dev/null +++ b/android/gyp/create_r_txt.pydeps @@ -0,0 +1,31 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_r_txt.pydeps build/android/gyp/create_r_txt.py +../../../third_party/jinja2/__init__.py +../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py +../../../third_party/jinja2/bccache.py +../../../third_party/jinja2/compiler.py +../../../third_party/jinja2/defaults.py +../../../third_party/jinja2/environment.py +../../../third_party/jinja2/exceptions.py +../../../third_party/jinja2/filters.py +../../../third_party/jinja2/idtracking.py +../../../third_party/jinja2/lexer.py +../../../third_party/jinja2/loaders.py +../../../third_party/jinja2/nodes.py +../../../third_party/jinja2/optimizer.py +../../../third_party/jinja2/parser.py +../../../third_party/jinja2/runtime.py +../../../third_party/jinja2/tests.py +../../../third_party/jinja2/utils.py +../../../third_party/jinja2/visitor.py +../../../third_party/markupsafe/__init__.py +../../../third_party/markupsafe/_compat.py +../../../third_party/markupsafe/_native.py +../../action_helpers.py +../../gn_helpers.py +create_r_txt.py +util/__init__.py +util/build_utils.py +util/resource_utils.py +util/resources_parser.py diff --git a/android/gyp/create_size_info_files.py b/android/gyp/create_size_info_files.py new file mode 100755 index 000000000000..24fcf8dc8bc0 --- /dev/null +++ b/android/gyp/create_size_info_files.py @@ -0,0 +1,195 @@ +#!/usr/bin/env python3 + +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Creates size-info/*.info files used by SuperSize.""" + +import argparse +import collections +import os +import re +import sys +import zipfile + +from util import build_utils +from util import jar_info_utils +import action_helpers # build_utils adds //build to sys.path. + + +_AAR_VERSION_PATTERN = re.compile(r'/[^/]*?(\.aar/|\.jar/)') + + +def _RemoveDuplicatesFromList(source_list): + return collections.OrderedDict.fromkeys(source_list).keys() + + +def _TransformAarPaths(path): + # .aar files within //third_party/android_deps have a version suffix. 
+  # The suffix changes each time .aar files are updated, which makes size diffs
+  # hard to compare (since the before/after have different source paths).
+  # Rather than changing how android_deps works, we employ this work-around
+  # to normalize the paths.
+  # From: .../androidx_appcompat_appcompat/appcompat-1.1.0.aar/res/...
+  # To: .../androidx_appcompat_appcompat.aar/res/...
+  # https://crbug.com/1056455
+  if 'android_deps' not in path:
+    return path
+  return _AAR_VERSION_PATTERN.sub(r'\1', path)
+
+
+def _MergeResInfoFiles(res_info_path, info_paths):
+  # Concatenate them all.
+  with action_helpers.atomic_output(res_info_path, 'w+') as dst:
+    for p in info_paths:
+      with open(p) as src:
+        dst.writelines(_TransformAarPaths(l) for l in src)
+
+
+def _PakInfoPathsForAssets(assets):
+  return [f.split(':')[0] + '.info' for f in assets if f.endswith('.pak')]
+
+
+def _MergePakInfoFiles(merged_path, pak_infos):
+  info_lines = set()
+  for pak_info_path in pak_infos:
+    with open(pak_info_path, 'r') as src_info_file:
+      info_lines.update(_TransformAarPaths(x) for x in src_info_file)
+  # only_if_changed=False since no build rules depend on this as an input.
+  with action_helpers.atomic_output(merged_path,
+                                    only_if_changed=False,
+                                    mode='w+') as f:
+    f.writelines(sorted(info_lines))
+
+
+def _FullJavaNameFromClassFilePath(path):
+  # Input: base/android/java/src/org/chromium/Foo.class
+  # Output: base.android.java.src.org.chromium.Foo
+  if not path.endswith('.class'):
+    return ''
+  path = os.path.splitext(path)[0]
+  parts = []
+  while path:
+    # Use split to be platform independent.
+    head, tail = os.path.split(path)
+    path = head
+    parts.append(tail)
+  parts.reverse()  # Package comes first
+  return '.'.join(parts)
+
+
+def _MergeJarInfoFiles(output, inputs):
+  """Merge several .jar.info files to generate an .apk.jar.info.
+
+  Args:
+    output: output file path.
+    inputs: List of .jar.info or .jar files.
+  """
+  info_data = dict()
+  for path in inputs:
+    # For non-prebuilts: .jar.info files are written by compile_java.py and map
+    # .class files to .java source paths.
+    #
+    # For prebuilts: No .jar.info file exists, we scan the .jar files here and
+    # map .class files to the .jar.
+    #
+    # For .aar files: We look for a "source.info" file in the containing
+    # directory in order to map classes back to the .aar (rather than mapping
+    # them to the extracted .jar file).
+    if path.endswith('.info'):
+      info_data.update(jar_info_utils.ParseJarInfoFile(path))
+    else:
+      attributed_path = path
+      if not path.startswith('..'):
+        parent_path = os.path.dirname(path)
+        # See if it's a sub-jar within the .aar.
+        if os.path.basename(parent_path) == 'libs':
+          parent_path = os.path.dirname(parent_path)
+        aar_source_info_path = os.path.join(parent_path, 'source.info')
+        # source.info files exist only for jars from android_aar_prebuilt().
+        # E.g. a java_prebuilt() may point to a generated .jar.
+        if os.path.exists(aar_source_info_path):
+          attributed_path = jar_info_utils.ReadAarSourceInfo(
+              aar_source_info_path)
+
+      with zipfile.ZipFile(path) as zip_info:
+        for name in zip_info.namelist():
+          fully_qualified_name = _FullJavaNameFromClassFilePath(name)
+          if fully_qualified_name:
+            info_data[fully_qualified_name] = _TransformAarPaths('{}/{}'.format(
+                attributed_path, name))
+
+  # only_if_changed=False since no build rules depend on this as an input.
+ with action_helpers.atomic_output(output, only_if_changed=False) as f: + jar_info_utils.WriteJarInfoFile(f, info_data) + + +def _FindJarInputs(jar_paths): + ret = [] + for jar_path in jar_paths: + jar_info_path = jar_path + '.info' + if os.path.exists(jar_info_path): + ret.append(jar_info_path) + else: + ret.append(jar_path) + return ret + + +def main(args): + args = build_utils.ExpandFileArgs(args) + parser = argparse.ArgumentParser(description=__doc__) + action_helpers.add_depfile_arg(parser) + parser.add_argument( + '--jar-info-path', required=True, help='Output .jar.info file') + parser.add_argument( + '--pak-info-path', required=True, help='Output .pak.info file') + parser.add_argument( + '--res-info-path', required=True, help='Output .res.info file') + parser.add_argument( + '--jar-files', + required=True, + action='append', + help='GN-list of .jar file paths') + parser.add_argument( + '--assets', + required=True, + action='append', + help='GN-list of files to add as assets in the form ' + '"srcPath:zipPath", where ":zipPath" is optional.') + parser.add_argument( + '--uncompressed-assets', + required=True, + action='append', + help='Same as --assets, except disables compression.') + parser.add_argument( + '--in-res-info-path', + required=True, + action='append', + help='Paths to .ap_.info files') + + options = parser.parse_args(args) + + options.jar_files = action_helpers.parse_gn_list(options.jar_files) + options.assets = action_helpers.parse_gn_list(options.assets) + options.uncompressed_assets = action_helpers.parse_gn_list( + options.uncompressed_assets) + + jar_inputs = _FindJarInputs(_RemoveDuplicatesFromList(options.jar_files)) + pak_inputs = _PakInfoPathsForAssets(options.assets + + options.uncompressed_assets) + res_inputs = options.in_res_info_path + + # Just create the info files every time. See https://crbug.com/1045024 + _MergeJarInfoFiles(options.jar_info_path, jar_inputs) + _MergePakInfoFiles(options.pak_info_path, pak_inputs) + _MergeResInfoFiles(options.res_info_path, res_inputs) + + all_inputs = jar_inputs + pak_inputs + res_inputs + action_helpers.write_depfile(options.depfile, + options.jar_info_path, + inputs=all_inputs) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/android/gyp/create_size_info_files.pydeps b/android/gyp/create_size_info_files.pydeps new file mode 100644 index 000000000000..0dd61cbb35d7 --- /dev/null +++ b/android/gyp/create_size_info_files.pydeps @@ -0,0 +1,8 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_size_info_files.pydeps build/android/gyp/create_size_info_files.py +../../action_helpers.py +../../gn_helpers.py +create_size_info_files.py +util/__init__.py +util/build_utils.py +util/jar_info_utils.py diff --git a/android/gyp/create_stub_manifest.py b/android/gyp/create_stub_manifest.py new file mode 100755 index 000000000000..889fa26bf8d3 --- /dev/null +++ b/android/gyp/create_stub_manifest.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python3 + +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""Generates AndroidManifest.xml for a -Stub.apk.""" + +import argparse +import pathlib + +_MAIN_TEMPLATE = """\ + + + {} + +""" + +_STATIC_LIBRARY_TEMPLATE = """ + +""" + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--static-library-name') + parser.add_argument('--static-library-version') + parser.add_argument('--output', required=True) + args = parser.parse_args() + + static_library_part = '' + if args.static_library_name: + static_library_part = _STATIC_LIBRARY_TEMPLATE.format( + args.static_library_name, args.static_library_version) + + data = _MAIN_TEMPLATE.format(static_library_part) + pathlib.Path(args.output).write_text(data, encoding='utf8') + + +if __name__ == '__main__': + main() diff --git a/android/gyp/create_test_apk_wrapper_script.py b/android/gyp/create_test_apk_wrapper_script.py new file mode 100755 index 000000000000..1e6374872432 --- /dev/null +++ b/android/gyp/create_test_apk_wrapper_script.py @@ -0,0 +1,85 @@ +#!/usr/bin/env python3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Create a wrapper script to run a test apk using apk_operations.py.""" + +import argparse +import os +import string +import sys + +from util import build_utils + +SCRIPT_TEMPLATE = string.Template("""\ +#!/usr/bin/env python3 +# +# This file was generated by build/android/gyp/create_test_apk_wrapper_script.py + +import os +import sys + +def main(): + script_directory = os.path.dirname(__file__) + resolve = lambda p: p if p is None else os.path.abspath(os.path.join( + script_directory, p)) + sys.path.append(resolve(${WRAPPED_SCRIPT_DIR})) + import apk_operations + + additional_apk_paths = [resolve(p) for p in ${ADDITIONAL_APKS}] + apk_operations.RunForTestApk( + output_directory=resolve(${OUTPUT_DIR}), + package_name=${PACKAGE_NAME}, + test_apk_path=resolve(${TEST_APK}), + test_apk_json=resolve(${TEST_APK_JSON}), + proguard_mapping_path=resolve(${MAPPING_PATH}), + additional_apk_paths=additional_apk_paths) + +if __name__ == '__main__': + sys.exit(main()) +""") + + +def main(args): + args = build_utils.ExpandFileArgs(args) + parser = argparse.ArgumentParser() + parser.add_argument('--script-output-path', + required=True, + help='Output path for executable script.') + parser.add_argument('--package-name', required=True) + parser.add_argument('--test-apk') + parser.add_argument('--test-apk-incremental-install-json') + parser.add_argument('--proguard-mapping-path') + parser.add_argument('--additional-apk', + action='append', + dest='additional_apks', + default=[], + help='Paths to APKs to be installed prior to --apk-path.') + args = parser.parse_args(args) + + def relativize(path): + """Returns the path relative to the output script directory.""" + if path is None: + return path + return os.path.relpath(path, os.path.dirname(args.script_output_path)) + + wrapped_script_dir = os.path.join(os.path.dirname(__file__), os.path.pardir) + wrapped_script_dir = relativize(wrapped_script_dir) + with open(args.script_output_path, 'w') as script: + script_dict = { + 'WRAPPED_SCRIPT_DIR': repr(wrapped_script_dir), + 'OUTPUT_DIR': repr(relativize('.')), + 'PACKAGE_NAME': repr(args.package_name), + 'TEST_APK': repr(relativize(args.test_apk)), + 'TEST_APK_JSON': + repr(relativize(args.test_apk_incremental_install_json)), + 'MAPPING_PATH': repr(relativize(args.proguard_mapping_path)), + 'ADDITIONAL_APKS': [relativize(p) for p in args.additional_apks], + } + 
+    script.write(SCRIPT_TEMPLATE.substitute(script_dict))
+  os.chmod(args.script_output_path, 0o750)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/android/gyp/create_test_apk_wrapper_script.pydeps b/android/gyp/create_test_apk_wrapper_script.pydeps
new file mode 100644
index 000000000000..d52f3438fdf9
--- /dev/null
+++ b/android/gyp/create_test_apk_wrapper_script.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_test_apk_wrapper_script.pydeps build/android/gyp/create_test_apk_wrapper_script.py
+../../gn_helpers.py
+create_test_apk_wrapper_script.py
+util/__init__.py
+util/build_utils.py
diff --git a/android/gyp/create_ui_locale_resources.py b/android/gyp/create_ui_locale_resources.py
new file mode 100755
index 000000000000..c767bc50121a
--- /dev/null
+++ b/android/gyp/create_ui_locale_resources.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate a zip archive containing localized locale name Android resource
+strings!
+
+This script takes a list of input Chrome-specific locale names, as well as an
+output zip file path.
+
+Each output file will contain the definition of a single string resource,
+named 'current_locale', whose value will be the matching Chromium locale name.
+E.g. values-en-rUS/strings.xml will define 'current_locale' as 'en-US'.
+"""
+
+import argparse
+import os
+import sys
+import zipfile
+
+sys.path.insert(
+    0,
+    os.path.join(
+        os.path.dirname(__file__), '..', '..', '..', 'build', 'android', 'gyp'))
+
+from util import build_utils
+from util import resource_utils
+import action_helpers  # build_utils adds //build to sys.path.
+import zip_helpers
+
+
+# A small string template for the content of each strings.xml file.
+# NOTE: The name is chosen to avoid any conflicts with other strings defined
+# by other resource archives.
+_TEMPLATE = """\
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    <string name="current_locale">{resource_text}</string>
+</resources>
+"""
+
+# The default Chrome locale value.
+_DEFAULT_CHROME_LOCALE = 'en-US'
+
+
+def _GenerateLocaleStringsXml(locale):
+  return _TEMPLATE.format(resource_text=locale)
+
+
+def _AddLocaleResourceFileToZip(out_zip, android_locale, locale):
+  locale_data = _GenerateLocaleStringsXml(locale)
+  if android_locale:
+    zip_path = 'values-%s/strings.xml' % android_locale
+  else:
+    zip_path = 'values/strings.xml'
+  zip_helpers.add_to_zip_hermetic(out_zip,
+                                  zip_path,
+                                  data=locale_data,
+                                  compress=False)
+
+
+def main():
+  parser = argparse.ArgumentParser(
+      description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+
+  parser.add_argument(
+      '--locale-list',
+      required=True,
+      help='GN-list of Chrome-specific locale names.')
+  parser.add_argument(
+      '--output-zip', required=True, help='Output zip archive path.')
+
+  args = parser.parse_args()
+
+  locale_list = action_helpers.parse_gn_list(args.locale_list)
+  if not locale_list:
+    raise Exception('Locale list cannot be empty!')
+
+  with action_helpers.atomic_output(args.output_zip) as tmp_file:
+    with zipfile.ZipFile(tmp_file, 'w') as out_zip:
+      # First, write the default value, since aapt requires one.
+ _AddLocaleResourceFileToZip(out_zip, '', _DEFAULT_CHROME_LOCALE) + + for locale in locale_list: + android_locale = resource_utils.ToAndroidLocaleName(locale) + _AddLocaleResourceFileToZip(out_zip, android_locale, locale) + + +if __name__ == '__main__': + main() diff --git a/android/gyp/create_ui_locale_resources.pydeps b/android/gyp/create_ui_locale_resources.pydeps new file mode 100644 index 000000000000..5cffc7906aba --- /dev/null +++ b/android/gyp/create_ui_locale_resources.pydeps @@ -0,0 +1,31 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/create_ui_locale_resources.pydeps build/android/gyp/create_ui_locale_resources.py +../../../third_party/jinja2/__init__.py +../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py +../../../third_party/jinja2/bccache.py +../../../third_party/jinja2/compiler.py +../../../third_party/jinja2/defaults.py +../../../third_party/jinja2/environment.py +../../../third_party/jinja2/exceptions.py +../../../third_party/jinja2/filters.py +../../../third_party/jinja2/idtracking.py +../../../third_party/jinja2/lexer.py +../../../third_party/jinja2/loaders.py +../../../third_party/jinja2/nodes.py +../../../third_party/jinja2/optimizer.py +../../../third_party/jinja2/parser.py +../../../third_party/jinja2/runtime.py +../../../third_party/jinja2/tests.py +../../../third_party/jinja2/utils.py +../../../third_party/jinja2/visitor.py +../../../third_party/markupsafe/__init__.py +../../../third_party/markupsafe/_compat.py +../../../third_party/markupsafe/_native.py +../../action_helpers.py +../../gn_helpers.py +../../zip_helpers.py +create_ui_locale_resources.py +util/__init__.py +util/build_utils.py +util/resource_utils.py diff --git a/android/gyp/create_unwind_table.py b/android/gyp/create_unwind_table.py new file mode 100755 index 000000000000..83cd73d6546d --- /dev/null +++ b/android/gyp/create_unwind_table.py @@ -0,0 +1,1095 @@ +#!/usr/bin/env python3 +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Creates a table of unwind information in Android Chrome's bespoke format.""" + +import abc +import argparse +import collections +import enum +import json +import logging +import re +import struct +import subprocess +import sys +from typing import (Dict, Iterable, List, NamedTuple, Sequence, TextIO, Tuple, + Union) + +from util import build_utils + +_STACK_CFI_INIT_REGEX = re.compile( + r'^STACK CFI INIT ([0-9a-f]+) ([0-9a-f]+) (.+)$') +_STACK_CFI_REGEX = re.compile(r'^STACK CFI ([0-9a-f]+) (.+)$') + + +class AddressCfi(NamedTuple): + """Record representing CFI for an address within a function. + + Represents the Call Frame Information required to unwind from an address in a + function. + + Attributes: + address: The address. + unwind_instructions: The unwind instructions for the address. + + """ + address: int + unwind_instructions: str + + +class FunctionCfi(NamedTuple): + """Record representing CFI for a function. + + Note: address_cfi[0].address is the start address of the function. + + Attributes: + size: The function size in bytes. + address_cfi: The CFI at each address in the function. + + """ + size: int + address_cfi: Tuple[AddressCfi, ...] + + +def FilterToNonTombstoneCfi(stream: TextIO) -> Iterable[str]: + """Generates non-tombstone STACK CFI lines from the stream. 
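+
+  For example (mirroring the unit tests in create_unwind_table_tests.py),
+  given the lines
+    STACK CFI INIT 0 ...
+    STACK CFI 100 ...
+    STACK CFI INIT 1 ...
+    STACK CFI 200 ...
+  only the last two lines are yielded: 'STACK CFI INIT 0' opens a tombstone
+  function whose CFI lines are skipped until the next nonzero INIT record.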
+
+  STACK CFI records for functions with address 0 are 'tombstone' records
+  associated with dead code and can be ignored. See
+  https://bugs.llvm.org/show_bug.cgi?id=47148#c2.
+
+  Args:
+    stream: A file object.
+
+  Returns:
+    An iterable over the non-tombstone STACK CFI lines in the stream.
+  """
+  in_tombstone_function = False
+  for line in stream:
+    if not line.startswith('STACK CFI '):
+      continue
+
+    if line.startswith('STACK CFI INIT 0 '):
+      in_tombstone_function = True
+    elif line.startswith('STACK CFI INIT '):
+      in_tombstone_function = False
+
+    if not in_tombstone_function:
+      yield line
+
+
+def ReadFunctionCfi(stream: TextIO) -> Iterable[FunctionCfi]:
+  """Generates FunctionCfi records from the stream.
+
+  Args:
+    stream: A file object.
+
+  Returns:
+    An iterable over FunctionCfi corresponding to the non-tombstone STACK CFI
+    lines in the stream.
+  """
+  current_function_address = None
+  current_function_size = None
+  current_function_address_cfi = []
+  for line in FilterToNonTombstoneCfi(stream):
+    cfi_init_match = _STACK_CFI_INIT_REGEX.search(line)
+    if cfi_init_match:
+      # Function CFI with address 0 denotes a tombstone entry per
+      # https://bugs.llvm.org/show_bug.cgi?id=47148#c2 and should have been
+      # filtered in `FilterToNonTombstoneCfi`.
+      assert current_function_address != 0
+      if (current_function_address is not None
+          and current_function_size is not None):
+        yield FunctionCfi(current_function_size,
+                          tuple(current_function_address_cfi))
+      current_function_address = int(cfi_init_match.group(1), 16)
+      current_function_size = int(cfi_init_match.group(2), 16)
+      current_function_address_cfi = [
+          AddressCfi(int(cfi_init_match.group(1), 16), cfi_init_match.group(3))
+      ]
+    else:
+      cfi_match = _STACK_CFI_REGEX.search(line)
+      assert cfi_match
+      current_function_address_cfi.append(
+          AddressCfi(int(cfi_match.group(1), 16), cfi_match.group(2)))
+
+  assert current_function_address is not None
+  assert current_function_size is not None
+  yield FunctionCfi(current_function_size, tuple(current_function_address_cfi))
+
+
+def EncodeAsBytes(*values: int) -> bytes:
+  """Encodes the argument ints as bytes.
+
+  This function validates that the inputs are within the range that can be
+  represented as bytes.
+
+  Args:
+    values: Integers in range [0, 255].
+
+  Returns:
+    The values encoded as bytes.
+  """
+  for i, value in enumerate(values):
+    if not 0 <= value <= 255:
+      raise ValueError('value = %d out of bounds at byte %d' % (value, i))
+  return bytes(values)
+
+
+def Uleb128Encode(value: int) -> bytes:
+  """Encodes the argument int to ULEB128 format.
+
+  Args:
+    value: Unsigned integer.
+
+  Returns:
+    The value encoded as ULEB128 bytes.
+  """
+  if value < 0:
+    raise ValueError(f'Cannot uleb128 encode negative value ({value}).')
+
+  uleb128_bytes = []
+  done = False
+  while not done:
+    value, lowest_seven_bits = divmod(value, 0x80)
+    done = value == 0
+    uleb128_bytes.append(lowest_seven_bits | (0x80 if not done else 0x00))
+  return EncodeAsBytes(*uleb128_bytes)
+
+
+def EncodeStackPointerUpdate(offset: int) -> bytes:
+  """Encodes a stack pointer update as arm unwind instructions.
+
+  Args:
+    offset: Offset to apply on stack pointer. Must be a nonzero multiple of 4,
+      in range [-0x200, -0x04] or [0x04, inf).
+
+  Returns:
+    A list of arm unwind instructions as bytes.
+  """
+  assert offset % 4 == 0
+
+  abs_offset = abs(offset)
+  instruction_code = 0b01000000 if offset < 0 else 0b00000000
+  if 0x04 <= abs_offset <= 0x200:
+    instructions = [
+        # vsp = vsp + (xxxxxx << 2) + 4. Covers range 0x04-0x100 inclusive.
+        instruction_code | ((min(abs_offset, 0x100) - 4) >> 2)
+    ]
+    # For vsp increments of 0x104-0x200 we use 00xxxxxx twice.
+    if abs_offset >= 0x104:
+      instructions.append(instruction_code | ((abs_offset - 0x100 - 4) >> 2))
+    try:
+      return EncodeAsBytes(*instructions)
+    except ValueError as e:
+      raise RuntimeError('offset = %d produced out of range value' %
+                         offset) from e
+  else:
+    # This only encodes positive sp movement.
+    assert offset > 0, offset
+    return EncodeAsBytes(0b10110010  # vsp = vsp + 0x204 + (uleb128 << 2)
+                         ) + Uleb128Encode((offset - 0x204) >> 2)
+
+
+def EncodePop(registers: Sequence[int]) -> bytes:
+  """Encodes popping of a sequence of registers as arm unwind instructions.
+
+  Args:
+    registers: Collection of target registers to accept values popped from
+      stack. Register value order in the sequence does not matter. Values are
+      popped based on register index order.
+
+  Returns:
+    A list of arm unwind instructions as bytes.
+  """
+  assert all(
+      r in range(4, 16)
+      for r in registers), f'Can only pop r4 ~ r15. Registers:\n{registers}.'
+  assert len(registers) > 0, 'Register sequence cannot be empty.'
+
+  instructions: List[int] = []
+
+  # Check if the pushed registers are a contiguous set starting from r4 (and
+  # ending prior to r12). This scenario has its own encoding.
+  pop_lr = 14 in registers
+  non_lr_registers = [r for r in registers if r != 14]
+  non_lr_registers_continuous_from_r4 = \
+      sorted(non_lr_registers) == list(range(4, 4 + len(non_lr_registers)))
+
+  if (pop_lr and 0 < len(non_lr_registers) <= 8
+      and non_lr_registers_continuous_from_r4):
+    instructions.append(0b10101000
+                        | (len(non_lr_registers) - 1)  # Pop r4-r[4+nnn], r14.
+                        )
+  else:
+    register_bits = 0
+    for register in registers:
+      register_bits |= 1 << register
+    register_bits = register_bits >> 4  # Skip r0 ~ r3.
+    instructions.extend([
+        # Pop up to 12 integer registers under masks {r15-r12}, {r11-r4}.
+        0b10000000 | (register_bits >> 8),
+        register_bits & 0xff
+    ])
+
+  return EncodeAsBytes(*instructions)
+
+
+class UnwindType(enum.Enum):
+  """
+  The type of unwind action to perform.
+  """
+
+  # Use lr as the return address.
+  RETURN_TO_LR = 1
+
+  # Increment or decrement the stack pointer and/or pop registers (r4 ~ r15).
+  # If both, the increment/decrement occurs first.
+  UPDATE_SP_AND_OR_POP_REGISTERS = 2
+
+  # Restore the stack pointer from a register then increment/decrement the
+  # stack pointer.
+  RESTORE_SP_FROM_REGISTER = 3
+
+  # No action necessary. Used for floating point register pops.
+  NO_ACTION = 4
+
+
+class AddressUnwind(NamedTuple):
+  """Record representing unwind information for an address within a function.
+
+  Attributes:
+    address_offset: The offset of the address from the start of the function.
+    unwind_type: The type of unwind to perform from the address.
+    sp_offset: The offset to apply to the stack pointer.
+    registers: The registers involved in the unwind.
+  """
+  address_offset: int
+  unwind_type: UnwindType
+  sp_offset: int
+  registers: Tuple[int, ...]
+
+
+class FunctionUnwind(NamedTuple):
+  """Record representing unwind information for a function.
+
+  Attributes:
+    address: The address of the function.
+    size: The function size in bytes.
+    address_unwinds: The unwind info at each address in the function.
+  """
+
+  address: int
+  size: int
+  address_unwinds: Tuple[AddressUnwind, ...]
+
+
+def EncodeAddressUnwind(address_unwind: AddressUnwind) -> bytes:
+  """Encodes an `AddressUnwind` object as arm unwind instructions.
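+
+  For instance, a RETURN_TO_LR unwind encodes to the single 'finish'
+  instruction 0b10110000, while an UPDATE_SP_AND_OR_POP_REGISTERS unwind is
+  the concatenation of a stack pointer update (when sp_offset is nonzero)
+  followed by a pop (when registers are present).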
+
+  Args:
+    address_unwind: Record representing unwind information for an address
+      within a function.
+
+  Returns:
+    A list of arm unwind instructions as bytes.
+  """
+  if address_unwind.unwind_type == UnwindType.RETURN_TO_LR:
+    return EncodeAsBytes(0b10110000)  # Finish.
+  if address_unwind.unwind_type == UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS:
+    return ((EncodeStackPointerUpdate(address_unwind.sp_offset)
+             if address_unwind.sp_offset else b'') +
+            (EncodePop(address_unwind.registers)
+             if address_unwind.registers else b''))
+
+  if address_unwind.unwind_type == UnwindType.RESTORE_SP_FROM_REGISTER:
+    assert len(address_unwind.registers) == 1
+    return (EncodeAsBytes(0b10010000
+                          | address_unwind.registers[0]  # Set vsp = r[nnnn].
+                          ) +
+            (EncodeStackPointerUpdate(address_unwind.sp_offset)
+             if address_unwind.sp_offset else b''))
+
+  if address_unwind.unwind_type == UnwindType.NO_ACTION:
+    return b''
+
+  assert False, 'unknown unwind type'
+  return b''
+
+
+class UnwindInstructionsParser(abc.ABC):
+  """Base class for parsers of breakpad unwind instruction sequences.
+
+  Provides regexes matching breakpad instruction sequences understood by the
+  parser, and parsing of the sequences from the regex match.
+  """
+
+  @abc.abstractmethod
+  def GetBreakpadInstructionsRegex(self) -> re.Pattern:
+    pass
+
+  @abc.abstractmethod
+  def ParseFromMatch(self, address_offset: int, cfa_sp_offset: int,
+                     match: re.Match) -> Tuple[AddressUnwind, int]:
+    """Parses the unwind information from the regex match.
+
+    Args:
+      address_offset: Offset from function start address.
+      cfa_sp_offset: CFA stack pointer offset.
+
+    Returns:
+      The unwind info for the address plus the new cfa_sp_offset.
+    """
+
+
+class NullParser(UnwindInstructionsParser):
+  """Translates the state before any instruction has been executed."""
+
+  regex = re.compile(r'^\.cfa: sp 0 \+ \.ra: lr$')
+
+  def GetBreakpadInstructionsRegex(self) -> re.Pattern:
+    return self.regex
+
+  def ParseFromMatch(self, address_offset: int, cfa_sp_offset: int,
+                     match: re.Match) -> Tuple[AddressUnwind, int]:
+    return AddressUnwind(address_offset, UnwindType.RETURN_TO_LR, 0, ()), 0
+
+
+class PushOrSubSpParser(UnwindInstructionsParser):
+  """Translates unwinds from push or sub sp, #constant instructions."""
+
+  # We expect at least one of the three outer groups to be non-empty. Cases:
+  #
+  # Standard prologue pushes.
+  # Match the first two and optionally the third.
+  #
+  # Standard prologue sub sp, #constant.
+  # Match only the first.
+  #
+  # Pushes in dynamic stack allocation functions after saving sp.
+  # Match only the third since they don't alter the stack pointer or store the
+  # return address.
+  #
+  # Leaf functions that use callee-save registers.
+  # Match the first and third but not the second.
+  regex = re.compile(r'^(?:\.cfa: sp (\d+) \+ ?)?'
+                     r'(?:\.ra: \.cfa (-\d+) \+ \^ ?)?'
+                     r'((?:r\d+: \.cfa -\d+ \+ \^ ?)*)$')
+
+  # 'r' followed by digits, with 'r' matched via positive lookbehind so only
+  # the number appears in the match.
+  register_regex = re.compile(r'(?<=r)(\d+)')
+
+  def GetBreakpadInstructionsRegex(self) -> re.Pattern:
+    return self.regex
+
+  def ParseFromMatch(self, address_offset: int, cfa_sp_offset: int,
+                     match: re.Match) -> Tuple[AddressUnwind, int]:
+    # The group will be None if the outer non-capturing groups for the (\d+)
+    # and (-\d+) expressions are not matched.
+    new_cfa_sp_offset, ra_cfa_offset = (int(group) if group else None
+                                        for group in match.groups()[:2])
+
+    # Registers are pushed in reverse order by register number so are popped
+    # in order. Sort them to ensure the proper order.
+    registers = sorted([
+        int(register)
+        for register in self.register_regex.findall(match.group(3))
+        # `UPDATE_SP_AND_OR_POP_REGISTERS` only supports popping of registers
+        # r4 ~ r15. The ignored registers are translated to sp increments by
+        # the following calculation on `sp_offset`.
+        if int(register) in range(4, 16)
+    ] +
+                       # Also pop lr (ra in breakpad terms) if it was stored.
+                       ([14] if ra_cfa_offset is not None else []))
+
+    sp_offset = 0
+    if new_cfa_sp_offset is not None:
+      sp_offset = new_cfa_sp_offset - cfa_sp_offset
+      assert sp_offset % 4 == 0
+      if sp_offset >= len(registers) * 4:
+        # Handles the sub sp, #constant case, and push instructions that push
+        # caller-save registers r0-r3 which don't get encoded in the unwind
+        # instructions. In the latter case we need to move the stack pointer up
+        # to the first pushed register.
+        sp_offset -= len(registers) * 4
+
+    return AddressUnwind(address_offset,
+                         UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, sp_offset,
+                         tuple(registers)), new_cfa_sp_offset or cfa_sp_offset
+
+
+class VPushParser(UnwindInstructionsParser):
+  # VPushes that occur in dynamic stack allocation functions after storing the
+  # stack pointer don't change the stack pointer or push any register that we
+  # care about. The first group will not match in those cases.
+  #
+  # Breakpad doesn't seem to understand how to name the floating point
+  # registers so it calls them unnamed_register.
+  regex = re.compile(r'^(?:\.cfa: sp (\d+) \+ )?'
+                     r'(?:unnamed_register\d+: \.cfa -\d+ \+ \^ ?)+$')
+
+  def GetBreakpadInstructionsRegex(self) -> re.Pattern:
+    return self.regex
+
+  def ParseFromMatch(self, address_offset: int, cfa_sp_offset: int,
+                     match: re.Match) -> Tuple[AddressUnwind, int]:
+    # `match.group(1)`, which corresponds to the (\d+) expression, will be None
+    # if the first outer non-capturing group is not matched.
+    new_cfa_sp_offset = int(match.group(1)) if match.group(1) else None
+    if new_cfa_sp_offset is None:
+      return (AddressUnwind(address_offset, UnwindType.NO_ACTION, 0,
+                            ()), cfa_sp_offset)
+
+    sp_offset = new_cfa_sp_offset - cfa_sp_offset
+    assert sp_offset % 4 == 0
+    return AddressUnwind(address_offset,
+                         UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, sp_offset,
+                         ()), new_cfa_sp_offset
+
+
+class StoreSpParser(UnwindInstructionsParser):
+  regex = re.compile(r'^\.cfa: r(\d+) (\d+) \+$')
+
+  def GetBreakpadInstructionsRegex(self) -> re.Pattern:
+    return self.regex
+
+  def ParseFromMatch(self, address_offset: int, cfa_sp_offset: int,
+                     match: re.Match) -> Tuple[AddressUnwind, int]:
+    register = int(match.group(1))
+    new_cfa_sp_offset = int(match.group(2))
+    sp_offset = new_cfa_sp_offset - cfa_sp_offset
+    assert sp_offset % 4 == 0
+    return AddressUnwind(address_offset, UnwindType.RESTORE_SP_FROM_REGISTER,
+                         sp_offset, (register, )), new_cfa_sp_offset
+
+
+def EncodeUnwindInstructionTable(complete_instruction_sequences: Iterable[bytes]
+                                 ) -> Tuple[bytes, Dict[bytes, int]]:
+  """Encodes the unwind instruction table.
+
+  Deduplicates the encoded unwind instruction sequences. Generates the table
+  and a dictionary mapping each instruction sequence to its starting index in
+  the table.
+
+  The instruction table is used by the unwinder to provide the sequence of
+  unwind instructions to execute for each function, separated by offset
+  into the function.
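+
+  For example, if many functions share the common terminal sequence
+  bytes([0b10110000]) (finish), that sequence is stored in the table only
+  once, and every referencing entry in the function offset table reuses the
+  same table offset.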
+
+  Args:
+    complete_instruction_sequences: An iterable of encoded unwind instruction
+      sequences. The sequences represent the series of unwind instructions to
+      execute corresponding to offsets within each function.
+
+  Returns:
+    A tuple containing:
+    - The unwind instruction table as bytes.
+    - The mapping from the instruction sequence to the offset in the unwind
+      instruction table. This mapping is used to construct the function offset
+      table, which references entries in the unwind instruction table.
+  """
+  # Because the function offset table uses variable-length number encoding
+  # (uleb128), where smaller numbers take fewer bytes to represent, we sort
+  # the unwind instruction table by the number of references from the function
+  # offset table in order to minimize the size of the function offset table.
+  ref_counts: Dict[bytes, int] = collections.defaultdict(int)
+  for sequence in complete_instruction_sequences:
+    ref_counts[sequence] += 1
+
+  def ComputeScore(sequence):
+    """Score for each sequence is computed as ref_count / size_of_sequence.
+
+    Following the greedy heuristic, items with a higher value / space cost
+    ratio should be prioritized. Here the value is the number of bytes saved
+    in the function offset table, represented by ref_count. The space cost is
+    the space taken in the unwind instruction table, represented by
+    size_of_sequence.
+
+    Note: In order to ensure build-time determinism, `sequence` is also
+    returned to resolve the sorting order when scores are the same.
+    """
+    return ref_counts[sequence] / len(sequence), sequence
+
+  ordered_sequences = sorted(ref_counts.keys(), key=ComputeScore, reverse=True)
+  offsets: Dict[bytes, int] = {}
+  current_offset = 0
+  for sequence in ordered_sequences:
+    offsets[sequence] = current_offset
+    current_offset += len(sequence)
+  return b''.join(ordered_sequences), offsets
+
+
+class EncodedAddressUnwind(NamedTuple):
+  """Record representing unwind information for an address within a function.
+
+  This structure represents the same concept as `AddressUnwind`. The only
+  difference is that the way to unwind from the address is represented as
+  encoded ARM unwind instructions.
+
+  Attributes:
+    address_offset: The offset of the address from the start address of the
+      function.
+    complete_instruction_sequence: The full ARM unwind instruction sequence to
+      unwind from the `address_offset`.
+  """
+  address_offset: int
+  complete_instruction_sequence: bytes
+
+
+def EncodeAddressUnwinds(address_unwinds: Tuple[AddressUnwind, ...]
+                         ) -> Tuple[EncodedAddressUnwind, ...]:
+  """Encodes the unwind instructions and offset for the addresses within a
+  function.
+
+  Args:
+    address_unwinds: A tuple of unwind state for addresses within a function.
+
+  Returns:
+    The encoded unwind instructions and offsets for the addresses within a
+    function, ordered by decreasing offset.
+  """
+  sorted_address_unwinds: List[AddressUnwind] = sorted(
+      address_unwinds,
+      key=lambda address_unwind: address_unwind.address_offset,
+      reverse=True)
+  unwind_instructions: List[bytes] = [
+      EncodeAddressUnwind(address_unwind)
+      for address_unwind in sorted_address_unwinds
+  ]
+
+  # A complete instruction sequence contains all the unwind instructions
+  # necessary to unwind from an offset within a function. For a given offset
+  # this includes the offset's instructions plus the instructions for all
+  # earlier offsets. The offsets are stored in reverse order, hence the i:
+  # range rather than :i+1.
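+  # E.g. (mirroring the unit test testEncodeOrder): offsets (4, 0) with
+  # per-offset encodings (X, Y) produce the complete sequences X + Y for
+  # offset 4 and Y for offset 0.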
+  complete_instruction_sequences = [
+      b''.join(unwind_instructions[i:]) for i in range(len(unwind_instructions))
+  ]
+
+  encoded_unwinds: List[EncodedAddressUnwind] = []
+  for address_unwind, sequence in zip(sorted_address_unwinds,
+                                      complete_instruction_sequences):
+    encoded_unwinds.append(
+        EncodedAddressUnwind(address_unwind.address_offset, sequence))
+  return tuple(encoded_unwinds)
+
+
+class EncodedFunctionUnwind(NamedTuple):
+  """Record representing unwind information for a function.
+
+  This structure represents the same concept as `FunctionUnwind`, but with
+  some differences:
+  - Attribute `address` is split into 2 attributes: `page_number` and
+    `page_offset`.
+  - Attribute `size` is dropped.
+  - Attribute `address_unwinds` becomes a collection of `EncodedAddressUnwind`s,
+    instead of a collection of `AddressUnwind`s.
+
+  Attributes:
+    page_number: The upper bits (bits 17 ~ 31) of the byte offset from the
+      text section start.
+    page_offset: The lower bits (bits 1 ~ 16) of the instruction offset from
+      the text section start.
+    address_unwinds: A collection of `EncodedAddressUnwind`s.
+
+  """
+
+  page_number: int
+  page_offset: int
+  address_unwinds: Tuple[EncodedAddressUnwind, ...]
+
+
+# The trivial unwind is defined as a single `RETURN_TO_LR` instruction
+# at the start of the function.
+TRIVIAL_UNWIND: Tuple[EncodedAddressUnwind, ...] = EncodeAddressUnwinds(
+    (AddressUnwind(address_offset=0,
+                   unwind_type=UnwindType.RETURN_TO_LR,
+                   sp_offset=0,
+                   registers=()), ))
+
+# The refuse-to-unwind filler is used to fill the invalid space
+# before the first function in the first page and after the last function
+# in the last page.
+REFUSE_TO_UNWIND: Tuple[EncodedAddressUnwind, ...] = (EncodedAddressUnwind(
+    address_offset=0,
+    complete_instruction_sequence=bytes([0b10000000, 0b00000000])), )
+
+
+def EncodeFunctionUnwinds(function_unwinds: Iterable[FunctionUnwind],
+                          text_section_start_address: int
+                          ) -> Iterable[EncodedFunctionUnwind]:
+  """Encodes the unwind state for all functions defined in the binary.
+
+  This function
+  - sorts the collection of `FunctionUnwind`s by address.
+  - fills in gaps between functions with the trivial unwind.
+  - fills the space in the last page after the last function with refuse to
+    unwind.
+  - fills the space in the first page before the first function with refuse
+    to unwind.
+
+  Args:
+    function_unwinds: An iterable of function unwind states.
+    text_section_start_address: The address of the .text section in the ELF
+      file.
+
+  Returns:
+    The encoded function unwind states with no gaps between functions, ordered
+    by ascending address.
+  """
+
+  def GetPageNumber(address: int) -> int:
+    """Calculates the page number.
+
+    Page number is calculated as byte_offset_from_text_section_start >> 17,
+    i.e. the upper bits (bits 17 ~ 31) of the byte offset from the text
+    section start.
+    """
+    return (address - text_section_start_address) >> 17
+
+  def GetPageOffset(address: int) -> int:
+    """Calculates the page offset.
+
+    Page offset is calculated as (byte_offset_from_text_section_start >> 1)
+    & 0xffff, i.e. the lower bits (bits 1 ~ 16) of the instruction offset
+    from the text section start.
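+
+    For example, an address 0x30000 bytes past the text section start has
+    page offset (0x30000 >> 1) & 0xffff == 0x8000 (and, per GetPageNumber
+    above, falls on page 0x30000 >> 17 == 1).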
+ """ + return ((address - text_section_start_address) >> 1) & 0xffff + + sorted_function_unwinds: List[FunctionUnwind] = sorted( + function_unwinds, key=lambda function_unwind: function_unwind.address) + + if sorted_function_unwinds[0].address > text_section_start_address: + yield EncodedFunctionUnwind(page_number=0, + page_offset=0, + address_unwinds=REFUSE_TO_UNWIND) + + prev_func_end_address: int = sorted_function_unwinds[0].address + + gaps = 0 + for unwind in sorted_function_unwinds: + assert prev_func_end_address <= unwind.address, ( + 'Detected overlap between functions.') + + if prev_func_end_address < unwind.address: + # Gaps between functions are typically filled by regions of thunks which + # do not alter the stack pointer. Filling these gaps with TRIVIAL_UNWIND + # is the appropriate unwind strategy. + gaps += 1 + yield EncodedFunctionUnwind(GetPageNumber(prev_func_end_address), + GetPageOffset(prev_func_end_address), + TRIVIAL_UNWIND) + + yield EncodedFunctionUnwind(GetPageNumber(unwind.address), + GetPageOffset(unwind.address), + EncodeAddressUnwinds(unwind.address_unwinds)) + + prev_func_end_address = unwind.address + unwind.size + + if GetPageOffset(prev_func_end_address) != 0: + yield EncodedFunctionUnwind(GetPageNumber(prev_func_end_address), + GetPageOffset(prev_func_end_address), + REFUSE_TO_UNWIND) + + logging.info('%d/%d gaps between functions filled with trivial unwind.', gaps, + len(sorted_function_unwinds)) + + +def EncodeFunctionOffsetTable( + encoded_address_unwind_sequences: Iterable[ + Tuple[EncodedAddressUnwind, ...]], + unwind_instruction_table_offsets: Dict[bytes, int] +) -> Tuple[bytes, Dict[Tuple[EncodedAddressUnwind, ...], int]]: + """Encodes the function offset table. + + The function offset table maps local instruction offset from function + start to the location in the unwind instruction table. + + Args: + encoded_address_unwind_sequences: An iterable of encoded address unwind + sequences. + unwind_instruction_table_offsets: The offset mapping returned from + `EncodeUnwindInstructionTable`. + + Returns: + A tuple containing: + - The function offset table as bytes. + - The mapping from the `EncodedAddressUnwind`s to the offset in the function + offset table. This mapping is used to construct the function table, which + references entries in the function offset table. + """ + function_offset_table = bytearray() + offsets: Dict[Tuple[EncodedAddressUnwind, ...], int] = {} + + for sequence in encoded_address_unwind_sequences: + if sequence in offsets: + continue + + offsets[sequence] = len(function_offset_table) + for address_offset, complete_instruction_sequence in sequence: + # Note: address_offset is the number of bytes from one address to another, + # while the instruction_offset is the number of 2-byte instructions + # from one address to another. + instruction_offset = address_offset >> 1 + function_offset_table += ( + Uleb128Encode(instruction_offset) + Uleb128Encode( + unwind_instruction_table_offsets[complete_instruction_sequence])) + + return bytes(function_offset_table), offsets + + +def EncodePageTableAndFunctionTable( + function_unwinds: Iterable[EncodedFunctionUnwind], + function_offset_table_offsets: Dict[Tuple[EncodedAddressUnwind, ...], int] +) -> Tuple[bytes, bytes]: + """Encode page table and function table as bytes. + + Page table: + A table that contains the mapping from page_number to the location of the + entry for the first function on the page in the function table. 
+
+  Function table:
+    A table that contains the mapping from page_offset to the location of an
+    entry in the function offset table.
+
+  Args:
+    function_unwinds: All encoded function unwinds in the module.
+    function_offset_table_offsets: The offset mapping returned from
+      `EncodeFunctionOffsetTable`.
+
+  Returns:
+    A tuple containing:
+    - The page table as bytes.
+    - The function table as bytes.
+  """
+  page_function_unwinds: Dict[
+      int, List[EncodedFunctionUnwind]] = collections.defaultdict(list)
+  for function_unwind in function_unwinds:
+    page_function_unwinds[function_unwind.page_number].append(function_unwind)
+
+  raw_page_table: List[int] = []
+  function_table = bytearray()
+
+  for page_number, same_page_function_unwinds in sorted(
+      page_function_unwinds.items(), key=lambda item: item[0]):
+    # Pad empty pages.
+    # Empty pages can occur when a function spans over multiple pages.
+    # Example:
+    #   A page table with a starting function that spans over 3 pages.
+    #   page_table:
+    #   [0, 1, 1, 1]
+    #   function_table:
+    #   [
+    #     # Page 0
+    #     (0, 20)  # This function spans from page 0 offset 0 to page 3
+    #              # offset 5.
+    #     # Page 1 is empty.
+    #     # Page 2 is empty.
+    #     # Page 3
+    #     (6, 70)
+    #   ]
+    assert page_number > len(raw_page_table) - 1
+    number_of_empty_pages = page_number - len(raw_page_table)
+    # The function table is represented as `base::FunctionTableEntry[]`,
+    # where `base::FunctionTableEntry` is 4 bytes.
+    function_table_index = len(function_table) // 4
+    raw_page_table.extend([function_table_index] * (number_of_empty_pages + 1))
+    assert page_number == len(raw_page_table) - 1
+
+    for function_unwind in sorted(
+        same_page_function_unwinds,
+        key=lambda function_unwind: function_unwind.page_offset):
+      function_table += struct.pack(
+          'HH', function_unwind.page_offset,
+          function_offset_table_offsets[function_unwind.address_unwinds])
+
+  page_table = struct.pack(f'{len(raw_page_table)}I', *raw_page_table)
+
+  return page_table, bytes(function_table)
+
+
+ALL_PARSERS: Tuple[UnwindInstructionsParser, ...] = (
+    NullParser(),
+    PushOrSubSpParser(),
+    StoreSpParser(),
+    VPushParser(),
+)
+
+
+def ParseAddressCfi(address_cfi: AddressCfi, function_start_address: int,
+                    parsers: Tuple[UnwindInstructionsParser, ...],
+                    prev_cfa_sp_offset: int
+                    ) -> Tuple[Union[AddressUnwind, None], bool, int]:
+  """Parses address CFI with given parsers.
+
+  Args:
+    address_cfi: The CFI for an address in the function.
+    function_start_address: The start address of the function.
+    parsers: Available parsers to try on CFI data.
+    prev_cfa_sp_offset: Previous CFA stack pointer offset.
+
+  Returns:
+    A tuple containing:
+    - An `AddressUnwind` object when the parse is successful, None otherwise.
+    - Whether the address is in function epilogue.
+    - The new cfa_sp_offset.
+  """
+  for parser in parsers:
+    match = parser.GetBreakpadInstructionsRegex().search(
+        address_cfi.unwind_instructions)
+    if not match:
+      continue
+
+    address_unwind, cfa_sp_offset = parser.ParseFromMatch(
+        address_cfi.address - function_start_address, prev_cfa_sp_offset, match)
+
+    in_epilogue = (
+        prev_cfa_sp_offset > cfa_sp_offset
+        and address_unwind.unwind_type != UnwindType.RESTORE_SP_FROM_REGISTER)
+
+    return (address_unwind if not in_epilogue else None, in_epilogue,
+            cfa_sp_offset)
+
+  return None, False, prev_cfa_sp_offset
+
+
+def GenerateUnwinds(function_cfis: Iterable[FunctionCfi],
+                    parsers: Tuple[UnwindInstructionsParser, ...]
+                    ) -> Iterable[FunctionUnwind]:
+  """Generates parsed function unwind states from breakpad CFI data.
+
+  This function parses `FunctionCfi`s to `FunctionUnwind`s using
+  `UnwindInstructionsParser`.
+
+  Args:
+    function_cfis: An iterable of function CFI data.
+    parsers: Available parsers to try on CFI address data.
+
+  Returns:
+    An iterable of parsed function unwind states.
+  """
+  functions = 0
+  addresses = 0
+  handled_addresses = 0
+  epilogues_seen = 0
+
+  for function_cfi in function_cfis:
+    functions += 1
+    address_unwinds: List[AddressUnwind] = []
+    cfa_sp_offset = 0
+    for address_cfi in function_cfi.address_cfi:
+      addresses += 1
+
+      address_unwind, in_epilogue, cfa_sp_offset = ParseAddressCfi(
+          address_cfi, function_cfi.address_cfi[0].address, parsers,
+          cfa_sp_offset)
+
+      if address_unwind:
+        handled_addresses += 1
+        address_unwinds.append(address_unwind)
+        continue
+
+      if in_epilogue:
+        epilogues_seen += 1
+        break
+
+      logging.info('unrecognized CFI: %x %s.', address_cfi.address,
+                   address_cfi.unwind_instructions)
+
+    if address_unwinds:
+      # We expect that the unwind information for every function starts with a
+      # trivial unwind (RETURN_TO_LR) prior to the execution of any code in the
+      # function. This is required by the arm calling convention which involves
+      # setting lr to the return address on calling into a function.
+      assert address_unwinds[0].address_offset == 0
+      assert address_unwinds[0].unwind_type == UnwindType.RETURN_TO_LR
+
+      yield FunctionUnwind(function_cfi.address_cfi[0].address,
+                           function_cfi.size, tuple(address_unwinds))
+
+  logging.info('%d functions.', functions)
+  logging.info('%d/%d addresses handled.', handled_addresses, addresses)
+  logging.info('epilogues_seen: %d.', epilogues_seen)
+
+
+def EncodeUnwindInfo(page_table: bytes, function_table: bytes,
+                     function_offset_table: bytes,
+                     unwind_instruction_table: bytes) -> bytes:
+  """Encodes all unwind tables as a single binary.
+
+  Concatenates all unwind table binaries together and attaches a header at the
+  start with an offset-size pair for each table.
+
+  offset: The offset to the target table from the start of the unwind info
+    binary in bytes.
+  size: The declared size of the target table.
+
+  Both offset and size are represented as 32-bit integers.
+  See `base::ChromeUnwindInfoHeaderAndroid` for more details.
+
+  Args:
+    page_table: The page table as bytes.
+    function_table: The function table as bytes.
+    function_offset_table: The function offset table as bytes.
+    unwind_instruction_table: The unwind instruction table as bytes.
+
+  Returns:
+    A single binary containing
+    - A header that points to the location of each table.
+    - All unwind tables.
+  """
+  unwind_info_header = bytearray()
+  # Each table is represented as an (offset, size) pair; both offset and size
+  # are represented as 4-byte integers.
+  unwind_info_header_size = 4 * 2 * 4
+  unwind_info_body = bytearray()
+
+  # Both the page table and the function table need to be aligned because
+  # their contents are interpreted as multi-byte integers. However, the byte
+  # sizes of the header, the page table and the function table are all
+  # multiples of 4 and the resource will be memory mapped at a 4 byte
+  # boundary, so no extra care is required to align the page table and the
+  # function table.
+  #
+  # The function offset table and the unwind instruction table are accessed
+  # byte by byte, so they only need 1 byte alignment.
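+  #
+  # Resulting layout (offsets relative to the start of the returned blob):
+  #   bytes 0..31: the header, one (offset, size) pair of 4-byte integers
+  #                for each of the four tables;
+  #   bytes 32.. : page table, function table, function offset table and
+  #                unwind instruction table, concatenated in that order.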
+
+  assert len(page_table) % 4 == 0, (
+      'Each entry in the page table should be a 4-byte integer.')
+  assert len(function_table) % 4 == 0, (
+      'Each entry in the function table should be a pair of 2 2-byte integers.')
+
+  for table in page_table, function_table:
+    offset = unwind_info_header_size + len(unwind_info_body)
+    # For the page table and the function table, the declared size is the
+    # number of entries in each table. The tables will be aligned to a 4 byte
+    # boundary because the resource will be memory mapped at a 4 byte boundary
+    # and the header is a multiple of 4 bytes.
+    declared_size = len(table) // 4
+    unwind_info_header += struct.pack('II', offset, declared_size)
+    unwind_info_body += table
+
+  for table in function_offset_table, unwind_instruction_table:
+    offset = unwind_info_header_size + len(unwind_info_body)
+    # Because both the function offset table and the unwind instruction table
+    # contain variable length encoded numbers, the declared size is simply the
+    # number of bytes in each table. The tables only require 1 byte alignment.
+    declared_size = len(table)
+    unwind_info_header += struct.pack('II', offset, declared_size)
+    unwind_info_body += table
+
+  return bytes(unwind_info_header + unwind_info_body)
+
+
+def GenerateUnwindTables(
+    encoded_function_unwinds_iterable: Iterable[EncodedFunctionUnwind]
+) -> Tuple[bytes, bytes, bytes, bytes]:
+  """Generates all unwind tables as bytes.
+
+  Args:
+    encoded_function_unwinds_iterable: Encoded function unwinds for all
+      functions in the ELF binary.
+
+  Returns:
+    A tuple containing:
+    - The page table as bytes.
+    - The function table as bytes.
+    - The function offset table as bytes.
+    - The unwind instruction table as bytes.
+  """
+  encoded_function_unwinds: List[EncodedFunctionUnwind] = list(
+      encoded_function_unwinds_iterable)
+  complete_instruction_sequences: List[bytes] = []
+  encoded_address_unwind_sequences: List[Tuple[EncodedAddressUnwind, ...]] = []
+
+  for encoded_function_unwind in encoded_function_unwinds:
+    encoded_address_unwind_sequences.append(
+        encoded_function_unwind.address_unwinds)
+    for address_unwind in encoded_function_unwind.address_unwinds:
+      complete_instruction_sequences.append(
+          address_unwind.complete_instruction_sequence)
+
+  unwind_instruction_table, unwind_instruction_table_offsets = (
+      EncodeUnwindInstructionTable(complete_instruction_sequences))
+
+  function_offset_table, function_offset_table_offsets = (
+      EncodeFunctionOffsetTable(encoded_address_unwind_sequences,
+                                unwind_instruction_table_offsets))
+
+  page_table, function_table = EncodePageTableAndFunctionTable(
+      encoded_function_unwinds, function_offset_table_offsets)
+
+  return (page_table, function_table, function_offset_table,
+          unwind_instruction_table)
+
+
+def ReadTextSectionStartAddress(readobj_path: str, libchrome_path: str) -> int:
+  """Reads the .text section start address of libchrome ELF.
+
+  Arguments:
+    readobj_path: Path to the llvm-readobj binary.
+    libchrome_path: Path to the libchrome binary.
+
+  Returns:
+    The text section start address as a number.
+  """
+  def GetSectionName(section) -> str:
+    # See crbug.com/1426287 for context on different JSON names.
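+    # Depending on the llvm-readobj version, the parsed JSON holds the name
+    # either as {'Name': {'Name': '.text', ...}} or as
+    # {'Name': {'Value': '.text', ...}}; both shapes are handled below.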
+ if 'Name' in section['Section']['Name']: + return section['Section']['Name']['Name'] + return section['Section']['Name']['Value'] + + proc = subprocess.Popen( + [readobj_path, '--sections', '--elf-output-style=JSON', libchrome_path], + stdout=subprocess.PIPE, + encoding='ascii') + + elfs = json.loads(proc.stdout.read())[0] + sections = elfs['Sections'] + + return next(s['Section']['Address'] for s in sections + if GetSectionName(s) == '.text') + + +def main(): + build_utils.InitLogging('CREATE_UNWIND_TABLE_DEBUG') + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument('--input_path', + help='Path to the unstripped binary.', + required=True, + metavar='FILE') + parser.add_argument('--output_path', + help='Path to unwind info binary output.', + required=True, + metavar='FILE') + parser.add_argument('--dump_syms_path', + required=True, + help='The path of the dump_syms binary.', + metavar='FILE') + parser.add_argument('--readobj_path', + required=True, + help='The path of the llvm-readobj binary.', + metavar='FILE') + + args = parser.parse_args() + proc = subprocess.Popen(['./' + args.dump_syms_path, args.input_path, '-v'], + stdout=subprocess.PIPE, + encoding='ascii') + + function_cfis = ReadFunctionCfi(proc.stdout) + function_unwinds = GenerateUnwinds(function_cfis, parsers=ALL_PARSERS) + encoded_function_unwinds = EncodeFunctionUnwinds( + function_unwinds, + ReadTextSectionStartAddress(args.readobj_path, args.input_path)) + (page_table, function_table, function_offset_table, + unwind_instruction_table) = GenerateUnwindTables(encoded_function_unwinds) + unwind_info: bytes = EncodeUnwindInfo(page_table, function_table, + function_offset_table, + unwind_instruction_table) + + if proc.wait(): + logging.critical('dump_syms exited with return code %d', proc.returncode) + sys.exit(proc.returncode) + + with open(args.output_path, 'wb') as f: + f.write(unwind_info) + + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/android/gyp/create_unwind_table_tests.py b/android/gyp/create_unwind_table_tests.py new file mode 100755 index 000000000000..14fbc227ff2c --- /dev/null +++ b/android/gyp/create_unwind_table_tests.py @@ -0,0 +1,1182 @@ +#!/usr/bin/env python3 +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Tests for create_unwind_table.py. + +This test suite contains tests for the custom unwind table creation for 32-bit +arm builds. 
+""" + +import io +import struct + +import unittest +import unittest.mock +import re + +from create_unwind_table import ( + AddressCfi, AddressUnwind, FilterToNonTombstoneCfi, FunctionCfi, + FunctionUnwind, EncodeAddressUnwind, EncodeAddressUnwinds, + EncodedAddressUnwind, EncodeAsBytes, EncodeFunctionOffsetTable, + EncodedFunctionUnwind, EncodeFunctionUnwinds, EncodeStackPointerUpdate, + EncodePop, EncodePageTableAndFunctionTable, EncodeUnwindInfo, + EncodeUnwindInstructionTable, GenerateUnwinds, GenerateUnwindTables, + NullParser, ParseAddressCfi, PushOrSubSpParser, ReadFunctionCfi, + REFUSE_TO_UNWIND, StoreSpParser, TRIVIAL_UNWIND, Uleb128Encode, + UnwindInstructionsParser, UnwindType, VPushParser) + + +class _TestReadFunctionCfi(unittest.TestCase): + def testFilterTombstone(self): + input_lines = [ + 'file name', + 'STACK CFI INIT 0 ', + 'STACK CFI 100 ', + 'STACK CFI INIT 1 ', + 'STACK CFI 200 ', + ] + + f = io.StringIO(''.join(line + '\n' for line in input_lines)) + + self.assertEqual([ + 'STACK CFI INIT 1 \n', + 'STACK CFI 200 \n', + ], list(FilterToNonTombstoneCfi(f))) + + def testReadFunctionCfiTombstoneFiltered(self): + input_lines = [ + 'STACK CFI INIT 0 50 .cfa: sp 0 + .ra: lr', # Tombstone function. + 'STACK CFI 2 .cfa: sp 24 + .ra: .cfa - 4 + ^ r4: .cfa - 16 + ^ ' + 'r5: .cfa - 12 + ^ r7: .cfa - 8 + ^', + 'STACK CFI INIT 15b6490 4 .cfa: sp 0 + .ra: lr', + ] + + f = io.StringIO(''.join(line + '\n' for line in input_lines)) + + self.assertEqual( + [FunctionCfi(4, (AddressCfi(0x15b6490, '.cfa: sp 0 + .ra: lr'), ))], + list(ReadFunctionCfi(f))) + + def testReadFunctionCfiSingleFunction(self): + input_lines = [ + 'STACK CFI INIT 15b6490 4 .cfa: sp 0 + .ra: lr', + 'STACK CFI 2 .cfa: sp 24 + .ra: .cfa - 4 + ^ r4: .cfa - 16 + ^ ' + 'r5: .cfa - 12 + ^ r7: .cfa - 8 + ^', + ] + + f = io.StringIO(''.join(line + '\n' for line in input_lines)) + + self.assertEqual([ + FunctionCfi(4, ( + AddressCfi(0x15b6490, '.cfa: sp 0 + .ra: lr'), + AddressCfi( + 0x2, '.cfa: sp 24 + .ra: .cfa - 4 + ^ r4: .cfa - 16 + ^ ' + 'r5: .cfa - 12 + ^ r7: .cfa - 8 + ^'), + )) + ], list(ReadFunctionCfi(f))) + + def testReadFunctionCfiMultipleFunctions(self): + input_lines = [ + 'STACK CFI INIT 15b6490 4 .cfa: sp 0 + .ra: lr', + 'STACK CFI 2 .cfa: sp 24 + .ra: .cfa - 4 + ^ r4: .cfa - 16 + ^ ' + 'r5: .cfa - 12 + ^ r7: .cfa - 8 + ^', + 'STACK CFI INIT 15b655a 26 .cfa: sp 0 + .ra: lr', + 'STACK CFI 15b655c .cfa: sp 8 + .ra: .cfa - 4 + ^ r4: .cfa - 8 + ^', + ] + + f = io.StringIO(''.join(line + '\n' for line in input_lines)) + + self.assertEqual([ + FunctionCfi(0x4, ( + AddressCfi(0x15b6490, '.cfa: sp 0 + .ra: lr'), + AddressCfi( + 0x2, '.cfa: sp 24 + .ra: .cfa - 4 + ^ r4: .cfa - 16 + ^ ' + 'r5: .cfa - 12 + ^ r7: .cfa - 8 + ^'), + )), + FunctionCfi(0x26, ( + AddressCfi(0x15b655a, '.cfa: sp 0 + .ra: lr'), + AddressCfi(0x15b655c, + '.cfa: sp 8 + .ra: .cfa - 4 + ^ r4: .cfa - 8 + ^'), + )), + ], list(ReadFunctionCfi(f))) + + +class _TestEncodeAsBytes(unittest.TestCase): + def testOutOfBounds(self): + self.assertRaises(ValueError, lambda: EncodeAsBytes(1024)) + self.assertRaises(ValueError, lambda: EncodeAsBytes(256)) + self.assertRaises(ValueError, lambda: EncodeAsBytes(-1)) + + def testEncode(self): + self.assertEqual(bytes([0]), EncodeAsBytes(0)) + self.assertEqual(bytes([255]), EncodeAsBytes(255)) + self.assertEqual(bytes([0, 1]), EncodeAsBytes(0, 1)) + + +class _TestUleb128Encode(unittest.TestCase): + def testNegativeValue(self): + self.assertRaises(ValueError, lambda: Uleb128Encode(-1)) + + def testSingleByte(self): + 
self.assertEqual(bytes([0]), Uleb128Encode(0)) + self.assertEqual(bytes([1]), Uleb128Encode(1)) + self.assertEqual(bytes([127]), Uleb128Encode(127)) + + def testMultiBytes(self): + self.assertEqual(bytes([0b10000000, 0b1]), Uleb128Encode(128)) + self.assertEqual(bytes([0b10000000, 0b10000000, 0b1]), + Uleb128Encode(128**2)) + + +class _TestEncodeStackPointerUpdate(unittest.TestCase): + def testSingleByte(self): + self.assertEqual(bytes([0b00000000 | 0]), EncodeStackPointerUpdate(4)) + self.assertEqual(bytes([0b01000000 | 0]), EncodeStackPointerUpdate(-4)) + + self.assertEqual(bytes([0b00000000 | 0b00111111]), + EncodeStackPointerUpdate(0x100)) + self.assertEqual(bytes([0b01000000 | 0b00111111]), + EncodeStackPointerUpdate(-0x100)) + + self.assertEqual(bytes([0b00000000 | 3]), EncodeStackPointerUpdate(16)) + self.assertEqual(bytes([0b01000000 | 3]), EncodeStackPointerUpdate(-16)) + + self.assertEqual(bytes([0b00111111]), EncodeStackPointerUpdate(0x100)) + + # 10110010 uleb128 + # vsp = vsp + 0x204 + (uleb128 << 2) + self.assertEqual(bytes([0b10110010, 0b00000000]), + EncodeStackPointerUpdate(0x204)) + self.assertEqual(bytes([0b10110010, 0b00000001]), + EncodeStackPointerUpdate(0x208)) + + # For vsp increments of 0x104-0x200, use 00xxxxxx twice. + self.assertEqual(bytes([0b00111111, 0b00000000]), + EncodeStackPointerUpdate(0x104)) + self.assertEqual(bytes([0b00111111, 0b00111111]), + EncodeStackPointerUpdate(0x200)) + self.assertEqual(bytes([0b01111111, 0b01111111]), + EncodeStackPointerUpdate(-0x200)) + + # Not multiple of 4. + self.assertRaises(AssertionError, lambda: EncodeStackPointerUpdate(101)) + # offset=0 is meaningless. + self.assertRaises(AssertionError, lambda: EncodeStackPointerUpdate(0)) + + +class _TestEncodePop(unittest.TestCase): + def testSingleRegister(self): + # Should reject registers outside r4 ~ r15 range. + for r in 0, 1, 2, 3, 16: + self.assertRaises(AssertionError, lambda: EncodePop([r])) + # Should use + # 1000iiii iiiiiiii + # Pop up to 12 integer registers under masks {r15-r12}, {r11-r4}. + self.assertEqual(bytes([0b10000000, 0b00000001]), EncodePop([4])) + self.assertEqual(bytes([0b10000000, 0b00001000]), EncodePop([7])) + self.assertEqual(bytes([0b10000100, 0b00000000]), EncodePop([14])) + self.assertEqual(bytes([0b10001000, 0b00000000]), EncodePop([15])) + + def testContinuousRegisters(self): + # 10101nnn + # Pop r4-r[4+nnn], r14. + self.assertEqual(bytes([0b10101000]), EncodePop([4, 14])) + self.assertEqual(bytes([0b10101001]), EncodePop([4, 5, 14])) + self.assertEqual(bytes([0b10101111]), + EncodePop([4, 5, 6, 7, 8, 9, 10, 11, 14])) + + def testDiscontinuousRegisters(self): + # 1000iiii iiiiiiii + # Pop up to 12 integer registers under masks {r15-r12}, {r11-r4}. 
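+    # Worked example: popping {r4, r15} sets bit 3 of the first mask nibble
+    # (r15, from the {r15-r12} group) and bit 0 of the second mask byte (r4,
+    # from the {r11-r4} group), giving 0b10001000 0b00000001 as asserted
+    # first below.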
+ self.assertEqual(bytes([0b10001000, 0b00000001]), EncodePop([4, 15])) + self.assertEqual(bytes([0b10000100, 0b00011000]), EncodePop([7, 8, 14])) + self.assertEqual(bytes([0b10000111, 0b11111111]), + EncodePop([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14])) + self.assertEqual(bytes([0b10000100, 0b10111111]), + EncodePop([4, 5, 6, 7, 8, 9, 11, 14])) + + +class _TestEncodeAddressUnwind(unittest.TestCase): + def testReturnToLr(self): + self.assertEqual( + bytes([0b10110000]), + EncodeAddressUnwind( + AddressUnwind(address_offset=0, + unwind_type=UnwindType.RETURN_TO_LR, + sp_offset=0, + registers=tuple()))) + + def testNoAction(self): + self.assertEqual( + bytes([]), + EncodeAddressUnwind( + AddressUnwind(address_offset=0, + unwind_type=UnwindType.NO_ACTION, + sp_offset=0, + registers=tuple()))) + + def testUpdateSpAndOrPopRegisters(self): + self.assertEqual( + bytes([0b0, 0b10101000]), + EncodeAddressUnwind( + AddressUnwind(address_offset=0, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=0x4, + registers=(4, 14)))) + + self.assertEqual( + bytes([0b0]), + EncodeAddressUnwind( + AddressUnwind(address_offset=0, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=0x4, + registers=tuple()))) + + self.assertEqual( + bytes([0b10101000]), + EncodeAddressUnwind( + AddressUnwind(address_offset=0, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=0, + registers=(4, 14)))) + + def testRestoreSpFromRegisters(self): + self.assertEqual( + bytes([0b10010100, 0b0]), + EncodeAddressUnwind( + AddressUnwind(address_offset=0, + unwind_type=UnwindType.RESTORE_SP_FROM_REGISTER, + sp_offset=0x4, + registers=(4, )))) + + self.assertEqual( + bytes([0b10010100]), + EncodeAddressUnwind( + AddressUnwind(address_offset=0, + unwind_type=UnwindType.RESTORE_SP_FROM_REGISTER, + sp_offset=0, + registers=(4, )))) + + self.assertRaises( + AssertionError, lambda: EncodeAddressUnwind( + AddressUnwind(address_offset=0, + unwind_type=UnwindType.RESTORE_SP_FROM_REGISTER, + sp_offset=0x4, + registers=tuple()))) + + +class _TestEncodeAddressUnwinds(unittest.TestCase): + def testEncodeOrder(self): + address_unwind1 = AddressUnwind(address_offset=0, + unwind_type=UnwindType.RETURN_TO_LR, + sp_offset=0, + registers=tuple()) + address_unwind2 = AddressUnwind( + address_offset=4, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=0, + registers=(4, 14)) + + def MockEncodeAddressUnwind(address_unwind): + return { + address_unwind1: bytes([1]), + address_unwind2: bytes([2]), + }[address_unwind] + + with unittest.mock.patch("create_unwind_table.EncodeAddressUnwind", + side_effect=MockEncodeAddressUnwind): + encoded_unwinds = EncodeAddressUnwinds((address_unwind1, address_unwind2)) + self.assertEqual(( + EncodedAddressUnwind(4, + bytes([2]) + bytes([1])), + EncodedAddressUnwind(0, bytes([1])), + ), encoded_unwinds) + + +PAGE_SIZE = 1 << 17 + + +class _TestEncodeFunctionUnwinds(unittest.TestCase): + @unittest.mock.patch('create_unwind_table.EncodeAddressUnwinds') + def testEncodeOrder(self, MockEncodeAddressUnwinds): + MockEncodeAddressUnwinds.return_value = EncodedAddressUnwind(0, b'\x00') + + self.assertEqual([ + EncodedFunctionUnwind(page_number=0, + page_offset=0, + address_unwinds=EncodedAddressUnwind(0, b'\x00')), + EncodedFunctionUnwind(page_number=0, + page_offset=100 >> 1, + address_unwinds=EncodedAddressUnwind(0, b'\x00')), + ], + list( + EncodeFunctionUnwinds([ + FunctionUnwind(address=100, + size=PAGE_SIZE - 100, + address_unwinds=()), + FunctionUnwind( + 
address=0, size=100, address_unwinds=()), + ], + text_section_start_address=0))) + + @unittest.mock.patch('create_unwind_table.EncodeAddressUnwinds') + def testFillingGaps(self, MockEncodeAddressUnwinds): + MockEncodeAddressUnwinds.return_value = EncodedAddressUnwind(0, b'\x00') + + self.assertEqual([ + EncodedFunctionUnwind(page_number=0, + page_offset=0, + address_unwinds=EncodedAddressUnwind(0, b'\x00')), + EncodedFunctionUnwind( + page_number=0, page_offset=50 >> 1, address_unwinds=TRIVIAL_UNWIND), + EncodedFunctionUnwind(page_number=0, + page_offset=100 >> 1, + address_unwinds=EncodedAddressUnwind(0, b'\x00')), + ], + list( + EncodeFunctionUnwinds([ + FunctionUnwind( + address=0, size=50, address_unwinds=()), + FunctionUnwind(address=100, + size=PAGE_SIZE - 100, + address_unwinds=()), + ], + text_section_start_address=0))) + + @unittest.mock.patch('create_unwind_table.EncodeAddressUnwinds') + def testFillingLastPage(self, MockEncodeAddressUnwinds): + MockEncodeAddressUnwinds.return_value = EncodedAddressUnwind(0, b'\x00') + + self.assertEqual( + [ + EncodedFunctionUnwind(page_number=0, + page_offset=0, + address_unwinds=EncodedAddressUnwind( + 0, b'\x00')), + EncodedFunctionUnwind(page_number=0, + page_offset=100 >> 1, + address_unwinds=EncodedAddressUnwind( + 0, b'\x00')), + EncodedFunctionUnwind(page_number=0, + page_offset=200 >> 1, + address_unwinds=REFUSE_TO_UNWIND), + ], + list( + EncodeFunctionUnwinds([ + FunctionUnwind(address=1100, size=100, address_unwinds=()), + FunctionUnwind(address=1200, size=100, address_unwinds=()), + ], + text_section_start_address=1100))) + + @unittest.mock.patch('create_unwind_table.EncodeAddressUnwinds') + def testFillingFirstPage(self, MockEncodeAddressUnwinds): + MockEncodeAddressUnwinds.return_value = EncodedAddressUnwind(0, b'\x00') + + self.assertEqual( + [ + EncodedFunctionUnwind( + page_number=0, page_offset=0, address_unwinds=REFUSE_TO_UNWIND), + EncodedFunctionUnwind(page_number=0, + page_offset=100 >> 1, + address_unwinds=EncodedAddressUnwind( + 0, b'\x00')), + EncodedFunctionUnwind(page_number=0, + page_offset=200 >> 1, + address_unwinds=EncodedAddressUnwind( + 0, b'\x00')), + EncodedFunctionUnwind(page_number=0, + page_offset=300 >> 1, + address_unwinds=REFUSE_TO_UNWIND), + ], + list( + EncodeFunctionUnwinds([ + FunctionUnwind(address=1100, size=100, address_unwinds=()), + FunctionUnwind(address=1200, size=100, address_unwinds=()), + ], + text_section_start_address=1000))) + + @unittest.mock.patch('create_unwind_table.EncodeAddressUnwinds') + def testOverlappedFunctions(self, _): + self.assertRaises( + # Eval generator with `list`. Otherwise the code will not execute. 
+ AssertionError, + lambda: list( + EncodeFunctionUnwinds([ + FunctionUnwind(address=0, size=100, address_unwinds=()), + FunctionUnwind(address=50, size=100, address_unwinds=()), + ], + text_section_start_address=0))) + + +class _TestNullParser(unittest.TestCase): + def testCfaChange(self): + parser = NullParser() + match = parser.GetBreakpadInstructionsRegex().search('.cfa: sp 0 + .ra: lr') + self.assertIsNotNone(match) + + address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=0, + cfa_sp_offset=0, + match=match) + + self.assertEqual(0, new_cfa_sp_offset) + self.assertEqual( + AddressUnwind(address_offset=0, + unwind_type=UnwindType.RETURN_TO_LR, + sp_offset=0, + registers=()), address_unwind) + + +class _TestPushOrSubSpParser(unittest.TestCase): + def testCfaChange(self): + parser = PushOrSubSpParser() + match = parser.GetBreakpadInstructionsRegex().search('.cfa: sp 4 +') + self.assertIsNotNone(match) + + address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20, + cfa_sp_offset=0, + match=match) + + self.assertEqual(4, new_cfa_sp_offset) + self.assertEqual( + AddressUnwind(address_offset=20, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=4, + registers=()), address_unwind) + + def testCfaAndRaChangePopOnly(self): + parser = PushOrSubSpParser() + match = parser.GetBreakpadInstructionsRegex().search( + '.cfa: sp 4 + .ra: .cfa -4 + ^') + self.assertIsNotNone(match) + + address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20, + cfa_sp_offset=0, + match=match) + + self.assertEqual(4, new_cfa_sp_offset) + self.assertEqual( + AddressUnwind(address_offset=20, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=0, + registers=(14, )), address_unwind) + + def testCfaAndRaChangePopAndSpUpdate(self): + parser = PushOrSubSpParser() + match = parser.GetBreakpadInstructionsRegex().search( + '.cfa: sp 8 + .ra: .cfa -4 + ^') + self.assertIsNotNone(match) + + address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20, + cfa_sp_offset=0, + match=match) + + self.assertEqual(8, new_cfa_sp_offset) + self.assertEqual( + AddressUnwind(address_offset=20, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=4, + registers=(14, )), address_unwind) + + def testCfaAndRaAndRegistersChangePopOnly(self): + parser = PushOrSubSpParser() + match = parser.GetBreakpadInstructionsRegex().search( + '.cfa: sp 12 + .ra: .cfa -4 + ^ r4: .cfa -12 + ^ r7: .cfa -8 + ^') + self.assertIsNotNone(match) + + address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20, + cfa_sp_offset=0, + match=match) + + self.assertEqual(12, new_cfa_sp_offset) + self.assertEqual( + AddressUnwind(address_offset=20, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=0, + registers=(4, 7, 14)), address_unwind) + + def testCfaAndRaAndRegistersChangePopAndSpUpdate(self): + parser = PushOrSubSpParser() + match = parser.GetBreakpadInstructionsRegex().search( + '.cfa: sp 16 + .ra: .cfa -4 + ^ r4: .cfa -12 + ^ r7: .cfa -8 + ^') + self.assertIsNotNone(match) + + address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20, + cfa_sp_offset=0, + match=match) + + self.assertEqual(16, new_cfa_sp_offset) + self.assertEqual( + AddressUnwind(address_offset=20, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=4, + registers=(4, 7, 14)), address_unwind) + + def testRegistersChange(self): + parser = PushOrSubSpParser() + match = parser.GetBreakpadInstructionsRegex().search( + 
'r4: .cfa -8 + ^ r7: .cfa -4 + ^') + self.assertIsNotNone(match) + + address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20, + cfa_sp_offset=0, + match=match) + + self.assertEqual(0, new_cfa_sp_offset) + self.assertEqual( + AddressUnwind(address_offset=20, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=0, + registers=(4, 7)), address_unwind) + + def testCfaAndRegistersChange(self): + parser = PushOrSubSpParser() + match = parser.GetBreakpadInstructionsRegex().search( + '.cfa: sp 8 + r4: .cfa -8 + ^ r7: .cfa -4 + ^') + self.assertIsNotNone(match) + + address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20, + cfa_sp_offset=0, + match=match) + + self.assertEqual(8, new_cfa_sp_offset) + self.assertEqual( + AddressUnwind(address_offset=20, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=0, + registers=(4, 7)), address_unwind) + + def testRegistersOrdering(self): + parser = PushOrSubSpParser() + match = parser.GetBreakpadInstructionsRegex().search( + 'r10: .cfa -8 + ^ r7: .cfa -4 + ^') + self.assertIsNotNone(match) + + address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20, + cfa_sp_offset=0, + match=match) + + self.assertEqual(0, new_cfa_sp_offset) + self.assertEqual( + AddressUnwind(address_offset=20, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=0, + registers=(7, 10)), address_unwind) + + def testPoppingCallerSaveRegisters(self): + """Regression test for pop unwinds that encode caller-save registers. + + Callee-save registers: r0 ~ r3. + """ + parser = PushOrSubSpParser() + match = parser.GetBreakpadInstructionsRegex().search( + '.cfa: sp 16 + .ra: .cfa -4 + ^ ' + 'r3: .cfa -16 + ^ r4: .cfa -12 + ^ r5: .cfa -8 + ^') + + self.assertIsNotNone(match) + + address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20, + cfa_sp_offset=0, + match=match) + + self.assertEqual(16, new_cfa_sp_offset) + self.assertEqual( + AddressUnwind(address_offset=20, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=4, + registers=(4, 5, 14)), address_unwind) + + +class _TestVPushParser(unittest.TestCase): + def testCfaAndRegistersChange(self): + parser = VPushParser() + match = parser.GetBreakpadInstructionsRegex().search( + '.cfa: sp 40 + unnamed_register264: .cfa -40 + ^ ' + 'unnamed_register265: .cfa -32 + ^') + self.assertIsNotNone(match) + + address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20, + cfa_sp_offset=24, + match=match) + + self.assertEqual(40, new_cfa_sp_offset) + self.assertEqual( + AddressUnwind(address_offset=20, + unwind_type=UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, + sp_offset=16, + registers=()), address_unwind) + + def testRegistersChange(self): + parser = VPushParser() + match = parser.GetBreakpadInstructionsRegex().search( + 'unnamed_register264: .cfa -40 + ^ unnamed_register265: .cfa -32 + ^') + self.assertIsNotNone(match) + + address_unwind, new_cfa_sp_offset = parser.ParseFromMatch(address_offset=20, + cfa_sp_offset=24, + match=match) + + self.assertEqual(24, new_cfa_sp_offset) + self.assertEqual( + AddressUnwind(address_offset=20, + unwind_type=UnwindType.NO_ACTION, + sp_offset=0, + registers=()), address_unwind) + + +class _TestStoreSpParser(unittest.TestCase): + def testCfaAndRegistersChange(self): + parser = StoreSpParser() + match = parser.GetBreakpadInstructionsRegex().search('.cfa: r7 8 +') + self.assertIsNotNone(match) + + address_unwind, new_cfa_sp_offset = 
parser.ParseFromMatch(address_offset=20, + cfa_sp_offset=12, + match=match) + + self.assertEqual(8, new_cfa_sp_offset) + self.assertEqual( + AddressUnwind(address_offset=20, + unwind_type=UnwindType.RESTORE_SP_FROM_REGISTER, + sp_offset=-4, + registers=(7, )), address_unwind) + + +class _TestEncodeUnwindInstructionTable(unittest.TestCase): + def testSingleEntry(self): + table, offsets = EncodeUnwindInstructionTable([bytes([3])]) + + self.assertEqual(bytes([3]), table) + self.assertDictEqual({ + bytes([3]): 0, + }, offsets) + + def testMultipleEntries(self): + self.maxDiff = None + # Result should be sorted by score descending. + table, offsets = EncodeUnwindInstructionTable([ + bytes([1, 2, 3]), + bytes([0, 3]), + bytes([3]), + ]) + self.assertEqual(bytes([3, 0, 3, 1, 2, 3]), table) + self.assertDictEqual( + { + bytes([1, 2, 3]): 3, # score = 1 / 3 = 0.67 + bytes([0, 3]): 1, # score = 1 / 2 = 0.5 + bytes([3]): 0, # score = 1 / 1 = 1 + }, + offsets) + + # When scores are same, sort by sequence descending. + table, offsets = EncodeUnwindInstructionTable([ + bytes([3]), + bytes([0, 3]), + bytes([0, 3]), + bytes([1, 2, 3]), + bytes([1, 2, 3]), + bytes([1, 2, 3]), + ]) + self.assertEqual(bytes([3, 1, 2, 3, 0, 3]), table) + self.assertDictEqual( + { + bytes([3]): 0, # score = 1 / 1 = 1 + bytes([1, 2, 3]): 1, # score = 3 / 3 = 1 + bytes([0, 3]): 4, # score = 2 / 2 = 1 + }, + offsets) + + +class _TestFunctionOffsetTable(unittest.TestCase): + def testSingleEntry(self): + self.maxDiff = None + complete_instruction_sequence0 = bytes([3]) + complete_instruction_sequence1 = bytes([1, 3]) + + sequence1 = ( + EncodedAddressUnwind(0x400, complete_instruction_sequence1), + EncodedAddressUnwind(0x0, complete_instruction_sequence0), + ) + + address_unwind_sequences = [sequence1] + + table, offsets = EncodeFunctionOffsetTable( + address_unwind_sequences, { + complete_instruction_sequence0: 52, + complete_instruction_sequence1: 50, + }) + + self.assertEqual( + bytes([ + # (0x200, 50) + 128, + 4, + 50, + # (0, 52) + 0, + 52, + ]), + table) + + self.assertDictEqual({ + sequence1: 0, + }, offsets) + + def testMultipleEntry(self): + self.maxDiff = None + complete_instruction_sequence0 = bytes([3]) + complete_instruction_sequence1 = bytes([1, 3]) + complete_instruction_sequence2 = bytes([2, 3]) + + sequence1 = ( + EncodedAddressUnwind(0x20, complete_instruction_sequence1), + EncodedAddressUnwind(0x0, complete_instruction_sequence0), + ) + sequence2 = ( + EncodedAddressUnwind(0x400, complete_instruction_sequence2), + EncodedAddressUnwind(0x0, complete_instruction_sequence0), + ) + address_unwind_sequences = [sequence1, sequence2] + + table, offsets = EncodeFunctionOffsetTable( + address_unwind_sequences, { + complete_instruction_sequence0: 52, + complete_instruction_sequence1: 50, + complete_instruction_sequence2: 80, + }) + + self.assertEqual( + bytes([ + # (0x10, 50) + 0x10, + 50, + # (0, 52) + 0, + 52, + # (0x200, 80) + 128, + 4, + 80, + # (0, 52) + 0, + 52, + ]), + table) + + self.assertDictEqual({ + sequence1: 0, + sequence2: 4, + }, offsets) + + def testDuplicatedEntry(self): + self.maxDiff = None + complete_instruction_sequence0 = bytes([3]) + complete_instruction_sequence1 = bytes([1, 3]) + complete_instruction_sequence2 = bytes([2, 3]) + + sequence1 = ( + EncodedAddressUnwind(0x20, complete_instruction_sequence1), + EncodedAddressUnwind(0x0, complete_instruction_sequence0), + ) + sequence2 = ( + EncodedAddressUnwind(0x400, complete_instruction_sequence2), + EncodedAddressUnwind(0x0, 
complete_instruction_sequence0), + ) + sequence3 = sequence1 + + address_unwind_sequences = [sequence1, sequence2, sequence3] + + table, offsets = EncodeFunctionOffsetTable( + address_unwind_sequences, { + complete_instruction_sequence0: 52, + complete_instruction_sequence1: 50, + complete_instruction_sequence2: 80, + }) + + self.assertEqual( + bytes([ + # (0x10, 50) + 0x10, + 50, + # (0, 52) + 0, + 52, + # (0x200, 80) + 128, + 4, + 80, + # (0, 52) + 0, + 52, + ]), + table) + + self.assertDictEqual({ + sequence1: 0, + sequence2: 4, + }, offsets) + + +class _TestEncodePageTableAndFunctionTable(unittest.TestCase): + def testMultipleFunctionUnwinds(self): + address_unwind_sequence0 = ( + EncodedAddressUnwind(0x10, bytes([0, 3])), + EncodedAddressUnwind(0x0, bytes([3])), + ) + address_unwind_sequence1 = ( + EncodedAddressUnwind(0x10, bytes([1, 3])), + EncodedAddressUnwind(0x0, bytes([3])), + ) + address_unwind_sequence2 = ( + EncodedAddressUnwind(0x200, bytes([2, 3])), + EncodedAddressUnwind(0x0, bytes([3])), + ) + + function_unwinds = [ + EncodedFunctionUnwind(page_number=0, + page_offset=0, + address_unwinds=address_unwind_sequence0), + EncodedFunctionUnwind(page_number=0, + page_offset=0x8000, + address_unwinds=address_unwind_sequence1), + EncodedFunctionUnwind(page_number=1, + page_offset=0x8000, + address_unwinds=address_unwind_sequence2), + ] + + function_offset_table_offsets = { + address_unwind_sequence0: 0x100, + address_unwind_sequence1: 0x200, + address_unwind_sequence2: 0x300, + } + + page_table, function_table = EncodePageTableAndFunctionTable( + function_unwinds, function_offset_table_offsets) + + self.assertEqual(2 * 4, len(page_table)) + self.assertEqual((0, 2), struct.unpack('2I', page_table)) + + self.assertEqual(6 * 2, len(function_table)) + self.assertEqual((0, 0x100, 0x8000, 0x200, 0x8000, 0x300), + struct.unpack('6H', function_table)) + + def testMultiPageFunction(self): + address_unwind_sequence0 = ( + EncodedAddressUnwind(0x10, bytes([0, 3])), + EncodedAddressUnwind(0x0, bytes([3])), + ) + address_unwind_sequence1 = ( + EncodedAddressUnwind(0x10, bytes([1, 3])), + EncodedAddressUnwind(0x0, bytes([3])), + ) + address_unwind_sequence2 = ( + EncodedAddressUnwind(0x200, bytes([2, 3])), + EncodedAddressUnwind(0x0, bytes([3])), + ) + + function_unwinds = [ + EncodedFunctionUnwind(page_number=0, + page_offset=0, + address_unwinds=address_unwind_sequence0), + # Large function. 
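+        # This function starts on page 0 and the next function does not start
+        # until page 4, so pages 1-3 contain no function start. In the
+        # expected page table below, those pages share the function-table
+        # index (2) of the page 4 function.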
+ EncodedFunctionUnwind(page_number=0, + page_offset=0x8000, + address_unwinds=address_unwind_sequence1), + EncodedFunctionUnwind(page_number=4, + page_offset=0x8000, + address_unwinds=address_unwind_sequence2), + ] + + function_offset_table_offsets = { + address_unwind_sequence0: 0x100, + address_unwind_sequence1: 0x200, + address_unwind_sequence2: 0x300, + } + + page_table, function_table = EncodePageTableAndFunctionTable( + function_unwinds, function_offset_table_offsets) + + self.assertEqual(5 * 4, len(page_table)) + self.assertEqual((0, 2, 2, 2, 2), struct.unpack('5I', page_table)) + + self.assertEqual(6 * 2, len(function_table)) + self.assertEqual((0, 0x100, 0x8000, 0x200, 0x8000, 0x300), + struct.unpack('6H', function_table)) + + +class MockReturnParser(UnwindInstructionsParser): + def GetBreakpadInstructionsRegex(self): + return re.compile(r'^RETURN$') + + def ParseFromMatch(self, address_offset, cfa_sp_offset, match): + return AddressUnwind(address_offset, UnwindType.RETURN_TO_LR, 0, ()), 0 + + +class MockEpilogueUnwindParser(UnwindInstructionsParser): + def GetBreakpadInstructionsRegex(self): + return re.compile(r'^EPILOGUE_UNWIND$') + + def ParseFromMatch(self, address_offset, cfa_sp_offset, match): + return AddressUnwind(address_offset, + UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, 0, ()), -100 + + +class MockWildcardParser(UnwindInstructionsParser): + def GetBreakpadInstructionsRegex(self): + return re.compile(r'.*') + + def ParseFromMatch(self, address_offset, cfa_sp_offset, match): + return AddressUnwind(address_offset, + UnwindType.UPDATE_SP_AND_OR_POP_REGISTERS, 0, ()), -200 + + +class _TestParseAddressCfi(unittest.TestCase): + def testSuccessParse(self): + address_unwind = AddressUnwind( + address_offset=0x300, + unwind_type=UnwindType.RETURN_TO_LR, + sp_offset=0, + registers=(), + ) + + self.assertEqual((address_unwind, False, 0), + ParseAddressCfi(AddressCfi(address=0x800, + unwind_instructions='RETURN'), + function_start_address=0x500, + parsers=(MockReturnParser(), ), + prev_cfa_sp_offset=0)) + + def testUnhandledAddress(self): + self.assertEqual((None, False, 100), + ParseAddressCfi(AddressCfi(address=0x800, + unwind_instructions='UNKNOWN'), + function_start_address=0x500, + parsers=(MockReturnParser(), ), + prev_cfa_sp_offset=100)) + + def testEpilogueUnwind(self): + self.assertEqual( + (None, True, -100), + ParseAddressCfi(AddressCfi(address=0x800, + unwind_instructions='EPILOGUE_UNWIND'), + function_start_address=0x500, + parsers=(MockEpilogueUnwindParser(), ), + prev_cfa_sp_offset=100)) + + def testParsePrecedence(self): + address_unwind = AddressUnwind( + address_offset=0x300, + unwind_type=UnwindType.RETURN_TO_LR, + sp_offset=0, + registers=(), + ) + + self.assertEqual( + (address_unwind, False, 0), + ParseAddressCfi(AddressCfi(address=0x800, unwind_instructions='RETURN'), + function_start_address=0x500, + parsers=(MockReturnParser(), MockWildcardParser()), + prev_cfa_sp_offset=0)) + + +class _TestGenerateUnwinds(unittest.TestCase): + def testSuccessUnwind(self): + self.assertEqual( + [ + FunctionUnwind(address=0x100, + size=1024, + address_unwinds=( + AddressUnwind( + address_offset=0x0, + unwind_type=UnwindType.RETURN_TO_LR, + sp_offset=0, + registers=(), + ), + AddressUnwind( + address_offset=0x200, + unwind_type=UnwindType.RETURN_TO_LR, + sp_offset=0, + registers=(), + ), + )) + ], + list( + GenerateUnwinds([ + FunctionCfi( + size=1024, + address_cfi=( + AddressCfi(address=0x100, unwind_instructions='RETURN'), + AddressCfi(address=0x300, 
unwind_instructions='RETURN'), + )) + ], + parsers=[MockReturnParser()]))) + + def testUnhandledAddress(self): + self.assertEqual( + [ + FunctionUnwind(address=0x100, + size=1024, + address_unwinds=(AddressUnwind( + address_offset=0x0, + unwind_type=UnwindType.RETURN_TO_LR, + sp_offset=0, + registers=(), + ), )) + ], + list( + GenerateUnwinds([ + FunctionCfi(size=1024, + address_cfi=( + AddressCfi(address=0x100, + unwind_instructions='RETURN'), + AddressCfi(address=0x300, + unwind_instructions='UNKNOWN'), + )) + ], + parsers=[MockReturnParser()]))) + + def testEpilogueUnwind(self): + self.assertEqual( + [ + FunctionUnwind(address=0x100, + size=1024, + address_unwinds=(AddressUnwind( + address_offset=0x0, + unwind_type=UnwindType.RETURN_TO_LR, + sp_offset=0, + registers=(), + ), )) + ], + list( + GenerateUnwinds([ + FunctionCfi( + size=1024, + address_cfi=( + AddressCfi(address=0x100, unwind_instructions='RETURN'), + AddressCfi(address=0x300, + unwind_instructions='EPILOGUE_UNWIND'), + )) + ], + parsers=[ + MockReturnParser(), + MockEpilogueUnwindParser() + ]))) + + def testInvalidInitialUnwindInstructionAsserts(self): + self.assertRaises( + AssertionError, lambda: list( + GenerateUnwinds([ + FunctionCfi(size=1024, + address_cfi=( + AddressCfi(address=0x100, + unwind_instructions='UNKNOWN'), + AddressCfi(address=0x200, + unwind_instructions='RETURN'), + )) + ], + parsers=[MockReturnParser()]))) + + +class _TestEncodeUnwindInfo(unittest.TestCase): + def testEncodeTables(self): + page_table = struct.pack('I', 0) + function_table = struct.pack('4H', 1, 2, 3, 4) + function_offset_table = bytes([1, 2]) + unwind_instruction_table = bytes([1, 2, 3]) + + unwind_info = EncodeUnwindInfo( + page_table, + function_table, + function_offset_table, + unwind_instruction_table, + ) + + self.assertEqual( + 32 + len(page_table) + len(function_table) + + len(function_offset_table) + len(unwind_instruction_table), + len(unwind_info)) + # Header. + self.assertEqual((32, 1, 36, 2, 44, 2, 46, 3), + struct.unpack('8I', unwind_info[:32])) + # Body. + self.assertEqual( + page_table + function_table + function_offset_table + + unwind_instruction_table, unwind_info[32:]) + + def testUnalignedTables(self): + self.assertRaises( + AssertionError, lambda: EncodeUnwindInfo(bytes([1]), b'', b'', b'')) + self.assertRaises( + AssertionError, lambda: EncodeUnwindInfo(b'', bytes([1]), b'', b'')) + + +class _TestGenerateUnwindTables(unittest.TestCase): + def testGenerateUnwindTables(self): + """This is an integration test that hooks everything together. """ + address_unwind_sequence0 = ( + EncodedAddressUnwind(0x20, bytes([0, 0xb0])), + EncodedAddressUnwind(0x0, bytes([0xb0])), + ) + address_unwind_sequence1 = ( + EncodedAddressUnwind(0x20, bytes([1, 0xb0])), + EncodedAddressUnwind(0x0, bytes([0xb0])), + ) + address_unwind_sequence2 = ( + EncodedAddressUnwind(0x200, bytes([2, 0xb0])), + EncodedAddressUnwind(0x0, bytes([0xb0])), + ) + + (page_table, function_table, function_offset_table, + unwind_instruction_table) = GenerateUnwindTables([ + EncodedFunctionUnwind(page_number=0, + page_offset=0, + address_unwinds=TRIVIAL_UNWIND), + EncodedFunctionUnwind(page_number=0, + page_offset=0x1000, + address_unwinds=address_unwind_sequence0), + EncodedFunctionUnwind(page_number=1, + page_offset=0x2000, + address_unwinds=address_unwind_sequence1), + EncodedFunctionUnwind(page_number=3, + page_offset=0x1000, + address_unwinds=address_unwind_sequence2), + ]) + + # Complete instruction sequences and their frequencies. 
+ # [0xb0]: 4 + # [0, 0xb0]: 1 + # [1, 0xb0]: 1 + # [2, 0xb0]: 1 + self.assertEqual(bytes([0xb0, 2, 0xb0, 1, 0xb0, 0, 0xb0]), + unwind_instruction_table) + + self.assertEqual( + bytes([ + # Trivial unwind. + 0, + 0, + # Address unwind sequence 0. + 0x10, + 5, + 0, + 0, + # Address unwind sequence 1. + 0x10, + 3, + 0, + 0, + # Address unwind sequence 2. + 0x80, + 2, + 1, + 0, + 0, + ]), + function_offset_table) + + self.assertEqual(8 * 2, len(function_table)) + self.assertEqual((0, 0, 0x1000, 2, 0x2000, 6, 0x1000, 10), + struct.unpack('8H', function_table)) + + self.assertEqual(4 * 4, len(page_table)) + self.assertEqual((0, 2, 3, 3), struct.unpack('4I', page_table)) diff --git a/android/gyp/dex.py b/android/gyp/dex.py new file mode 100755 index 000000000000..a7f024a277d5 --- /dev/null +++ b/android/gyp/dex.py @@ -0,0 +1,538 @@ +#!/usr/bin/env python3 +# +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import collections +import logging +import os +import re +import shutil +import shlex +import sys +import tempfile +import zipfile + +from util import build_utils +from util import md5_check +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers + + +_DEX_XMX = '2G' # Increase this when __final_dex OOMs. + +_IGNORE_WARNINGS = ( + # Warning: Running R8 version main (build engineering), which cannot be + # represented as a semantic version. Using an artificial version newer than + # any known version for selecting Proguard configurations embedded under + # META-INF/. This means that all rules with a '-upto-' qualifier will be + # excluded and all rules with a -from- qualifier will be included. + r'Running R8 version main', + # E.g. Triggers for weblayer_instrumentation_test_apk since both it and its + # apk_under_test have no shared_libraries. + # https://crbug.com/1364192 << To fix this in a better way. + r'Missing class org.chromium.build.NativeLibraries', + # Caused by internal protobuf package: https://crbug.com/1183971 + r'referenced from: com.google.protobuf.GeneratedMessageLite$GeneratedExtension', # pylint: disable=line-too-long + # Desugaring configs may occasionally not match types in our program. This + # may happen temporarily until we move over to the new desugared library + # json flags. See crbug.com/1302088 - this should be removed when this bug + # is fixed. + r'Warning: Specification conversion: The following', + # Caused by protobuf runtime using -identifiernamestring in a way that + # doesn't work with R8. Looks like: + # Rule matches the static final field `...`, which may have been inlined... + # com.google.protobuf.*GeneratedExtensionRegistryLite { + # static java.lang.String CONTAINING_TYPE_*; + # } + r'GeneratedExtensionRegistryLite.CONTAINING_TYPE_', + # Relevant for R8 when optimizing an app that doesn't use protobuf. + r'Ignoring -shrinkunusedprotofields since the protobuf-lite runtime is', + # Ignore Unused Rule Warnings in third_party libraries. + r'/third_party/.*Proguard configuration rule does not match anything', + # Ignore Unused Rule Warnings for system classes (aapt2 generates these). + r'Proguard configuration rule does not match anything:.*class android\.', + # TODO(crbug.com/1303951): Don't ignore all such warnings. + r'Proguard configuration rule does not match anything:', + # TODO(agrieve): Remove once we update to U SDK. 
+ r'OnBackAnimationCallback', +) + +_SKIPPED_CLASS_FILE_NAMES = ( + 'module-info.class', # Explicitly skipped by r8/utils/FileUtils#isClassFile +) + + +def _ParseArgs(args): + args = build_utils.ExpandFileArgs(args) + parser = argparse.ArgumentParser() + + action_helpers.add_depfile_arg(parser) + parser.add_argument('--output', required=True, help='Dex output path.') + parser.add_argument( + '--class-inputs', + action='append', + help='GN-list of .jars with .class files.') + parser.add_argument( + '--class-inputs-filearg', + action='append', + help='GN-list of .jars with .class files (added to depfile).') + parser.add_argument( + '--dex-inputs', action='append', help='GN-list of .jars with .dex files.') + parser.add_argument( + '--dex-inputs-filearg', + action='append', + help='GN-list of .jars with .dex files (added to depfile).') + parser.add_argument( + '--incremental-dir', + help='Path of directory to put intermediate dex files.') + parser.add_argument('--main-dex-rules-path', + action='append', + help='Path to main dex rules for multidex.') + parser.add_argument( + '--multi-dex', + action='store_true', + help='Allow multiple dex files within output.') + parser.add_argument('--library', + action='store_true', + help='Allow numerous dex files within output.') + parser.add_argument('--r8-jar-path', required=True, help='Path to R8 jar.') + parser.add_argument('--skip-custom-d8', + action='store_true', + help='When rebuilding the CustomD8 jar, this may be ' + 'necessary to avoid incompatibility with the new r8 ' + 'jar.') + parser.add_argument('--custom-d8-jar-path', + required=True, + help='Path to our customized d8 jar.') + parser.add_argument('--desugar-dependencies', + help='Path to store desugar dependencies.') + parser.add_argument('--desugar', action='store_true') + parser.add_argument( + '--bootclasspath', + action='append', + help='GN-list of bootclasspath. Needed for --desugar') + parser.add_argument('--show-desugar-default-interface-warnings', + action='store_true', + help='Enable desugaring warnings.') + parser.add_argument( + '--classpath', + action='append', + help='GN-list of full classpath. Needed for --desugar') + parser.add_argument( + '--release', + action='store_true', + help='Run D8 in release mode. Release mode maximises main dex and ' + 'deletes non-essential line number information (vs debug which minimizes ' + 'main dex and keeps all line number information, and then some.') + parser.add_argument( + '--min-api', help='Minimum Android API level compatibility.') + parser.add_argument('--force-enable-assertions', + action='store_true', + help='Forcefully enable javac generated assertion code.') + parser.add_argument('--assertion-handler', + help='The class name of the assertion handler class.') + parser.add_argument('--warnings-as-errors', + action='store_true', + help='Treat all warnings as errors.') + parser.add_argument('--dump-inputs', + action='store_true', + help='Use when filing D8 bugs to capture inputs.' 
+ ' Stores inputs to d8inputs.zip') + options = parser.parse_args(args) + + if options.main_dex_rules_path and not options.multi_dex: + parser.error('--main-dex-rules-path is unused if multidex is not enabled') + + if options.force_enable_assertions and options.assertion_handler: + parser.error('Cannot use both --force-enable-assertions and ' + '--assertion-handler') + + options.class_inputs = action_helpers.parse_gn_list(options.class_inputs) + options.class_inputs_filearg = action_helpers.parse_gn_list( + options.class_inputs_filearg) + options.bootclasspath = action_helpers.parse_gn_list(options.bootclasspath) + options.classpath = action_helpers.parse_gn_list(options.classpath) + options.dex_inputs = action_helpers.parse_gn_list(options.dex_inputs) + options.dex_inputs_filearg = action_helpers.parse_gn_list( + options.dex_inputs_filearg) + + return options + + +def CreateStderrFilter(show_desugar_default_interface_warnings): + def filter_stderr(output): + # Set this when debugging R8 output. + if os.environ.get('R8_SHOW_ALL_OUTPUT', '0') != '0': + return output + + warnings = re.split(r'^(?=Warning|Error)', output, flags=re.MULTILINE) + preamble, *warnings = warnings + + patterns = list(_IGNORE_WARNINGS) + + # Missing deps can happen for prebuilts that are missing transitive deps + # and have set enable_bytecode_checks=false. + if not show_desugar_default_interface_warnings: + patterns += ['default or static interface methods'] + + combined_pattern = '|'.join(re.escape(p) for p in patterns) + preamble = build_utils.FilterLines(preamble, combined_pattern) + + compiled_re = re.compile(combined_pattern, re.DOTALL) + warnings = [w for w in warnings if not compiled_re.search(w)] + + return preamble + ''.join(warnings) + + return filter_stderr + + +def _RunD8(dex_cmd, input_paths, output_path, warnings_as_errors, + show_desugar_default_interface_warnings): + dex_cmd = dex_cmd + ['--output', output_path] + input_paths + + stderr_filter = CreateStderrFilter(show_desugar_default_interface_warnings) + + is_debug = logging.getLogger().isEnabledFor(logging.DEBUG) + + # Avoid deleting the flag file when DEX_DEBUG is set in case the flag file + # needs to be examined after the build. + with tempfile.NamedTemporaryFile(mode='w', delete=not is_debug) as flag_file: + # Chosen arbitrarily. Needed to avoid command-line length limits. + MAX_ARGS = 50 + orig_dex_cmd = dex_cmd + if len(dex_cmd) > MAX_ARGS: + # Add all flags to D8 (anything after the first --) as well as all + # positional args at the end to the flag file. + for idx, cmd in enumerate(dex_cmd): + if cmd.startswith('--'): + flag_file.write('\n'.join(dex_cmd[idx:])) + flag_file.flush() + dex_cmd = dex_cmd[:idx] + dex_cmd.append('@' + flag_file.name) + break + + # stdout sometimes spams with things like: + # Stripped invalid locals information from 1 method. + try: + build_utils.CheckOutput(dex_cmd, + stderr_filter=stderr_filter, + fail_on_output=warnings_as_errors) + except Exception: + if orig_dex_cmd is not dex_cmd: + sys.stderr.write('Full command: ' + shlex.join(orig_dex_cmd) + '\n') + raise + + +def _ZipAligned(dex_files, output_path): + """Creates a .dex.jar with 4-byte aligned files. + + Args: + dex_files: List of dex files. + output_path: The output file in which to write the zip. 
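+
+  For example, dex_files ['a.dex', 'b.dex'] are written as the entries
+  'classes.dex' and 'classes2.dex': dex numbering in an archive starts at 1,
+  and the first dex file carries no number suffix.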
+ """ + with zipfile.ZipFile(output_path, 'w') as z: + for i, dex_file in enumerate(dex_files): + name = 'classes{}.dex'.format(i + 1 if i > 0 else '') + zip_helpers.add_to_zip_hermetic(z, name, src_path=dex_file, alignment=4) + + +def _CreateFinalDex(d8_inputs, output, tmp_dir, dex_cmd, options=None): + tmp_dex_output = os.path.join(tmp_dir, 'tmp_dex_output.zip') + needs_dexing = not all(f.endswith('.dex') for f in d8_inputs) + needs_dexmerge = output.endswith('.dex') or not (options and options.library) + if needs_dexing or needs_dexmerge: + if options and options.main_dex_rules_path: + for main_dex_rule in options.main_dex_rules_path: + dex_cmd = dex_cmd + ['--main-dex-rules', main_dex_rule] + + tmp_dex_dir = os.path.join(tmp_dir, 'tmp_dex_dir') + os.mkdir(tmp_dex_dir) + + _RunD8(dex_cmd, d8_inputs, tmp_dex_dir, + (not options or options.warnings_as_errors), + (options and options.show_desugar_default_interface_warnings)) + logging.debug('Performed dex merging') + + dex_files = [os.path.join(tmp_dex_dir, f) for f in os.listdir(tmp_dex_dir)] + + if output.endswith('.dex'): + if len(dex_files) > 1: + raise Exception('%d files created, expected 1' % len(dex_files)) + tmp_dex_output = dex_files[0] + else: + _ZipAligned(sorted(dex_files), tmp_dex_output) + else: + # Skip dexmerger. Just put all incrementals into the .jar individually. + _ZipAligned(sorted(d8_inputs), tmp_dex_output) + logging.debug('Quick-zipped %d files', len(d8_inputs)) + + # The dex file is complete and can be moved out of tmp_dir. + shutil.move(tmp_dex_output, output) + + +def _IntermediateDexFilePathsFromInputJars(class_inputs, incremental_dir): + """Returns a list of all intermediate dex file paths.""" + dex_files = [] + for jar in class_inputs: + with zipfile.ZipFile(jar, 'r') as z: + for subpath in z.namelist(): + if _IsClassFile(subpath): + subpath = subpath[:-5] + 'dex' + dex_files.append(os.path.join(incremental_dir, subpath)) + return dex_files + + +def _DeleteStaleIncrementalDexFiles(dex_dir, dex_files): + """Deletes intermediate .dex files that are no longer needed.""" + all_files = build_utils.FindInDirectory(dex_dir) + desired_files = set(dex_files) + for path in all_files: + if path not in desired_files: + os.unlink(path) + + +def _ParseDesugarDeps(desugar_dependencies_file): + # pylint: disable=line-too-long + """Returns a dict of dependent/dependency mapping parsed from the file. + + Example file format: + $ tail out/Debug/gen/base/base_java__dex.desugardeps + org/chromium/base/task/SingleThreadTaskRunnerImpl.class + <- org/chromium/base/task/SingleThreadTaskRunner.class + <- org/chromium/base/task/TaskRunnerImpl.class + org/chromium/base/task/TaskRunnerImpl.class + <- org/chromium/base/task/TaskRunner.class + org/chromium/base/task/TaskRunnerImplJni$1.class + <- obj/base/jni_java.turbine.jar:org/chromium/base/JniStaticTestMocker.class + org/chromium/base/task/TaskRunnerImplJni.class + <- org/chromium/base/task/TaskRunnerImpl$Natives.class + """ + # pylint: enable=line-too-long + dependents_from_dependency = collections.defaultdict(set) + if desugar_dependencies_file and os.path.exists(desugar_dependencies_file): + with open(desugar_dependencies_file, 'r') as f: + dependent = None + for line in f: + line = line.rstrip() + if line.startswith(' <- '): + dependency = line[len(' <- '):] + # Note that this is a reversed mapping from the one in CustomD8.java. 
+ dependents_from_dependency[dependency].add(dependent) + else: + dependent = line + return dependents_from_dependency + + +def _ComputeRequiredDesugarClasses(changes, desugar_dependencies_file, + class_inputs, classpath): + dependents_from_dependency = _ParseDesugarDeps(desugar_dependencies_file) + required_classes = set() + # Gather classes that need to be re-desugared from changes in the classpath. + for jar in classpath: + for subpath in changes.IterChangedSubpaths(jar): + dependency = '{}:{}'.format(jar, subpath) + required_classes.update(dependents_from_dependency[dependency]) + + for jar in class_inputs: + for subpath in changes.IterChangedSubpaths(jar): + required_classes.update(dependents_from_dependency[subpath]) + + return required_classes + + +def _IsClassFile(path): + if os.path.basename(path) in _SKIPPED_CLASS_FILE_NAMES: + return False + return path.endswith('.class') + + +def _ExtractClassFiles(changes, tmp_dir, class_inputs, required_classes_set): + classes_list = [] + for jar in class_inputs: + if changes: + changed_class_list = (set(changes.IterChangedSubpaths(jar)) + | required_classes_set) + predicate = lambda x: x in changed_class_list and _IsClassFile(x) + else: + predicate = _IsClassFile + + classes_list.extend( + build_utils.ExtractAll(jar, path=tmp_dir, predicate=predicate)) + return classes_list + + +def _CreateIntermediateDexFiles(changes, options, tmp_dir, dex_cmd): + # Create temporary directory for classes to be extracted to. + tmp_extract_dir = os.path.join(tmp_dir, 'tmp_extract_dir') + os.mkdir(tmp_extract_dir) + + # Do a full rebuild when changes occur in non-input files. + allowed_changed = set(options.class_inputs) + allowed_changed.update(options.dex_inputs) + allowed_changed.update(options.classpath) + strings_changed = changes.HasStringChanges() + non_direct_input_changed = next( + (p for p in changes.IterChangedPaths() if p not in allowed_changed), None) + + if strings_changed or non_direct_input_changed: + logging.debug('Full dex required: strings_changed=%s path_changed=%s', + strings_changed, non_direct_input_changed) + changes = None + + if changes is None: + required_desugar_classes_set = set() + else: + required_desugar_classes_set = _ComputeRequiredDesugarClasses( + changes, options.desugar_dependencies, options.class_inputs, + options.classpath) + logging.debug('Class files needing re-desugar: %d', + len(required_desugar_classes_set)) + class_files = _ExtractClassFiles(changes, tmp_extract_dir, + options.class_inputs, + required_desugar_classes_set) + logging.debug('Extracted class files: %d', len(class_files)) + + # If the only change is deleting a file, class_files will be empty. + if class_files: + # Dex necessary classes into intermediate dex files. + dex_cmd = dex_cmd + ['--intermediate', '--file-per-class-file'] + if options.desugar_dependencies and not options.skip_custom_d8: + # Adding os.sep to remove the entire prefix. + dex_cmd += ['--file-tmp-prefix', tmp_extract_dir + os.sep] + if changes is None and os.path.exists(options.desugar_dependencies): + # Since incremental dexing only ever adds to the desugar_dependencies + # file, whenever full dexes are required the .desugardeps files need to + # be manually removed. 
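+      # Removing it ensures the next incremental build starts from a freshly
+      # written file instead of appending to stale entries.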
+ os.unlink(options.desugar_dependencies) + _RunD8(dex_cmd, class_files, options.incremental_dir, + options.warnings_as_errors, + options.show_desugar_default_interface_warnings) + logging.debug('Dexed class files.') + + +def _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd): + logging.debug('_OnStaleMd5') + with build_utils.TempDir() as tmp_dir: + if options.incremental_dir: + # Create directory for all intermediate dex files. + if not os.path.exists(options.incremental_dir): + os.makedirs(options.incremental_dir) + + _DeleteStaleIncrementalDexFiles(options.incremental_dir, final_dex_inputs) + logging.debug('Stale files deleted') + _CreateIntermediateDexFiles(changes, options, tmp_dir, dex_cmd) + + _CreateFinalDex( + final_dex_inputs, options.output, tmp_dir, dex_cmd, options=options) + + +def MergeDexForIncrementalInstall(r8_jar_path, src_paths, dest_dex_jar, + min_api): + dex_cmd = build_utils.JavaCmd(xmx=_DEX_XMX) + [ + '-cp', + r8_jar_path, + 'com.android.tools.r8.D8', + '--min-api', + min_api, + ] + with build_utils.TempDir() as tmp_dir: + _CreateFinalDex(src_paths, dest_dex_jar, tmp_dir, dex_cmd) + + +def main(args): + build_utils.InitLogging('DEX_DEBUG') + options = _ParseArgs(args) + + options.class_inputs += options.class_inputs_filearg + options.dex_inputs += options.dex_inputs_filearg + + input_paths = options.class_inputs + options.dex_inputs + input_paths.append(options.r8_jar_path) + input_paths.append(options.custom_d8_jar_path) + if options.main_dex_rules_path: + input_paths.extend(options.main_dex_rules_path) + + depfile_deps = options.class_inputs_filearg + options.dex_inputs_filearg + + output_paths = [options.output] + + track_subpaths_allowlist = [] + if options.incremental_dir: + final_dex_inputs = _IntermediateDexFilePathsFromInputJars( + options.class_inputs, options.incremental_dir) + output_paths += final_dex_inputs + track_subpaths_allowlist += options.class_inputs + else: + final_dex_inputs = list(options.class_inputs) + final_dex_inputs += options.dex_inputs + + dex_cmd = build_utils.JavaCmd(xmx=_DEX_XMX) + + if options.dump_inputs: + dex_cmd += ['-Dcom.android.tools.r8.dumpinputtofile=d8inputs.zip'] + + if not options.skip_custom_d8: + dex_cmd += [ + '-cp', + '{}:{}'.format(options.r8_jar_path, options.custom_d8_jar_path), + 'org.chromium.build.CustomD8', + ] + else: + dex_cmd += [ + '-cp', + options.r8_jar_path, + 'com.android.tools.r8.D8', + ] + + if options.release: + dex_cmd += ['--release'] + if options.min_api: + dex_cmd += ['--min-api', options.min_api] + + if not options.desugar: + dex_cmd += ['--no-desugaring'] + elif options.classpath: + # The classpath is used by D8 to for interface desugaring. + if options.desugar_dependencies and not options.skip_custom_d8: + dex_cmd += ['--desugar-dependencies', options.desugar_dependencies] + if track_subpaths_allowlist: + track_subpaths_allowlist += options.classpath + depfile_deps += options.classpath + input_paths += options.classpath + # Still pass the entire classpath in case a new dependency is needed by + # desugar, so that desugar_dependencies will be updated for the next build. + for path in options.classpath: + dex_cmd += ['--classpath', path] + + if options.classpath or options.main_dex_rules_path: + # --main-dex-rules requires bootclasspath. 
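+    # (D8 resolves framework and JDK types against --lib entries, so the JDK
+    # home and the Android bootclasspath jars are both passed below.)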
+ dex_cmd += ['--lib', build_utils.JAVA_HOME] + for path in options.bootclasspath: + dex_cmd += ['--lib', path] + depfile_deps += options.bootclasspath + input_paths += options.bootclasspath + + + if options.assertion_handler: + dex_cmd += ['--force-assertions-handler:' + options.assertion_handler] + if options.force_enable_assertions: + dex_cmd += ['--force-enable-assertions'] + + # The changes feature from md5_check allows us to only re-dex the class files + # that have changed and the class files that need to be re-desugared by D8. + md5_check.CallAndWriteDepfileIfStale( + lambda changes: _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd), + options, + input_paths=input_paths, + input_strings=dex_cmd + [str(bool(options.incremental_dir))], + output_paths=output_paths, + pass_changes=True, + track_subpaths_allowlist=track_subpaths_allowlist, + depfile_deps=depfile_deps) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/android/gyp/dex.pydeps b/android/gyp/dex.pydeps new file mode 100644 index 000000000000..d920e24617e3 --- /dev/null +++ b/android/gyp/dex.pydeps @@ -0,0 +1,10 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dex.pydeps build/android/gyp/dex.py +../../action_helpers.py +../../gn_helpers.py +../../print_python_deps.py +../../zip_helpers.py +dex.py +util/__init__.py +util/build_utils.py +util/md5_check.py diff --git a/android/gyp/dex_test.py b/android/gyp/dex_test.py new file mode 100755 index 000000000000..5042e5fc3799 --- /dev/null +++ b/android/gyp/dex_test.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import unittest + +import dex + + +class DexTest(unittest.TestCase): + def testStdErrFilter(self): + # pylint: disable=line-too-long + output = """\ +some initial message +Warning: Specification conversion: The following prefixes do not match any type: [Ljava/util/Desugar] +Warning in ../../clank/third_party/google3/pg_confs/java_com_google_protobuf_lite_proguard.pgcfg: +Rule matches the static final field `java.lang.String com.google.protobuf.BaseGeneratedExtensionRegistryLite.CONTAINING_TYPE_0`, which may have been inlined: -identifiernamestring class com.google.protobuf.*GeneratedExtensionRegistryLite { + static java.lang.String CONTAINING_TYPE_*; +} +Warning: some message +Warning in gen/.../Foo.jar:Bar.class: + Type `libcore.io.Memory` was not found, it is required for default or static interface methods desugaring of `void Bar.a(long, byte)` +Warning: Missing class com.google.android.apps.gsa.search.shared.service.proto.PublicStopClientEvent (referenced from: com.google.protobuf.GeneratedMessageLite$GeneratedExtension com.google.protobuf.BaseGeneratedExtensionRegistryLite.findLiteExtensionByNumber(com.google.protobuf.MessageLite, int)) +Missing class com.google.android.gms.feedback.ApplicationProperties (referenced from: com.google.protobuf.GeneratedMessageLite$GeneratedExtension com.google.protobuf.BaseGeneratedExtensionRegistryLite.findLiteExtensionByNumber(com.google.protobuf.MessageLite, int)) +""" + expected = """\ +some initial message +Warning: some message +""" + # pylint: enable=line-too-long + filter_func = dex.CreateStderrFilter( + show_desugar_default_interface_warnings=False) + self.assertEqual(filter_func(output), expected) + + # Test no preamble, not filtered. 
+ output = """Warning: hi""" + expected = output + self.assertEqual(filter_func(output), expected) + + # Test no preamble, filtered + output = """\ +Warning: Specification conversion: The following prefixes do not ... +""" + expected = '' + self.assertEqual(filter_func(output), expected) + + +if __name__ == '__main__': + unittest.main() diff --git a/android/gyp/dist_aar.py b/android/gyp/dist_aar.py new file mode 100755 index 000000000000..507d0c3d83c9 --- /dev/null +++ b/android/gyp/dist_aar.py @@ -0,0 +1,169 @@ +#!/usr/bin/env python3 +# +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Creates an Android .aar file.""" + +import argparse +import os +import posixpath +import shutil +import sys +import tempfile +import zipfile + +import filter_zip +from util import build_utils +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers + + +_ANDROID_BUILD_DIR = os.path.dirname(os.path.dirname(__file__)) + + +def _MergeRTxt(r_paths, include_globs): + """Merging the given R.txt files and returns them as a string.""" + all_lines = set() + for r_path in r_paths: + if include_globs and not build_utils.MatchesGlob(r_path, include_globs): + continue + with open(r_path) as f: + all_lines.update(f.readlines()) + return ''.join(sorted(all_lines)) + + +def _MergeProguardConfigs(proguard_configs): + """Merging the given proguard config files and returns them as a string.""" + ret = [] + for config in proguard_configs: + ret.append('# FROM: {}'.format(config)) + with open(config) as f: + ret.append(f.read()) + return '\n'.join(ret) + + +def _AddResources(aar_zip, resource_zips, include_globs): + """Adds all resource zips to the given aar_zip. + + Ensures all res/values/* files have unique names by prefixing them. + """ + for i, path in enumerate(resource_zips): + if include_globs and not build_utils.MatchesGlob(path, include_globs): + continue + with zipfile.ZipFile(path) as res_zip: + for info in res_zip.infolist(): + data = res_zip.read(info) + dirname, basename = posixpath.split(info.filename) + if 'values' in dirname: + root, ext = os.path.splitext(basename) + basename = '{}_{}{}'.format(root, i, ext) + info.filename = posixpath.join(dirname, basename) + info.filename = posixpath.join('res', info.filename) + aar_zip.writestr(info, data) + + +def main(args): + args = build_utils.ExpandFileArgs(args) + parser = argparse.ArgumentParser() + action_helpers.add_depfile_arg(parser) + parser.add_argument('--output', required=True, help='Path to output aar.') + parser.add_argument('--jars', required=True, help='GN list of jar inputs.') + parser.add_argument('--dependencies-res-zips', required=True, + help='GN list of resource zips') + parser.add_argument('--r-text-files', required=True, + help='GN list of R.txt files to merge') + parser.add_argument('--proguard-configs', required=True, + help='GN list of ProGuard flag files to merge.') + parser.add_argument( + '--android-manifest', + help='Path to AndroidManifest.xml to include.', + default=os.path.join(_ANDROID_BUILD_DIR, 'AndroidManifest.xml')) + parser.add_argument('--native-libraries', default='', + help='GN list of native libraries. If non-empty then ' + 'ABI must be specified.') + parser.add_argument('--abi', + help='ABI (e.g. 
armeabi-v7a) for native libraries.') + parser.add_argument( + '--jar-excluded-globs', + help='GN-list of globs for paths to exclude in jar.') + parser.add_argument( + '--jar-included-globs', + help='GN-list of globs for paths to include in jar.') + parser.add_argument( + '--resource-included-globs', + help='GN-list of globs for paths to include in R.txt and resources zips.') + + options = parser.parse_args(args) + + if options.native_libraries and not options.abi: + parser.error('You must provide --abi if you have native libs') + + options.jars = action_helpers.parse_gn_list(options.jars) + options.dependencies_res_zips = action_helpers.parse_gn_list( + options.dependencies_res_zips) + options.r_text_files = action_helpers.parse_gn_list(options.r_text_files) + options.proguard_configs = action_helpers.parse_gn_list( + options.proguard_configs) + options.native_libraries = action_helpers.parse_gn_list( + options.native_libraries) + options.jar_excluded_globs = action_helpers.parse_gn_list( + options.jar_excluded_globs) + options.jar_included_globs = action_helpers.parse_gn_list( + options.jar_included_globs) + options.resource_included_globs = action_helpers.parse_gn_list( + options.resource_included_globs) + + with tempfile.NamedTemporaryFile(delete=False) as staging_file: + try: + with zipfile.ZipFile(staging_file.name, 'w') as z: + zip_helpers.add_to_zip_hermetic(z, + 'AndroidManifest.xml', + src_path=options.android_manifest) + + path_transform = filter_zip.CreatePathTransform( + options.jar_excluded_globs, options.jar_included_globs) + with tempfile.NamedTemporaryFile() as jar_file: + zip_helpers.merge_zips(jar_file.name, + options.jars, + path_transform=path_transform) + zip_helpers.add_to_zip_hermetic(z, + 'classes.jar', + src_path=jar_file.name) + + zip_helpers.add_to_zip_hermetic(z, + 'R.txt', + data=_MergeRTxt( + options.r_text_files, + options.resource_included_globs)) + zip_helpers.add_to_zip_hermetic(z, 'public.txt', data='') + + if options.proguard_configs: + zip_helpers.add_to_zip_hermetic(z, + 'proguard.txt', + data=_MergeProguardConfigs( + options.proguard_configs)) + + _AddResources(z, options.dependencies_res_zips, + options.resource_included_globs) + + for native_library in options.native_libraries: + libname = os.path.basename(native_library) + zip_helpers.add_to_zip_hermetic(z, + os.path.join('jni', options.abi, + libname), + src_path=native_library) + except: + os.unlink(staging_file.name) + raise + shutil.move(staging_file.name, options.output) + + if options.depfile: + all_inputs = (options.jars + options.dependencies_res_zips + + options.r_text_files + options.proguard_configs) + action_helpers.write_depfile(options.depfile, options.output, all_inputs) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/android/gyp/dist_aar.pydeps b/android/gyp/dist_aar.pydeps new file mode 100644 index 000000000000..ba0dd52590eb --- /dev/null +++ b/android/gyp/dist_aar.pydeps @@ -0,0 +1,9 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dist_aar.pydeps build/android/gyp/dist_aar.py +../../action_helpers.py +../../gn_helpers.py +../../zip_helpers.py +dist_aar.py +filter_zip.py +util/__init__.py +util/build_utils.py diff --git a/android/gyp/extract_unwind_tables.py b/android/gyp/extract_unwind_tables.py new file mode 100755 index 000000000000..de0f016b5a3b --- /dev/null +++ b/android/gyp/extract_unwind_tables.py @@ -0,0 +1,283 @@ +#!/usr/bin/env python3 +# Copyright 2018 The Chromium Authors +# Use of this 
source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Extracts the unwind tables from breakpad symbol files.
+
+Runs dump_syms on the given binary file and extracts the CFI data into the
+given output file.
+The output file is a binary file containing CFI rows ordered by function
+address. The output file only contains rows that match the most popular rule
+type in the CFI table, which keeps the output small and the data format
+compact. See https://github.com/google/breakpad/blob/main/docs/symbol_files.md.
+1. The CFA rules should be of postfix form "SP <ofs> +".
+2. The RA rules should be of postfix form "CFA <ofs> + ^".
+Note: breakpad represents dereferencing an address with the '^' operator.
+
+The output file has 2 tables, UNW_INDEX and UNW_DATA, inspired by the ARM
+EHABI format. The first table contains function addresses and an index into
+the UNW_DATA table. The second table contains one or more rows for the
+function unwind information.
+
+The output file starts with 4 bytes counting the number of entries in
+UNW_INDEX, followed by the UNW_INDEX table and then the UNW_DATA table.
+
+UNW_INDEX contains two columns of N rows each, where N is the number of
+functions.
+  1. The first column contains 4-byte rows of all the function start
+     addresses, as offsets from the start of the binary, in sorted order.
+  2. For each function address, the second column contains 2-byte indices in
+     order. The indices are offsets (in count of 2 bytes) of the CFI data from
+     the start of UNW_DATA.
+The last entry in the table always contains the CANT_UNWIND index to specify
+the end address of the last function.
+
+UNW_DATA contains the data of all the functions. Each function's data contains
+N rows. The data found at the address pointed to from UNW_INDEX is:
+  2 bytes: N - number of rows that belong to the current function.
+  N * 4 bytes: N rows of data. 16 bits: Address offset from function start.
+                               14 bits: CFA offset / 4.
+                                2 bits: RA offset / 4.
+
+A function is not added to the unwind table under the following conditions:
+C1. If the length of the function code (number of instructions) is greater
+    than 0xFFFF (2-byte address span). This is because we use 16 bits to refer
+    to the offset of an instruction from the start of the function.
+C2. If the function moves the SP by more than 0xFFFF bytes. This is because we
+    use 14 bits to denote the CFA offset (the last 2 bits are 0).
+C3. If the return address is stored at an offset >= 16 from the CFA. Some
+    functions which have variable arguments can have an offset up to 16.
+    TODO(ssid): We can actually store offset 16 by subtracting 1 from RA/4
+    since we never have 0.
+C4. Some functions do not have unwind information defined in the dwarf info.
+    These functions have the index value CANT_UNWIND (0xFFFF) in the
+    UNW_INDEX table.
+
+
+Usage:
+  extract_unwind_tables.py --input_path [root path to unstripped chrome.so]
+      --output_path [output path] --dump_syms_path [path to dump_syms binary]
+"""
+
+import argparse
+import re
+import struct
+import subprocess
+import sys
+import tempfile
+
+
+_CFA_REG = '.cfa'
+_RA_REG = '.ra'
+
+_ADDR_ENTRY = 0
+_LENGTH_ENTRY = 1
+
+_CANT_UNWIND = 0xFFFF
+
+
+def _Write4Bytes(output_file, val):
+  """Writes a 32 bit unsigned integer to the given output file."""
+  output_file.write(struct.pack('<L', val))
+
+
+def _Write2Bytes(output_file, val):
+  """Writes a 16 bit unsigned integer to the given output file."""
+  output_file.write(struct.pack('<H', val))
+
+
+def _FindRuleForRegister(cfi_row, reg):
+  """Returns the postfix form rule for the given register.
+
+  Breakpad CFI rows are of the form "STACK CFI <addr> R1: <rule 1> R2: <rule 2>".
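+  e.g. "STACK CFI 3b92e4c .cfa: sp 16 + .ra: .cfa -12 + ^", taken from the
+  test data in extract_unwind_tables_tests.py.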
+ """ + out = [] + found_register = False + for part in cfi_row: + if found_register: + if part[-1] == ':': + break + out.append(part) + elif part == reg + ':': + found_register = True + return ' '.join(out) + + +def _GetCfaAndRaOffset(cfi_row): + """Returns a tuple with 2 numbers (cfa_offset, ra_offset). + + Returns right values if rule matches the predefined criteria. Returns (0, 0) + otherwise. The criteria for CFA rule is postfix form "SP +" and RA rule + is postfix form "CFA - + ^". + """ + cfa_offset = 0 + ra_offset = 0 + cfa_rule = _FindRuleForRegister(cfi_row, _CFA_REG) + ra_rule = _FindRuleForRegister(cfi_row, _RA_REG) + if cfa_rule and re.match(r'sp [0-9]+ \+', cfa_rule): + cfa_offset = int(cfa_rule.split()[1], 10) + if ra_rule: + if not re.match(r'.cfa -[0-9]+ \+ \^', ra_rule): + return (0, 0) + ra_offset = -1 * int(ra_rule.split()[1], 10) + return (cfa_offset, ra_offset) + + +def _GetAllCfiRows(symbol_file): + """Returns parsed CFI data from given symbol_file. + + Each entry in the cfi data dictionary returned is a map from function start + address to array of function rows, starting with FUNCTION type, followed by + one or more CFI rows. + """ + cfi_data = {} + current_func = [] + for line in symbol_file: + line = line.decode('utf8') + if 'STACK CFI' not in line: + continue + + parts = line.split() + data = {} + if parts[2] == 'INIT': + # Add the previous function to the output + if len(current_func) > 1: + cfi_data[current_func[0][_ADDR_ENTRY]] = current_func + current_func = [] + + # The function line is of format "STACK CFI INIT ..." + data[_ADDR_ENTRY] = int(parts[3], 16) + data[_LENGTH_ENTRY] = int(parts[4], 16) + + # Condition C1: Skip if length is large. + if data[_LENGTH_ENTRY] == 0 or data[_LENGTH_ENTRY] > 0xffff: + continue # Skip the current function. + else: + # The current function is skipped. + if len(current_func) == 0: + continue + + # The CFI row is of format "STACK CFI .cfa: .ra: ..." + data[_ADDR_ENTRY] = int(parts[2], 16) + (data[_CFA_REG], data[_RA_REG]) = _GetCfaAndRaOffset(parts) + + # Condition C2 and C3: Skip based on limits on offsets. + if data[_CFA_REG] == 0 or data[_RA_REG] >= 16 or data[_CFA_REG] > 0xffff: + current_func = [] + continue + assert data[_CFA_REG] % 4 == 0 + # Since we skipped functions with code size larger than 0xffff, we should + # have no function offset larger than the same value. + assert data[_ADDR_ENTRY] - current_func[0][_ADDR_ENTRY] < 0xffff + + if data[_ADDR_ENTRY] == 0: + # Skip current function, delete all previous entries. + current_func = [] + continue + assert data[_ADDR_ENTRY] % 2 == 0 + current_func.append(data) + + # Condition C4: Skip function without CFI rows. + if len(current_func) > 1: + cfi_data[current_func[0][_ADDR_ENTRY]] = current_func + return cfi_data + + +def _WriteCfiData(cfi_data, out_file): + """Writes the CFI data in defined format to out_file.""" + # Stores the final data that will be written to UNW_DATA table, in order + # with 2 byte items. + unw_data = [] + + # Represent all the CFI data of functions as set of numbers and map them to an + # index in the |unw_data|. This index is later written to the UNW_INDEX table + # for each function. This map is used to find index of the data for functions. + data_to_index = {} + # Store mapping between the functions to the index. + func_addr_to_index = {} + previous_func_end = 0 + for addr, function in sorted(cfi_data.items()): + # Add an empty function entry when functions CFIs are missing between 2 + # functions. 
+    if previous_func_end != 0 and addr - previous_func_end > 4:
+      func_addr_to_index[previous_func_end + 2] = _CANT_UNWIND
+    previous_func_end = addr + cfi_data[addr][0][_LENGTH_ENTRY]
+
+    assert len(function) > 1
+    func_data_arr = []
+    func_data = 0
+    # The first row contains the function address and length. The rest of the
+    # rows have CFI data. Create the function data array as given in the
+    # format.
+    for row in function[1:]:
+      addr_offset = row[_ADDR_ENTRY] - addr
+      cfa_offset = (row[_CFA_REG]) | (row[_RA_REG] // 4)
+
+      func_data_arr.append(addr_offset)
+      func_data_arr.append(cfa_offset)
+
+    # Consider all the rows in the data as one large integer and use it as a
+    # key into |data_to_index|.
+    for data in func_data_arr:
+      func_data = (func_data << 16) | data
+
+    row_count = len(func_data_arr) // 2
+    if func_data not in data_to_index:
+      # When the data is not found, create a new index = len(unw_data), and
+      # write the data to |unw_data|.
+      index = len(unw_data)
+      data_to_index[func_data] = index
+      unw_data.append(row_count)
+      for row in func_data_arr:
+        unw_data.append(row)
+    else:
+      # If the data was found, then use the same index for the function.
+      index = data_to_index[func_data]
+      assert row_count == unw_data[index]
+    func_addr_to_index[addr] = data_to_index[func_data]
+
+  # Mark the end of the last function entry.
+  func_addr_to_index[previous_func_end + 2] = _CANT_UNWIND
+
+  # Write the number of entries in the UNW_INDEX table (4 bytes).
+  _Write4Bytes(out_file, len(func_addr_to_index))
+
+  # Write the UNW_INDEX table. First the list of addresses, then the indices.
+  sorted_unw_index = sorted(func_addr_to_index.items())
+  for addr, index in sorted_unw_index:
+    _Write4Bytes(out_file, addr)
+  for addr, index in sorted_unw_index:
+    _Write2Bytes(out_file, index)
+
+  # Write the UNW_DATA table.
+  for data in unw_data:
+    _Write2Bytes(out_file, data)
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--input_path', required=True,
+      help='The input path of the unstripped binary')
+  parser.add_argument(
+      '--output_path', required=True,
+      help='The path of the output file')
+  parser.add_argument(
+      '--dump_syms_path', required=True,
+      help='The path of the dump_syms binary')
+
+  args = parser.parse_args()
+  cmd = ['./' + args.dump_syms_path, args.input_path, '-v']
+  proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+  cfi_data = _GetAllCfiRows(proc.stdout)
+  if proc.wait():
+    sys.stderr.write('dump_syms exited with code {} after {} symbols\n'.format(
+        proc.returncode, len(cfi_data)))
+    sys.exit(proc.returncode)
+  with open(args.output_path, 'wb') as out_file:
+    _WriteCfiData(cfi_data, out_file)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/android/gyp/extract_unwind_tables_tests.py b/android/gyp/extract_unwind_tables_tests.py
new file mode 100755
index 000000000000..dd716bf20b3c
--- /dev/null
+++ b/android/gyp/extract_unwind_tables_tests.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for extract_unwind_tables.py.
+
+This test suite contains various tests for extracting CFI tables from breakpad
+symbol files.
+""" + +import io +import optparse +import os +import struct +import sys +import tempfile +import unittest + +import extract_unwind_tables + +sys.path.append(os.path.join(os.path.dirname(__file__), "gyp")) +from util import build_utils + + +class TestExtractUnwindTables(unittest.TestCase): + def testExtractCfi(self): + test_data_lines = """ +MODULE Linux arm CDE12FE1DF2B37A9C6560B4CBEE056420 lib_chrome.so +INFO CODE_ID E12FE1CD2BDFA937C6560B4CBEE05642 +FILE 0 ../../base/allocator/allocator_check.cc +FILE 1 ../../base/allocator/allocator_extension.cc +FILE 2 ../../base/allocator/allocator_shim.cc +FUNC 1adcb60 54 0 i2d_name_canon +1adcb60 1a 509 17054 +3b94c70 2 69 40 +PUBLIC e17001 0 assist_ranker::(anonymous namespace)::FakePredict::Initialize() +PUBLIC e17005 0 (anonymous namespace)::FileDeleter(base::File) +STACK CFI INIT e17000 4 .cfa: sp 0 + .ra: lr +STACK CFI INIT 0 4 .cfa: sp 0 + .ra: lr +STACK CFI 2 .cfa: sp 4 + +STACK CFI 4 .cfa: sp 12 + .ra: .cfa -8 + ^ r7: .cfa -12 + ^ +STACK CFI 6 .cfa: sp 16 + +STACK CFI INIT e1a96e 20 .cfa: sp 0 + .ra: lr +STACK CFI e1a970 .cfa: sp 4 + +STACK CFI e1a972 .cfa: sp 12 + .ra: .cfa -8 + ^ r7: .cfa -12 + ^ +STACK CFI e1a974 .cfa: sp 16 + +STACK CFI INIT e1a1e4 b0 .cfa: sp 0 + .ra: lr +STACK CFI e1a1e6 .cfa: sp 16 + .ra: .cfa -4 + ^ r4: .cfa -16 + ^ r5: .cfa -12 + +STACK CFI e1a1e8 .cfa: sp 80 + +STACK CFI INIT 0 4 .cfa: sp 0 + .ra: lr +STACK CFI INIT 3b92e24 3c .cfa: sp 0 + .ra: lr +STACK CFI 3b92e4c .cfa: sp 16 + .ra: .cfa -12 + ^ +STACK CFI INIT e17004 0 .cfa: sp 0 + .ra: lr +STACK CFI e17004 2 .cfa: sp 0 + .ra: lr +STACK CFI INIT 3b92e70 38 .cfa: sp 0 + .ra: lr +STACK CFI 3b92e74 .cfa: sp 8 + .ra: .cfa -4 + ^ r4: .cfa -8 + ^ +STACK CFI 3b92e90 .cfa: sp 0 + .ra: .ra r4: r4 +STACK CFI INIT 3b93114 6c .cfa: sp 0 + .ra: lr +STACK CFI 3b93118 .cfa: r7 16 + .ra: .cfa -4 + ^ +STACK CFI INIT 3b92114 6c .cfa: sp 0 + .ra: lr +STACK CFI 3b92118 .cfa: r7 16 + .ra: .cfa -20 + ^ +STACK CFI INIT 3b93214 fffff .cfa: sp 0 + .ra: lr +STACK CFI 3b93218 .cfa: r7 16 + .ra: .cfa -4 + ^ +""".splitlines() + cfi_data = extract_unwind_tables._GetAllCfiRows( + [l.encode('utf8') for l in test_data_lines]) + out_file = io.BytesIO() + extract_unwind_tables._WriteCfiData(cfi_data, out_file) + + expected_cfi_data = { + 0xe1a1e4: [0x2, 0x11, 0x4, 0x50], + 0xe1a296: [], + 0xe1a96e: [0x2, 0x4, 0x4, 0xe, 0x6, 0x10], + 0xe1a990: [], + 0x3b92e24: [0x28, 0x13], + 0x3b92e62: [], + } + expected_function_count = len(expected_cfi_data) + + actual_output = [] + out_file.seek(0) + while True: + read = out_file.read(2) + if not read: + break + actual_output.append(struct.unpack('H', read)[0]) + + # First value is size of unw_index table. + unw_index_size = actual_output[1] << 16 | actual_output[0] + # |unw_index_size| should match entry count. + self.assertEqual(expected_function_count, unw_index_size) + # |actual_output| is in blocks of 2 bytes. Skip first 4 bytes representing + # size. 
+    unw_index_start = 2
+    unw_index_addr_end = unw_index_start + expected_function_count * 2
+    unw_index_end = unw_index_addr_end + expected_function_count
+    unw_index_addr_col = actual_output[unw_index_start:unw_index_addr_end]
+    unw_index_index_col = actual_output[unw_index_addr_end:unw_index_end]
+
+    unw_data_start = unw_index_end
+    unw_data = actual_output[unw_data_start:]
+
+    for func_iter in range(0, expected_function_count):
+      func_addr = (unw_index_addr_col[func_iter * 2 + 1] << 16
+                   | unw_index_addr_col[func_iter * 2])
+      index = unw_index_index_col[func_iter]
+      # If the index is CANT_UNWIND, the function was not added to the table.
+      if index == 0xFFFF:
+        self.assertEqual(expected_cfi_data[func_addr], [])
+        continue
+
+      func_start = index + 1
+      func_end = func_start + unw_data[index] * 2
+      self.assertEqual(len(expected_cfi_data[func_addr]), func_end - func_start)
+      func_cfi = unw_data[func_start:func_end]
+      self.assertEqual(expected_cfi_data[func_addr], func_cfi)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/android/gyp/filter_zip.py b/android/gyp/filter_zip.py
new file mode 100755
index 000000000000..0382651bfc4b
--- /dev/null
+++ b/android/gyp/filter_zip.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import shutil
+import sys
+
+from util import build_utils
+import action_helpers  # build_utils adds //build to sys.path.
+import zip_helpers
+
+
+def CreatePathTransform(exclude_globs, include_globs):
+  """Returns a function to strip paths for the given patterns.
+
+  Args:
+    exclude_globs: List of globs that, if matched, should be excluded.
+    include_globs: List of globs that, if not matched, should be excluded.
+
+  Returns:
+    * None if no filters are needed.
+    * A function "(path) -> path" that returns None when |path| should be
+      stripped, or |path| otherwise.
+  """
+  if not (exclude_globs or include_globs):
+    return None
+  exclude_globs = list(exclude_globs or [])
+  def path_transform(path):
+    # Exclude filters take precedence over include filters.
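+    # For example, with exclude_globs=['*/foo/*'] and include_globs=['a/*'],
+    # the path 'a/foo/Bar.class' is stripped even though it matches an
+    # include glob.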
+ if build_utils.MatchesGlob(path, exclude_globs): + return None + if include_globs and not build_utils.MatchesGlob(path, include_globs): + return None + return path + + return path_transform + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--input', required=True, + help='Input zip file.') + parser.add_argument('--output', required=True, + help='Output zip file') + parser.add_argument('--exclude-globs', + help='GN list of exclude globs') + parser.add_argument('--include-globs', + help='GN list of include globs') + argv = build_utils.ExpandFileArgs(sys.argv[1:]) + args = parser.parse_args(argv) + + args.exclude_globs = action_helpers.parse_gn_list(args.exclude_globs) + args.include_globs = action_helpers.parse_gn_list(args.include_globs) + + path_transform = CreatePathTransform(args.exclude_globs, args.include_globs) + with action_helpers.atomic_output(args.output) as f: + if path_transform: + zip_helpers.merge_zips(f.name, [args.input], + path_transform=path_transform) + else: + shutil.copy(args.input, f.name) + + +if __name__ == '__main__': + main() diff --git a/android/gyp/filter_zip.pydeps b/android/gyp/filter_zip.pydeps new file mode 100644 index 000000000000..4905fd5d73a1 --- /dev/null +++ b/android/gyp/filter_zip.pydeps @@ -0,0 +1,8 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/filter_zip.pydeps build/android/gyp/filter_zip.py +../../action_helpers.py +../../gn_helpers.py +../../zip_helpers.py +filter_zip.py +util/__init__.py +util/build_utils.py diff --git a/android/gyp/finalize_apk.py b/android/gyp/finalize_apk.py new file mode 100644 index 000000000000..aaf66c2af862 --- /dev/null +++ b/android/gyp/finalize_apk.py @@ -0,0 +1,78 @@ +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Signs and aligns an APK.""" + +import argparse +import logging +import shutil +import subprocess +import sys +import tempfile + +from util import build_utils + + +def FinalizeApk(apksigner_path, + zipalign_path, + unsigned_apk_path, + final_apk_path, + key_path, + key_passwd, + key_name, + min_sdk_version, + warnings_as_errors=False): + # Use a tempfile so that Ctrl-C does not leave the file with a fresh mtime + # and a corrupted state. + with tempfile.NamedTemporaryFile() as staging_file: + if zipalign_path: + # v2 signing requires that zipalign happen first. + logging.debug('Running zipalign') + zipalign_cmd = [ + zipalign_path, '-p', '-f', '4', unsigned_apk_path, staging_file.name + ] + build_utils.CheckOutput(zipalign_cmd, + print_stdout=True, + fail_on_output=warnings_as_errors) + signer_input_path = staging_file.name + else: + signer_input_path = unsigned_apk_path + + sign_cmd = build_utils.JavaCmd() + [ + '-jar', + apksigner_path, + 'sign', + '--in', + signer_input_path, + '--out', + staging_file.name, + '--ks', + key_path, + '--ks-key-alias', + key_name, + '--ks-pass', + 'pass:' + key_passwd, + ] + # V3 signing adds security niceties, which are irrelevant for local builds. + sign_cmd += ['--v3-signing-enabled', 'false'] + + if min_sdk_version >= 24: + # Disable v1 signatures when v2 signing can be used (it's much faster). + # By default, both v1 and v2 signing happen. + sign_cmd += ['--v1-signing-enabled', 'false'] + else: + # Force SHA-1 (makes signing faster; insecure is fine for local builds). + # Leave v2 signing enabled since it verifies faster on device when + # supported. 
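+      # Note: apksigner picks signature algorithms based on the claimed
+      # minimum SDK, so advertising SDK 1 selects the cheaper SHA-1 digests
+      # for the v1 signature.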
+ sign_cmd += ['--min-sdk-version', '1'] + + logging.debug('Signing apk') + build_utils.CheckOutput(sign_cmd, + print_stdout=True, + fail_on_output=warnings_as_errors) + shutil.move(staging_file.name, final_apk_path) + # TODO(crbug.com/1174969): Remove this once Python2 is obsoleted. + if sys.version_info.major == 2: + staging_file.delete = False + else: + staging_file._closer.delete = False diff --git a/android/gyp/find.py b/android/gyp/find.py new file mode 100755 index 000000000000..617efef3ff07 --- /dev/null +++ b/android/gyp/find.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python3 +# +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Finds files in directories. +""" + + +import fnmatch +import optparse +import os +import sys + + +def main(argv): + parser = optparse.OptionParser() + parser.add_option('--pattern', default='*', help='File pattern to match.') + options, directories = parser.parse_args(argv) + + for d in directories: + if not os.path.exists(d): + print('%s does not exist' % d, file=sys.stderr) + return 1 + for root, _, filenames in os.walk(d): + for f in fnmatch.filter(filenames, options.pattern): + print(os.path.join(root, f)) + return 0 + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/android/gyp/flatc_java.py b/android/gyp/flatc_java.py new file mode 100755 index 000000000000..003f8201f4be --- /dev/null +++ b/android/gyp/flatc_java.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python3 +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Generate java source files from flatbuffer files. + +This is the action script for the flatbuffer_java_library template. 
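+
+Illustrative invocation (paths are hypothetical):
+  flatc_java.py --flatc out/flatc --import-dir some/imports \
+      --srcjar out/monster_fbs.srcjar monster.fbs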
+""" + +import argparse +import sys + +from util import build_utils +import action_helpers +import zip_helpers + + +def main(argv): + parser = argparse.ArgumentParser() + parser.add_argument('--flatc', required=True, help='Path to flatc binary.') + parser.add_argument('--srcjar', required=True, help='Path to output srcjar.') + parser.add_argument( + '--import-dir', + action='append', + default=[], + help='Extra import directory for flatbuffers, can be repeated.') + parser.add_argument('flatbuffers', nargs='+', help='flatbuffer source files') + options = parser.parse_args(argv) + + import_args = [] + for path in options.import_dir: + import_args += ['-I', path] + with build_utils.TempDir() as temp_dir: + build_utils.CheckOutput([options.flatc, '-j', '-o', temp_dir] + + import_args + options.flatbuffers) + + with action_helpers.atomic_output(options.srcjar) as f: + zip_helpers.zip_directory(f, temp_dir) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/android/gyp/flatc_java.pydeps b/android/gyp/flatc_java.pydeps new file mode 100644 index 000000000000..8c0c4f01fc06 --- /dev/null +++ b/android/gyp/flatc_java.pydeps @@ -0,0 +1,8 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/flatc_java.pydeps build/android/gyp/flatc_java.py +../../action_helpers.py +../../gn_helpers.py +../../zip_helpers.py +flatc_java.py +util/__init__.py +util/build_utils.py diff --git a/android/gyp/gcc_preprocess.py b/android/gyp/gcc_preprocess.py new file mode 100755 index 000000000000..2e5b3b3b5db2 --- /dev/null +++ b/android/gyp/gcc_preprocess.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python3 +# +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import os +import posixpath +import re +import sys +import zipfile + +from util import build_utils +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers + + +def _ParsePackageName(data): + m = re.match(r'^\s*package\s+(.*?)\s*;', data, re.MULTILINE) + return m.group(1) if m else '' + + +def main(args): + args = build_utils.ExpandFileArgs(args) + + parser = argparse.ArgumentParser() + parser.add_argument('--include-dirs', help='GN list of include directories.') + parser.add_argument('--output', help='Path for .srcjar.') + parser.add_argument('--define', + action='append', + dest='defines', + help='List of -D args') + parser.add_argument('templates', nargs='+', help='Template files.') + options = parser.parse_args(args) + + options.defines = action_helpers.parse_gn_list(options.defines) + options.include_dirs = action_helpers.parse_gn_list(options.include_dirs) + + gcc_cmd = [ + 'gcc', + '-E', # stop after preprocessing. + '-DANDROID', # Specify ANDROID define for pre-processor. + '-x', + 'c-header', # treat sources as C header files + '-P', # disable line markers, i.e. 
'#line 309' + ] + gcc_cmd.extend('-D' + x for x in options.defines) + gcc_cmd.extend('-I' + x for x in options.include_dirs) + + with action_helpers.atomic_output(options.output) as f: + with zipfile.ZipFile(f, 'w') as z: + for template in options.templates: + data = build_utils.CheckOutput(gcc_cmd + [template]) + package_name = _ParsePackageName(data) + if not package_name: + raise Exception('Could not find java package of ' + template) + zip_path = posixpath.join( + package_name.replace('.', '/'), + os.path.splitext(os.path.basename(template))[0]) + '.java' + zip_helpers.add_to_zip_hermetic(z, zip_path, data=data) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/android/gyp/gcc_preprocess.pydeps b/android/gyp/gcc_preprocess.pydeps new file mode 100644 index 000000000000..b57d40042751 --- /dev/null +++ b/android/gyp/gcc_preprocess.pydeps @@ -0,0 +1,8 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/gcc_preprocess.pydeps build/android/gyp/gcc_preprocess.py +../../action_helpers.py +../../gn_helpers.py +../../zip_helpers.py +gcc_preprocess.py +util/__init__.py +util/build_utils.py diff --git a/android/gyp/generate_android_wrapper.py b/android/gyp/generate_android_wrapper.py new file mode 100755 index 000000000000..46c7afeabed8 --- /dev/null +++ b/android/gyp/generate_android_wrapper.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python3 +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import os +import re +import sys + +from util import build_utils +import action_helpers # build_utils adds //build to sys.path. + +sys.path.append( + os.path.abspath( + os.path.join(os.path.dirname(__file__), '..', '..', 'util'))) + +import generate_wrapper + +_WRAPPED_PATH_LIST_RE = re.compile(r'@WrappedPathList\(([^,]+), ([^)]+)\)') + + +def ExpandWrappedPathLists(args): + expanded_args = [] + for arg in args: + m = _WRAPPED_PATH_LIST_RE.match(arg) + if m: + for p in action_helpers.parse_gn_list(m.group(2)): + expanded_args.extend([m.group(1), '@WrappedPath(%s)' % p]) + else: + expanded_args.append(arg) + return expanded_args + + +def main(raw_args): + parser = generate_wrapper.CreateArgumentParser() + expanded_raw_args = build_utils.ExpandFileArgs(raw_args) + expanded_raw_args = ExpandWrappedPathLists(expanded_raw_args) + args = parser.parse_args(expanded_raw_args) + return generate_wrapper.Wrap(args) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/android/gyp/generate_linker_version_script.py b/android/gyp/generate_linker_version_script.py new file mode 100755 index 000000000000..4f34457626f1 --- /dev/null +++ b/android/gyp/generate_linker_version_script.py @@ -0,0 +1,108 @@ +#!/usr/bin/env python3 +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Generate linker version scripts for Chrome on Android shared libraries.""" + +import argparse +import os + +from util import build_utils +import action_helpers # build_utils adds //build to sys.path. + +_SCRIPT_HEADER = """\ +# AUTO-GENERATED FILE. DO NOT MODIFY. 
+# +# See: %s + +{ + global: +""" % os.path.relpath(__file__, build_utils.DIR_SOURCE_ROOT) + +_SCRIPT_FOOTER = """\ + local: + *; +}; +""" + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument( + '--output', + required=True, + help='Path to output linker version script file.') + parser.add_argument( + '--jni-multiplexing', + action='store_true', + help='Export only the JNI methods generated by multiplexing') + parser.add_argument('--export-fortesting-java-symbols', + action='store_true', + help='Export Java_*_ForTesting JNI methods') + parser.add_argument( + '--export-symbol-allowlist-file', + action='append', + default=[], + dest='allowlists', + help='Path to an input file containing an allowlist of extra symbols to ' + 'export, one symbol per line. Multiple files may be specified.') + parser.add_argument( + '--export-feature-registrations', + action='store_true', + help='Export JNI_OnLoad_* methods') + options = parser.parse_args() + + # JNI_OnLoad is always exported. + # CrashpadHandlerMain() is the entry point to the Crashpad handler, required + # for libcrashpad_handler_trampoline.so. + symbol_list = ['CrashpadHandlerMain', 'JNI_OnLoad'] + + if options.jni_multiplexing: + symbol_list.append('Java_*_resolve_1for_*') + elif options.export_fortesting_java_symbols: + symbol_list.append('Java_*') + else: + # The linker uses unix shell globbing patterns, not regex. So, we have to + # include everything that doesn't end in "ForTest(ing)" with this set of + # globs. + symbol_list.append('Java_*[!F]orTesting') + symbol_list.append('Java_*[!o]rTesting') + symbol_list.append('Java_*[!r]Testing') + symbol_list.append('Java_*[!T]esting') + symbol_list.append('Java_*[!e]sting') + symbol_list.append('Java_*[!s]ting') + symbol_list.append('Java_*[!t]ing') + symbol_list.append('Java_*[!i]ng') + symbol_list.append('Java_*[!n]g') + symbol_list.append('Java_*[!F]orTest') + symbol_list.append('Java_*[!o]rTest') + symbol_list.append('Java_*[!r]Test') + symbol_list.append('Java_*[!T]est') + symbol_list.append('Java_*[!e]st') + symbol_list.append('Java_*[!s]t') + symbol_list.append('Java_*[!gt]') + + if options.export_feature_registrations: + symbol_list.append('JNI_OnLoad_*') + + for allowlist in options.allowlists: + with open(allowlist, 'rt') as f: + for line in f: + line = line.strip() + if not line or line[0] == '#': + continue + symbol_list.append(line) + + script_content = [_SCRIPT_HEADER] + for symbol in symbol_list: + script_content.append(' %s;\n' % symbol) + script_content.append(_SCRIPT_FOOTER) + + script = ''.join(script_content) + + with action_helpers.atomic_output(options.output, mode='w') as f: + f.write(script) + + +if __name__ == '__main__': + main() diff --git a/android/gyp/generate_linker_version_script.pydeps b/android/gyp/generate_linker_version_script.pydeps new file mode 100644 index 000000000000..03ac25d5c058 --- /dev/null +++ b/android/gyp/generate_linker_version_script.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/generate_linker_version_script.pydeps build/android/gyp/generate_linker_version_script.py +../../action_helpers.py +../../gn_helpers.py +generate_linker_version_script.py +util/__init__.py +util/build_utils.py diff --git a/android/gyp/ijar.py b/android/gyp/ijar.py new file mode 100755 index 000000000000..ec12cecaa171 --- /dev/null +++ b/android/gyp/ijar.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python3 +# +# Copyright 2018 The Chromium Authors +# Use of this source code is 
governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import os +import subprocess +import sys + +from util import build_utils +import action_helpers # build_utils adds //build to sys.path. + + +# python -c "import zipfile; zipfile.ZipFile('test.jar', 'w')" +# du -b test.jar +_EMPTY_JAR_SIZE = 22 + + +def main(): + # The point of this wrapper is to use AtomicOutput so that output timestamps + # are not updated when outputs are unchanged. + if len(sys.argv) != 4: + raise ValueError('unexpected arguments were given. %s' % sys.argv) + ijar_bin, in_jar, out_jar = sys.argv[1], sys.argv[2], sys.argv[3] + with action_helpers.atomic_output(out_jar) as f: + # ijar fails on empty jars: https://github.com/bazelbuild/bazel/issues/10162 + if os.path.getsize(in_jar) <= _EMPTY_JAR_SIZE: + with open(in_jar, 'rb') as in_f: + f.write(in_f.read()) + else: + build_utils.CheckOutput([ijar_bin, in_jar, f.name]) + + +if __name__ == '__main__': + main() diff --git a/android/gyp/ijar.pydeps b/android/gyp/ijar.pydeps new file mode 100644 index 000000000000..530aabe8e5d6 --- /dev/null +++ b/android/gyp/ijar.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/ijar.pydeps build/android/gyp/ijar.py +../../action_helpers.py +../../gn_helpers.py +ijar.py +util/__init__.py +util/build_utils.py diff --git a/android/gyp/jacoco_instr.py b/android/gyp/jacoco_instr.py new file mode 100755 index 000000000000..f32d6e87d235 --- /dev/null +++ b/android/gyp/jacoco_instr.py @@ -0,0 +1,257 @@ +#!/usr/bin/env python3 +# +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Instruments classes and jar files. + +This script corresponds to the 'jacoco_instr' action in the Java build process. +Depending on whether jacoco_instrument is set, the 'jacoco_instr' action will +call the instrument command which accepts a jar and instruments it using +jacococli.jar. + +""" + +import argparse +import json +import os +import shutil +import sys +import zipfile + +from util import build_utils +import action_helpers +import zip_helpers + + +# This should be same as recipe side token. See bit.ly/3STSPcE. +INSTRUMENT_ALL_JACOCO_OVERRIDE_TOKEN = 'INSTRUMENT_ALL_JACOCO' + + +def _AddArguments(parser): + """Adds arguments related to instrumentation to parser. + + Args: + parser: ArgumentParser object. + """ + parser.add_argument( + '--input-path', + required=True, + help='Path to input file(s). Either the classes ' + 'directory, or the path to a jar.') + parser.add_argument( + '--output-path', + required=True, + help='Path to output final file(s) to. Either the ' + 'final classes directory, or the directory in ' + 'which to place the instrumented/copied jar.') + parser.add_argument( + '--sources-json-file', + required=True, + help='File to create with the list of source directories ' + 'and input path.') + parser.add_argument( + '--target-sources-file', + required=True, + help='File containing newline-separated .java and .kt paths') + parser.add_argument( + '--jacococli-jar', required=True, help='Path to jacococli.jar.') + parser.add_argument( + '--files-to-instrument', + help='Path to a file containing which source files are affected.') + + +def _GetSourceDirsFromSourceFiles(source_files): + """Returns list of directories for the files in |source_files|. + + Args: + source_files: List of source files. + + Returns: + List of source directories. 
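+
+  For example (hypothetical paths), ['a/b/Foo.java', 'a/b/Bar.java',
+  'c/Baz.kt'] yields ['a/b', 'c'], in no particular order.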
+ """ + return list(set(os.path.dirname(source_file) for source_file in source_files)) + + +def _CreateSourcesJsonFile(source_dirs, input_path, sources_json_file, + src_root): + """Adds all normalized source directories and input path to + |sources_json_file|. + + Args: + source_dirs: List of source directories. + input_path: The input path to non-instrumented class files. + sources_json_file: File into which to write the list of source directories + and input path. + src_root: Root which sources added to the file should be relative to. + + Returns: + An exit code. + """ + src_root = os.path.abspath(src_root) + relative_sources = [] + for s in source_dirs: + abs_source = os.path.abspath(s) + if abs_source[:len(src_root)] != src_root: + print('Error: found source directory not under repository root: %s %s' % + (abs_source, src_root)) + return 1 + rel_source = os.path.relpath(abs_source, src_root) + + relative_sources.append(rel_source) + + data = {} + data['source_dirs'] = relative_sources + data['input_path'] = [] + data['output_dir'] = src_root + if input_path: + data['input_path'].append(os.path.abspath(input_path)) + with open(sources_json_file, 'w') as f: + json.dump(data, f) + return 0 + + +def _GetAffectedClasses(jar_file, source_files): + """Gets affected classes by affected source files to a jar. + + Args: + jar_file: The jar file to get all members. + source_files: The list of affected source files. + + Returns: + A tuple of affected classes and unaffected members. + """ + with zipfile.ZipFile(jar_file) as f: + members = f.namelist() + + affected_classes = [] + unaffected_members = [] + + for member in members: + if not member.endswith('.class'): + unaffected_members.append(member) + continue + + is_affected = False + index = member.find('$') + if index == -1: + index = member.find('.class') + for source_file in source_files: + if source_file.endswith( + (member[:index] + '.java', member[:index] + '.kt')): + affected_classes.append(member) + is_affected = True + break + if not is_affected: + unaffected_members.append(member) + + return affected_classes, unaffected_members + + +def _InstrumentClassFiles(instrument_cmd, + input_path, + output_path, + temp_dir, + affected_source_files=None): + """Instruments class files from input jar. + + Args: + instrument_cmd: JaCoCo instrument command. + input_path: The input path to non-instrumented jar. + output_path: The output path to instrumented jar. + temp_dir: The temporary directory. + affected_source_files: The affected source file paths to input jar. + Default is None, which means instrumenting all class files in jar. + """ + affected_classes = None + unaffected_members = None + if affected_source_files: + affected_classes, unaffected_members = _GetAffectedClasses( + input_path, affected_source_files) + + # Extract affected class files. + with zipfile.ZipFile(input_path) as f: + f.extractall(temp_dir, affected_classes) + + instrumented_dir = os.path.join(temp_dir, 'instrumented') + + # Instrument extracted class files. + instrument_cmd.extend([temp_dir, '--dest', instrumented_dir]) + build_utils.CheckOutput(instrument_cmd) + + if affected_source_files and unaffected_members: + # Extract unaffected members to instrumented_dir. 
+ with zipfile.ZipFile(input_path) as f: + f.extractall(instrumented_dir, unaffected_members) + + # Zip all files to output_path + with action_helpers.atomic_output(output_path) as f: + zip_helpers.zip_directory(f, instrumented_dir) + + +def _RunInstrumentCommand(parser): + """Instruments class or Jar files using JaCoCo. + + Args: + parser: ArgumentParser object. + + Returns: + An exit code. + """ + args = parser.parse_args() + + source_files = [] + if args.target_sources_file: + source_files.extend(build_utils.ReadSourcesList(args.target_sources_file)) + + with build_utils.TempDir() as temp_dir: + instrument_cmd = build_utils.JavaCmd() + [ + '-jar', args.jacococli_jar, 'instrument' + ] + + if not args.files_to_instrument: + affected_source_files = None + else: + affected_files = build_utils.ReadSourcesList(args.files_to_instrument) + # Check if coverage recipe decided to instrument everything by overriding + # the try builder default setting(selective instrumentation). This can + # happen in cases like a DEPS roll of jacoco library + + # Note: This token is preceded by ../../ because the paths to be + # instrumented are expected to be relative to the build directory. + # See _rebase_paths() at https://bit.ly/40oiixX + token = '../../' + INSTRUMENT_ALL_JACOCO_OVERRIDE_TOKEN + if token in affected_files: + affected_source_files = None + else: + source_set = set(source_files) + affected_source_files = [f for f in affected_files if f in source_set] + + # Copy input_path to output_path and return if no source file affected. + if not affected_source_files: + shutil.copyfile(args.input_path, args.output_path) + # Create a dummy sources_json_file. + _CreateSourcesJsonFile([], None, args.sources_json_file, + build_utils.DIR_SOURCE_ROOT) + return 0 + _InstrumentClassFiles(instrument_cmd, args.input_path, args.output_path, + temp_dir, affected_source_files) + + source_dirs = _GetSourceDirsFromSourceFiles(source_files) + # TODO(GYP): In GN, we are passed the list of sources, detecting source + # directories, then walking them to re-establish the list of sources. + # This can obviously be simplified! + _CreateSourcesJsonFile(source_dirs, args.input_path, args.sources_json_file, + build_utils.DIR_SOURCE_ROOT) + + return 0 + + +def main(): + parser = argparse.ArgumentParser() + _AddArguments(parser) + _RunInstrumentCommand(parser) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/android/gyp/jacoco_instr.pydeps b/android/gyp/jacoco_instr.pydeps new file mode 100644 index 000000000000..9c763fc6249f --- /dev/null +++ b/android/gyp/jacoco_instr.pydeps @@ -0,0 +1,8 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/jacoco_instr.pydeps build/android/gyp/jacoco_instr.py +../../action_helpers.py +../../gn_helpers.py +../../zip_helpers.py +jacoco_instr.py +util/__init__.py +util/build_utils.py diff --git a/android/gyp/java_cpp_enum.py b/android/gyp/java_cpp_enum.py new file mode 100755 index 000000000000..9098cfc82b1b --- /dev/null +++ b/android/gyp/java_cpp_enum.py @@ -0,0 +1,440 @@ +#!/usr/bin/env python3 +# +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import collections +from datetime import date +import re +import optparse +import os +from string import Template +import sys +import textwrap +import zipfile + +from util import build_utils +from util import java_cpp_utils +import action_helpers # build_utils adds //build to sys.path. 
+import zip_helpers + + +# List of C++ types that are compatible with the Java code generated by this +# script. +# +# This script can parse .idl files however, at present it ignores special +# rules such as [cpp_enum_prefix_override="ax_attr"]. +ENUM_FIXED_TYPE_ALLOWLIST = [ + 'char', 'unsigned char', 'short', 'unsigned short', 'int', 'int8_t', + 'int16_t', 'int32_t', 'uint8_t', 'uint16_t' +] + + +class EnumDefinition: + def __init__(self, original_enum_name=None, class_name_override=None, + enum_package=None, entries=None, comments=None, fixed_type=None): + self.original_enum_name = original_enum_name + self.class_name_override = class_name_override + self.enum_package = enum_package + self.entries = collections.OrderedDict(entries or []) + self.comments = collections.OrderedDict(comments or []) + self.prefix_to_strip = None + self.fixed_type = fixed_type + + def AppendEntry(self, key, value): + if key in self.entries: + raise Exception('Multiple definitions of key %s found.' % key) + self.entries[key] = value + + def AppendEntryComment(self, key, value): + if key in self.comments: + raise Exception('Multiple definitions of key %s found.' % key) + self.comments[key] = value + + @property + def class_name(self): + return self.class_name_override or self.original_enum_name + + def Finalize(self): + self._Validate() + self._AssignEntryIndices() + self._StripPrefix() + self._NormalizeNames() + + def _Validate(self): + assert self.class_name + assert self.enum_package + assert self.entries + if self.fixed_type and self.fixed_type not in ENUM_FIXED_TYPE_ALLOWLIST: + raise Exception('Fixed type %s for enum %s not in allowlist.' % + (self.fixed_type, self.class_name)) + + def _AssignEntryIndices(self): + # Enums, if given no value, are given the value of the previous enum + 1. + if not all(self.entries.values()): + prev_enum_value = -1 + for key, value in self.entries.items(): + if not value: + self.entries[key] = prev_enum_value + 1 + elif value in self.entries: + self.entries[key] = self.entries[value] + else: + try: + self.entries[key] = int(value) + except ValueError as e: + raise Exception('Could not interpret integer from enum value "%s" ' + 'for key %s.' 
% (value, key)) from e + prev_enum_value = self.entries[key] + + + def _StripPrefix(self): + prefix_to_strip = self.prefix_to_strip + if not prefix_to_strip: + shout_case = self.original_enum_name + shout_case = re.sub('(?!^)([A-Z]+)', r'_\1', shout_case).upper() + shout_case += '_' + + prefixes = [shout_case, self.original_enum_name, + 'k' + self.original_enum_name] + + for prefix in prefixes: + if all(w.startswith(prefix) for w in self.entries.keys()): + prefix_to_strip = prefix + break + else: + prefix_to_strip = '' + + def StripEntries(entries): + ret = collections.OrderedDict() + for k, v in entries.items(): + stripped_key = k.replace(prefix_to_strip, '', 1) + if isinstance(v, str): + stripped_value = v.replace(prefix_to_strip, '') + else: + stripped_value = v + ret[stripped_key] = stripped_value + + return ret + + self.entries = StripEntries(self.entries) + self.comments = StripEntries(self.comments) + + def _NormalizeNames(self): + self.entries = _TransformKeys(self.entries, java_cpp_utils.KCamelToShouty) + self.comments = _TransformKeys(self.comments, java_cpp_utils.KCamelToShouty) + + +def _TransformKeys(d, func): + """Normalize keys in |d| and update references to old keys in |d| values.""" + keys_map = {k: func(k) for k in d} + ret = collections.OrderedDict() + for k, v in d.items(): + # Need to transform values as well when the entry value was explicitly set + # (since it could contain references to other enum entry values). + if isinstance(v, str): + # First check if a full replacement is available. This avoids issues when + # one key is a substring of another. + if v in d: + v = keys_map[v] + else: + for old_key, new_key in keys_map.items(): + v = v.replace(old_key, new_key) + ret[keys_map[k]] = v + return ret + + +class DirectiveSet: + class_name_override_key = 'CLASS_NAME_OVERRIDE' + enum_package_key = 'ENUM_PACKAGE' + prefix_to_strip_key = 'PREFIX_TO_STRIP' + + known_keys = [class_name_override_key, enum_package_key, prefix_to_strip_key] + + def __init__(self): + self._directives = {} + + def Update(self, key, value): + if key not in DirectiveSet.known_keys: + raise Exception("Unknown directive: " + key) + self._directives[key] = value + + @property + def empty(self): + return len(self._directives) == 0 + + def UpdateDefinition(self, definition): + definition.class_name_override = self._directives.get( + DirectiveSet.class_name_override_key, '') + definition.enum_package = self._directives.get( + DirectiveSet.enum_package_key) + definition.prefix_to_strip = self._directives.get( + DirectiveSet.prefix_to_strip_key) + + +class HeaderParser: + single_line_comment_re = re.compile(r'\s*//\s*([^\n]*)') + multi_line_comment_start_re = re.compile(r'\s*/\*') + enum_line_re = re.compile(r'^\s*(\w+)(\s*\=\s*([^,\n]+))?,?') + enum_end_re = re.compile(r'^\s*}\s*;\.*$') + generator_error_re = re.compile(r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*$') + generator_directive_re = re.compile( + r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*([\.\w]+)$') + multi_line_generator_directive_start_re = re.compile( + r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*\(([\.\w]*)$') + multi_line_directive_continuation_re = re.compile(r'^\s*//\s+([\.\w]+)$') + multi_line_directive_end_re = re.compile(r'^\s*//\s+([\.\w]*)\)$') + + optional_class_or_struct_re = r'(class|struct)?' + enum_name_re = r'(\w+)' + optional_fixed_type_re = r'(\:\s*(\w+\s*\w+?))?' 
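+  # For example (illustrative), enum_start_re matches a line such as
+  #   "enum class Foo : int32_t {"
+  # capturing ("class", "Foo", ": int32_t", "int32_t").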
+ enum_start_re = re.compile(r'^\s*(?:\[cpp.*\])?\s*enum\s+' + + optional_class_or_struct_re + '\s*' + enum_name_re + '\s*' + + optional_fixed_type_re + '\s*{\s*') + enum_single_line_re = re.compile( + r'^\s*(?:\[cpp.*\])?\s*enum.*{(?P.*)}.*$') + + def __init__(self, lines, path=''): + self._lines = lines + self._path = path + self._enum_definitions = [] + self._in_enum = False + self._current_definition = None + self._current_comments = [] + self._generator_directives = DirectiveSet() + self._multi_line_generator_directive = None + self._current_enum_entry = '' + + def _ApplyGeneratorDirectives(self): + self._generator_directives.UpdateDefinition(self._current_definition) + self._generator_directives = DirectiveSet() + + def ParseDefinitions(self): + for line in self._lines: + self._ParseLine(line) + return self._enum_definitions + + def _ParseLine(self, line): + if self._multi_line_generator_directive: + self._ParseMultiLineDirectiveLine(line) + elif not self._in_enum: + self._ParseRegularLine(line) + else: + self._ParseEnumLine(line) + + def _ParseEnumLine(self, line): + if HeaderParser.multi_line_comment_start_re.match(line): + raise Exception('Multi-line comments in enums are not supported in ' + + self._path) + + enum_comment = HeaderParser.single_line_comment_re.match(line) + if enum_comment: + comment = enum_comment.groups()[0] + if comment: + self._current_comments.append(comment) + elif HeaderParser.enum_end_re.match(line): + self._FinalizeCurrentEnumDefinition() + else: + self._AddToCurrentEnumEntry(line) + if ',' in line: + self._ParseCurrentEnumEntry() + + def _ParseSingleLineEnum(self, line): + for entry in line.split(','): + self._AddToCurrentEnumEntry(entry) + self._ParseCurrentEnumEntry() + + self._FinalizeCurrentEnumDefinition() + + def _ParseCurrentEnumEntry(self): + if not self._current_enum_entry: + return + + enum_entry = HeaderParser.enum_line_re.match(self._current_enum_entry) + if not enum_entry: + raise Exception('Unexpected error while attempting to parse %s as enum ' + 'entry.' 
% self._current_enum_entry) + + enum_key = enum_entry.groups()[0] + enum_value = enum_entry.groups()[2] + self._current_definition.AppendEntry(enum_key, enum_value) + if self._current_comments: + self._current_definition.AppendEntryComment( + enum_key, ' '.join(self._current_comments)) + self._current_comments = [] + self._current_enum_entry = '' + + def _AddToCurrentEnumEntry(self, line): + self._current_enum_entry += ' ' + line.strip() + + def _FinalizeCurrentEnumDefinition(self): + if self._current_enum_entry: + self._ParseCurrentEnumEntry() + self._ApplyGeneratorDirectives() + self._current_definition.Finalize() + self._enum_definitions.append(self._current_definition) + self._current_definition = None + self._in_enum = False + + def _ParseMultiLineDirectiveLine(self, line): + multi_line_directive_continuation = ( + HeaderParser.multi_line_directive_continuation_re.match(line)) + multi_line_directive_end = ( + HeaderParser.multi_line_directive_end_re.match(line)) + + if multi_line_directive_continuation: + value_cont = multi_line_directive_continuation.groups()[0] + self._multi_line_generator_directive[1].append(value_cont) + elif multi_line_directive_end: + directive_name = self._multi_line_generator_directive[0] + directive_value = "".join(self._multi_line_generator_directive[1]) + directive_value += multi_line_directive_end.groups()[0] + self._multi_line_generator_directive = None + self._generator_directives.Update(directive_name, directive_value) + else: + raise Exception('Malformed multi-line directive declaration in ' + + self._path) + + def _ParseRegularLine(self, line): + enum_start = HeaderParser.enum_start_re.match(line) + generator_directive_error = HeaderParser.generator_error_re.match(line) + generator_directive = HeaderParser.generator_directive_re.match(line) + multi_line_generator_directive_start = ( + HeaderParser.multi_line_generator_directive_start_re.match(line)) + single_line_enum = HeaderParser.enum_single_line_re.match(line) + + if generator_directive_error: + raise Exception('Malformed directive declaration in ' + self._path + + '. Use () for multi-line directives. E.g.\n' + + '// GENERATED_JAVA_ENUM_PACKAGE: (\n' + + '// foo.package)') + if generator_directive: + directive_name = generator_directive.groups()[0] + directive_value = generator_directive.groups()[1] + self._generator_directives.Update(directive_name, directive_value) + elif multi_line_generator_directive_start: + directive_name = multi_line_generator_directive_start.groups()[0] + directive_value = multi_line_generator_directive_start.groups()[1] + self._multi_line_generator_directive = (directive_name, [directive_value]) + elif enum_start or single_line_enum: + if self._generator_directives.empty: + return + self._current_definition = EnumDefinition( + original_enum_name=enum_start.groups()[1], + fixed_type=enum_start.groups()[3]) + self._in_enum = True + if single_line_enum: + self._ParseSingleLineEnum(single_line_enum.group('enum_entries')) + + +def DoGenerate(source_paths): + for source_path in source_paths: + enum_definitions = DoParseHeaderFile(source_path) + if not enum_definitions: + raise Exception('No enums found in %s\n' + 'Did you forget prefixing enums with ' + '"// GENERATED_JAVA_ENUM_PACKAGE: foo"?' 
% + source_path) + for enum_definition in enum_definitions: + output_path = java_cpp_utils.GetJavaFilePath(enum_definition.enum_package, + enum_definition.class_name) + output = GenerateOutput(source_path, enum_definition) + yield output_path, output + + +def DoParseHeaderFile(path): + with open(path) as f: + return HeaderParser(f.readlines(), path).ParseDefinitions() + + +def GenerateOutput(source_path, enum_definition): + template = Template(""" +// Copyright ${YEAR} The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file is autogenerated by +// ${SCRIPT_NAME} +// From +// ${SOURCE_PATH} + +package ${PACKAGE}; + +import androidx.annotation.IntDef; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; + +@IntDef({ +${INT_DEF} +}) +@Retention(RetentionPolicy.SOURCE) +public @interface ${CLASS_NAME} { +${ENUM_ENTRIES} +} +""") + + enum_template = Template(' int ${NAME} = ${VALUE};') + enum_entries_string = [] + enum_names = [] + for enum_name, enum_value in enum_definition.entries.items(): + values = { + 'NAME': enum_name, + 'VALUE': enum_value, + } + enum_comments = enum_definition.comments.get(enum_name) + if enum_comments: + enum_comments_indent = ' * ' + comments_line_wrapper = textwrap.TextWrapper( + initial_indent=enum_comments_indent, + subsequent_indent=enum_comments_indent, + width=100) + enum_entries_string.append(' /**') + enum_entries_string.append('\n'.join( + comments_line_wrapper.wrap(enum_comments))) + enum_entries_string.append(' */') + enum_entries_string.append(enum_template.substitute(values)) + if enum_name != "NUM_ENTRIES": + enum_names.append(enum_definition.class_name + '.' + enum_name) + enum_entries_string = '\n'.join(enum_entries_string) + + enum_names_indent = ' ' * 4 + wrapper = textwrap.TextWrapper(initial_indent = enum_names_indent, + subsequent_indent = enum_names_indent, + width = 100) + enum_names_string = '\n'.join(wrapper.wrap(', '.join(enum_names))) + + values = { + 'CLASS_NAME': enum_definition.class_name, + 'ENUM_ENTRIES': enum_entries_string, + 'PACKAGE': enum_definition.enum_package, + 'INT_DEF': enum_names_string, + 'SCRIPT_NAME': java_cpp_utils.GetScriptName(), + 'SOURCE_PATH': source_path, + 'YEAR': str(date.today().year) + } + return template.substitute(values) + + +def DoMain(argv): + usage = 'usage: %prog [options] [output_dir] input_file(s)...' 
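+  # Typical invocation (illustrative):
+  #   java_cpp_enum.py --srcjar enums.srcjar path/to/some_enums.h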
+ parser = optparse.OptionParser(usage=usage) + + parser.add_option('--srcjar', + help='When specified, a .srcjar at the given path is ' + 'created instead of individual .java files.') + + options, args = parser.parse_args(argv) + + if not args: + parser.error('Need to specify at least one input file') + input_paths = args + + with action_helpers.atomic_output(options.srcjar) as f: + with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar: + for output_path, data in DoGenerate(input_paths): + zip_helpers.add_to_zip_hermetic(srcjar, output_path, data=data) + + +if __name__ == '__main__': + DoMain(sys.argv[1:]) diff --git a/android/gyp/java_cpp_enum.pydeps b/android/gyp/java_cpp_enum.pydeps new file mode 100644 index 000000000000..3e63ff861d30 --- /dev/null +++ b/android/gyp/java_cpp_enum.pydeps @@ -0,0 +1,9 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_enum.pydeps build/android/gyp/java_cpp_enum.py +../../action_helpers.py +../../gn_helpers.py +../../zip_helpers.py +java_cpp_enum.py +util/__init__.py +util/build_utils.py +util/java_cpp_utils.py diff --git a/android/gyp/java_cpp_enum_tests.py b/android/gyp/java_cpp_enum_tests.py new file mode 100755 index 000000000000..c14f2a085edb --- /dev/null +++ b/android/gyp/java_cpp_enum_tests.py @@ -0,0 +1,783 @@ +#!/usr/bin/env python3 +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Tests for enum_preprocess.py. + +This test suite contains various tests for the C++ -> Java enum generator. +""" + +import collections +from datetime import date +import unittest + +import java_cpp_enum +from java_cpp_enum import EnumDefinition, GenerateOutput +from java_cpp_enum import HeaderParser +from util import java_cpp_utils + + +class TestPreprocess(unittest.TestCase): + def testOutput(self): + definition = EnumDefinition(original_enum_name='ClassName', + enum_package='some.package', + entries=[('E1', 1), ('E2', '2 << 2')], + comments=[('E2', 'This is a comment.'), + ('E1', 'This is a multiple line ' + 'comment that is really long. ' + 'This is a multiple line ' + 'comment that is really ' + 'really long.')]) + output = GenerateOutput('path/to/file', definition) + expected = """ +// Copyright %d The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file is autogenerated by +// %s +// From +// path/to/file + +package some.package; + +import androidx.annotation.IntDef; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; + +@IntDef({ + ClassName.E1, ClassName.E2 +}) +@Retention(RetentionPolicy.SOURCE) +public @interface ClassName { + /** + * %s + * really really long. + */ + int E1 = 1; + /** + * This is a comment. + */ + int E2 = 2 << 2; +} +""" + long_comment = ('This is a multiple line comment that is really long. 
' + 'This is a multiple line comment that is') + self.assertEqual( + expected % (date.today().year, java_cpp_utils.GetScriptName(), + long_comment), output) + + def testParseSimpleEnum(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum EnumName { + VALUE_ZERO, + VALUE_ONE, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('EnumName', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual(collections.OrderedDict([('VALUE_ZERO', 0), + ('VALUE_ONE', 1)]), + definition.entries) + + def testParseBitShifts(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum EnumName { + VALUE_ZERO = 1 << 0, + VALUE_ONE = 1 << 1, + }; + + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum EnumName { + ENUM_NAME_ZERO = 1 << 0, + ENUM_NAME_ONE = 1 << 1, + ENUM_NAME_TWO = ENUM_NAME_ZERO | ENUM_NAME_ONE, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(2, len(definitions)) + definition = definitions[0] + self.assertEqual('EnumName', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual(collections.OrderedDict([('VALUE_ZERO', '1 << 0'), + ('VALUE_ONE', '1 << 1')]), + definition.entries) + + definition = definitions[1] + expected_entries = collections.OrderedDict([ + ('ZERO', '1 << 0'), + ('ONE', '1 << 1'), + ('TWO', 'ZERO | ONE')]) + self.assertEqual(expected_entries, definition.entries) + + def testParseMultilineEnumEntry(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: bar.namespace + enum Foo { + VALUE_ZERO = 1 << 0, + VALUE_ONE = + SymbolKey | FnKey | AltGrKey | MetaKey | AltKey | ControlKey, + VALUE_TWO = 1 << 18, + }; + """.split('\n') + expected_entries = collections.OrderedDict([ + ('VALUE_ZERO', '1 << 0'), + ('VALUE_ONE', 'SymbolKey | FnKey | AltGrKey | MetaKey | AltKey | ' + 'ControlKey'), + ('VALUE_TWO', '1 << 18')]) + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('Foo', definition.class_name) + self.assertEqual('bar.namespace', definition.enum_package) + self.assertEqual(expected_entries, definition.entries) + + def testParseEnumEntryWithTrailingMultilineEntry(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: bar.namespace + enum Foo { + VALUE_ZERO = 1, + VALUE_ONE = + SymbolKey | FnKey | AltGrKey | MetaKey | + AltKey | ControlKey | ShiftKey, + }; + """.split('\n') + expected_entries = collections.OrderedDict([ + ('VALUE_ZERO', '1'), + ('VALUE_ONE', 'SymbolKey | FnKey | AltGrKey | MetaKey | AltKey | ' + 'ControlKey | ShiftKey')]) + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('Foo', definition.class_name) + self.assertEqual('bar.namespace', definition.enum_package) + self.assertEqual(expected_entries, definition.entries) + + def testParseNoCommaAfterLastEntry(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: bar.namespace + enum Foo { + VALUE_ZERO = 1, + + // This is a multiline + // + // comment with an empty line. 
+ VALUE_ONE = 2 + }; + """.split('\n') + expected_entries = collections.OrderedDict([ + ('VALUE_ZERO', '1'), + ('VALUE_ONE', '2')]) + expected_comments = collections.OrderedDict([ + ('VALUE_ONE', 'This is a multiline comment with an empty line.')]) + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('Foo', definition.class_name) + self.assertEqual('bar.namespace', definition.enum_package) + self.assertEqual(expected_entries, definition.entries) + self.assertEqual(expected_comments, definition.comments) + + def testParseClassNameOverride(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName + enum EnumName { + FOO + }; + + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OtherOverride + enum PrefixTest { + PREFIX_TEST_A, + PREFIX_TEST_B, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(2, len(definitions)) + definition = definitions[0] + self.assertEqual('OverrideName', definition.class_name) + + definition = definitions[1] + self.assertEqual('OtherOverride', definition.class_name) + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 1)]), + definition.entries) + + def testParsePreservesCommentsWhenPrefixStripping(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum EnumOne { + ENUM_ONE_A = 1, + // Comment there + ENUM_ONE_B = A, + }; + + enum EnumIgnore { + C, D, E + }; + + // GENERATED_JAVA_ENUM_PACKAGE: other.package + // GENERATED_JAVA_PREFIX_TO_STRIP: P_ + enum EnumTwo { + P_A, + // This comment spans + // two lines. + P_B + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(2, len(definitions)) + definition = definitions[0] + self.assertEqual('EnumOne', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual(collections.OrderedDict([('A', '1'), + ('B', 'A')]), + definition.entries) + self.assertEqual(collections.OrderedDict([('B', 'Comment there')]), + definition.comments) + definition = definitions[1] + self.assertEqual('EnumTwo', definition.class_name) + self.assertEqual('other.package', definition.enum_package) + self.assertEqual(collections.OrderedDict( + [('B', 'This comment spans two lines.')]), definition.comments) + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 1)]), + definition.entries) + + def testParseTwoEnums(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum AnEnum { + ENUM_ONE_A = 1, + ENUM_ONE_B = A, + }; + + enum EnumIgnore { + C, D, E + }; + + // GENERATED_JAVA_ENUM_PACKAGE: other.package + enum EnumTwo { + P_A, + P_B + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(2, len(definitions)) + definition = definitions[0] + self.assertEqual('AnEnum', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual(collections.OrderedDict([('ENUM_ONE_A', '1'), + ('ENUM_ONE_B', 'A')]), + definition.entries) + definition = definitions[1] + self.assertEqual('EnumTwo', definition.class_name) + self.assertEqual('other.package', definition.enum_package) + self.assertEqual(collections.OrderedDict([('P_A', 0), + ('P_B', 1)]), + definition.entries) + + def testParseSingleLineEnum(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: other.package + // 
GENERATED_JAVA_PREFIX_TO_STRIP: P_ + enum EnumTwo { P_A, P_B }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + definition = definitions[0] + self.assertEqual('EnumTwo', definition.class_name) + self.assertEqual('other.package', definition.enum_package) + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 1)]), + definition.entries) + + def testParseWithStrippingAndRelativeReferences(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: other.package + // GENERATED_JAVA_PREFIX_TO_STRIP: P_ + enum EnumTwo { + P_A = 1, + // P_A is old-don't use P_A. + P_B = P_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + definition = definitions[0] + self.assertEqual('EnumTwo', definition.class_name) + self.assertEqual('other.package', definition.enum_package) + self.assertEqual(collections.OrderedDict([('A', '1'), + ('B', 'A')]), + definition.entries) + self.assertEqual(collections.OrderedDict([('B', 'A is old-don\'t use A.')]), + definition.comments) + + def testParseSingleLineAndRegularEnum(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum EnumOne { + ENUM_ONE_A = 1, + // Comment there + ENUM_ONE_B = A, + }; + + // GENERATED_JAVA_ENUM_PACKAGE: other.package + enum EnumTwo { P_A, P_B }; + + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName + enum EnumName { + ENUM_NAME_FOO + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + definition = definitions[0] + self.assertEqual( + collections.OrderedDict([('A', '1'), ('B', 'A')]), definition.entries) + self.assertEqual(collections.OrderedDict([('B', 'Comment there')]), + definition.comments) + + self.assertEqual(3, len(definitions)) + definition = definitions[1] + self.assertEqual( + collections.OrderedDict([('P_A', 0), ('P_B', 1)]), definition.entries) + + definition = definitions[2] + self.assertEqual(collections.OrderedDict([('FOO', 0)]), definition.entries) + + def testParseWithCamelCaseNames(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum EnumTest { + EnumTestA = 1, + // comment for EnumTestB. + EnumTestB = 2, + }; + + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + // GENERATED_JAVA_PREFIX_TO_STRIP: Test + enum AnEnum { + TestHTTPOption, + TestHTTPSOption, + }; + + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + definition = definitions[0] + self.assertEqual( + collections.OrderedDict([('A', '1'), ('B', '2')]), + definition.entries) + self.assertEqual( + collections.OrderedDict([('B', 'comment for B.')]), + definition.comments) + + definition = definitions[1] + self.assertEqual( + collections.OrderedDict([('HTTP_OPTION', 0), ('HTTPS_OPTION', 1)]), + definition.entries) + + def testParseWithKCamelCaseNames(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum EnumOne { + kEnumOne = 1, + // comment for kEnumTwo. 
+ kEnumTwo = 2, + }; + + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName + enum EnumName { + kEnumNameFoo, + kEnumNameBar + }; + + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum EnumName { + kEnumNameFoo, + kEnumBar, + }; + + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum Keys { + kSymbolKey = 1 << 0, + kAltKey = 1 << 1, + kUpKey = 1 << 2, + kKeyModifiers = kSymbolKey | kAltKey | kUpKey | kKeyModifiers, + }; + + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum Mixed { + kTestVal, + kCodecMPEG2 + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + definition = definitions[0] + self.assertEqual( + collections.OrderedDict([('ENUM_ONE', '1'), ('ENUM_TWO', '2')]), + definition.entries) + self.assertEqual( + collections.OrderedDict([('ENUM_TWO', 'comment for ENUM_TWO.')]), + definition.comments) + + definition = definitions[1] + self.assertEqual( + collections.OrderedDict([('FOO', 0), ('BAR', 1)]), + definition.entries) + + definition = definitions[2] + self.assertEqual( + collections.OrderedDict([('ENUM_NAME_FOO', 0), ('ENUM_BAR', 1)]), + definition.entries) + + definition = definitions[3] + expected_entries = collections.OrderedDict([ + ('SYMBOL_KEY', '1 << 0'), + ('ALT_KEY', '1 << 1'), + ('UP_KEY', '1 << 2'), + ('KEY_MODIFIERS', 'SYMBOL_KEY | ALT_KEY | UP_KEY | KEY_MODIFIERS')]) + self.assertEqual(expected_entries, definition.entries) + + definition = definitions[4] + self.assertEqual( + collections.OrderedDict([('TEST_VAL', 0), ('CODEC_MPEG2', 1)]), + definition.entries) + + def testParseThrowsOnUnknownDirective(self): + test_data = """ + // GENERATED_JAVA_UNKNOWN: Value + enum EnumName { + VALUE_ONE, + }; + """.split('\n') + with self.assertRaises(Exception): + HeaderParser(test_data).ParseDefinitions() + + def testParseReturnsEmptyListWithoutDirectives(self): + test_data = """ + enum EnumName { + VALUE_ONE, + }; + """.split('\n') + self.assertEqual([], HeaderParser(test_data).ParseDefinitions()) + + def testParseEnumClass(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum class Foo { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('Foo', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual(collections.OrderedDict([('A', 0)]), + definition.entries) + + def testParseEnumClassOneValueSubstringOfAnother(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum class SafeBrowsingStatus { + kChecking = 0, + kEnabled = 1, + kDisabled = 2, + kDisabledByAdmin = 3, + kDisabledByExtension = 4, + kEnabledStandard = 5, + kEnabledEnhanced = 6, + // New enum values must go above here. 
+ kMaxValue = kEnabledEnhanced, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('SafeBrowsingStatus', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual( + collections.OrderedDict([ + ('CHECKING', '0'), + ('ENABLED', '1'), + ('DISABLED', '2'), + ('DISABLED_BY_ADMIN', '3'), + ('DISABLED_BY_EXTENSION', '4'), + ('ENABLED_STANDARD', '5'), + ('ENABLED_ENHANCED', '6'), + ('MAX_VALUE', 'ENABLED_ENHANCED'), + ]), definition.entries) + self.assertEqual( + collections.OrderedDict([ + ('MAX_VALUE', 'New enum values must go above here.') + ]), definition.comments) + + def testParseEnumStruct(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum struct Foo { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('Foo', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual(collections.OrderedDict([('A', 0)]), + definition.entries) + + def testParseFixedTypeEnum(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum Foo : int { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('Foo', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual('int', definition.fixed_type) + self.assertEqual(collections.OrderedDict([('A', 0)]), + definition.entries) + + def testParseFixedTypeEnumClass(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum class Foo: unsigned short { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('Foo', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual('unsigned short', definition.fixed_type) + self.assertEqual(collections.OrderedDict([('A', 0)]), + definition.entries) + + def testParseUnknownFixedTypeRaises(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum class Foo: foo_type { + FOO_A, + }; + """.split('\n') + with self.assertRaises(Exception): + HeaderParser(test_data).ParseDefinitions() + + def testParseSimpleMultiLineDirective(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: ( + // test.namespace) + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar + enum Foo { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual('test.namespace', definitions[0].enum_package) + self.assertEqual('Bar', definitions[0].class_name) + + def testParseMultiLineDirective(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: (te + // st.name + // space) + enum Foo { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual('test.namespace', definitions[0].enum_package) + + def testParseMultiLineDirectiveWithOtherDirective(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: ( + // test.namespace) + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: ( + // Ba + // r + // ) + enum Foo { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + 
self.assertEqual('test.namespace', definitions[0].enum_package) + self.assertEqual('Bar', definitions[0].class_name) + + def testParseMalformedMultiLineDirectiveWithOtherDirective(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: ( + // test.name + // space + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar + enum Foo { + FOO_A, + }; + """.split('\n') + with self.assertRaises(Exception): + HeaderParser(test_data).ParseDefinitions() + + def testParseMalformedMultiLineDirective(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: ( + // test.name + // space + enum Foo { + FOO_A, + }; + """.split('\n') + with self.assertRaises(Exception): + HeaderParser(test_data).ParseDefinitions() + + def testParseMalformedMultiLineDirectiveShort(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: ( + enum Foo { + FOO_A, + }; + """.split('\n') + with self.assertRaises(Exception): + HeaderParser(test_data).ParseDefinitions() + + def testParseMalformedMultiLineDirectiveMissingBrackets(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: + // test.namespace + enum Foo { + FOO_A, + }; + """.split('\n') + with self.assertRaises(Exception): + HeaderParser(test_data).ParseDefinitions() + + def testEnumValueAssignmentNoneDefined(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', None) + definition.AppendEntry('B', None) + definition.AppendEntry('C', None) + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 1), + ('C', 2)]), + definition.entries) + + def testEnumValueAssignmentAllDefined(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', '1') + definition.AppendEntry('B', '2') + definition.AppendEntry('C', '3') + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', '1'), + ('B', '2'), + ('C', '3')]), + definition.entries) + + def testEnumValueAssignmentReferences(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', None) + definition.AppendEntry('B', 'A') + definition.AppendEntry('C', None) + definition.AppendEntry('D', 'C') + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 0), + ('C', 1), + ('D', 1)]), + definition.entries) + + def testEnumValueAssignmentSet(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', None) + definition.AppendEntry('B', '2') + definition.AppendEntry('C', None) + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 2), + ('C', 3)]), + definition.entries) + + def testEnumValueAssignmentSetReferences(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', None) + definition.AppendEntry('B', 'A') + definition.AppendEntry('C', 'B') + definition.AppendEntry('D', None) + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 0), + ('C', 0), + ('D', 1)]), + definition.entries) + + def testEnumValueAssignmentRaises(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', None) + definition.AppendEntry('B', 'foo') + definition.AppendEntry('C', None) + with self.assertRaises(Exception): + definition.Finalize() + + def testExplicitPrefixStripping(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('P_A', None) + definition.AppendEntry('B', None) + 
definition.AppendEntry('P_C', None) + definition.AppendEntry('P_LAST', 'P_C') + definition.prefix_to_strip = 'P_' + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 1), + ('C', 2), + ('LAST', 2)]), + definition.entries) + + def testImplicitPrefixStripping(self): + definition = EnumDefinition(original_enum_name='ClassName', + enum_package='p') + definition.AppendEntry('CLASS_NAME_A', None) + definition.AppendEntry('CLASS_NAME_B', None) + definition.AppendEntry('CLASS_NAME_C', None) + definition.AppendEntry('CLASS_NAME_LAST', 'CLASS_NAME_C') + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 1), + ('C', 2), + ('LAST', 2)]), + definition.entries) + + def testImplicitPrefixStrippingRequiresAllConstantsToBePrefixed(self): + definition = EnumDefinition(original_enum_name='Name', + enum_package='p') + definition.AppendEntry('A', None) + definition.AppendEntry('B', None) + definition.AppendEntry('NAME_LAST', None) + definition.Finalize() + self.assertEqual(['A', 'B', 'NAME_LAST'], list(definition.entries.keys())) + + def testGenerateThrowsOnEmptyInput(self): + with self.assertRaises(Exception): + original_do_parse = java_cpp_enum.DoParseHeaderFile + try: + java_cpp_enum.DoParseHeaderFile = lambda _: [] + for _ in java_cpp_enum.DoGenerate(['file']): + pass + finally: + java_cpp_enum.DoParseHeaderFile = original_do_parse + + +if __name__ == '__main__': + unittest.main() diff --git a/android/gyp/java_cpp_features.py b/android/gyp/java_cpp_features.py new file mode 100755 index 000000000000..10639a54895f --- /dev/null +++ b/android/gyp/java_cpp_features.py @@ -0,0 +1,112 @@ +#!/usr/bin/env python3 +# +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import os +import re +import sys +import zipfile + +from util import build_utils +from util import java_cpp_utils +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers + + +class FeatureParserDelegate(java_cpp_utils.CppConstantParser.Delegate): + # Ex. 
'BASE_FEATURE(kConstantName, "StringNameOfTheFeature", ...);' + # would parse as: + # ExtractConstantName() -> 'ConstantName' + # ExtractValue() -> '"StringNameOfTheFeature"' + FEATURE_RE = re.compile(r'BASE_FEATURE\(k([^,]+),') + VALUE_RE = re.compile(r'\s*("(?:\"|[^"])*")\s*,') + + def ExtractConstantName(self, line): + match = FeatureParserDelegate.FEATURE_RE.match(line) + return match.group(1) if match else None + + def ExtractValue(self, line): + match = FeatureParserDelegate.VALUE_RE.search(line) + return match.group(1) if match else None + + def CreateJavaConstant(self, name, value, comments): + return java_cpp_utils.JavaString(name, value, comments) + + +def _GenerateOutput(template, source_paths, template_path, features): + description_template = """ + // This following string constants were inserted by + // {SCRIPT_NAME} + // From + // {SOURCE_PATHS} + // Into + // {TEMPLATE_PATH} + +""" + values = { + 'SCRIPT_NAME': java_cpp_utils.GetScriptName(), + 'SOURCE_PATHS': ',\n // '.join(source_paths), + 'TEMPLATE_PATH': template_path, + } + description = description_template.format(**values) + native_features = '\n\n'.join(x.Format() for x in features) + + values = { + 'NATIVE_FEATURES': description + native_features, + } + return template.format(**values) + + +def _ParseFeatureFile(path): + with open(path) as f: + feature_file_parser = java_cpp_utils.CppConstantParser( + FeatureParserDelegate(), f.readlines()) + return feature_file_parser.Parse() + + +def _Generate(source_paths, template_path): + with open(template_path) as f: + lines = f.readlines() + + template = ''.join(lines) + package, class_name = java_cpp_utils.ParseTemplateFile(lines) + output_path = java_cpp_utils.GetJavaFilePath(package, class_name) + + features = [] + for source_path in source_paths: + features.extend(_ParseFeatureFile(source_path)) + + output = _GenerateOutput(template, source_paths, template_path, features) + return output, output_path + + +def _Main(argv): + parser = argparse.ArgumentParser() + + parser.add_argument('--srcjar', + required=True, + help='The path at which to generate the .srcjar file') + + parser.add_argument('--template', + required=True, + help='The template file with which to generate the Java ' + 'class. 
Must have "{NATIVE_FEATURES}" somewhere in ' + 'the template.') + + parser.add_argument('inputs', + nargs='+', + help='Input file(s)', + metavar='INPUTFILE') + args = parser.parse_args(argv) + + with action_helpers.atomic_output(args.srcjar) as f: + with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar: + data, path = _Generate(args.inputs, args.template) + zip_helpers.add_to_zip_hermetic(srcjar, path, data=data) + + +if __name__ == '__main__': + _Main(sys.argv[1:]) diff --git a/android/gyp/java_cpp_features.pydeps b/android/gyp/java_cpp_features.pydeps new file mode 100644 index 000000000000..4faa9033e29a --- /dev/null +++ b/android/gyp/java_cpp_features.pydeps @@ -0,0 +1,9 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_features.pydeps build/android/gyp/java_cpp_features.py +../../action_helpers.py +../../gn_helpers.py +../../zip_helpers.py +java_cpp_features.py +util/__init__.py +util/build_utils.py +util/java_cpp_utils.py diff --git a/android/gyp/java_cpp_features_tests.py b/android/gyp/java_cpp_features_tests.py new file mode 100755 index 000000000000..3053955ec1ae --- /dev/null +++ b/android/gyp/java_cpp_features_tests.py @@ -0,0 +1,183 @@ +#!/usr/bin/env python3 + +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Tests for java_cpp_features.py. + +This test suite contains various tests for the C++ -> Java base::Feature +generator. +""" + +import unittest + +import java_cpp_features +from util import java_cpp_utils + + +class _TestFeaturesParser(unittest.TestCase): + def testParseComments(self): + test_data = """ +/** + * This should be ignored as well. + */ + +// Comment followed by a blank line. + +// Comment followed by unrelated code. +int foo() { return 3; } + +// Real comment. base::Feature intentionally split across two lines. +BASE_FEATURE(kSomeFeature, "SomeFeature", + base::FEATURE_DISABLED_BY_DEFAULT); + +// Real comment that spans +// multiple lines. +BASE_FEATURE(kSomeOtherFeature, "SomeOtherFeature", + base::FEATURE_ENABLED_BY_DEFAULT); + +// Comment followed by nothing. 
+""".split('\n') + feature_file_parser = java_cpp_utils.CppConstantParser( + java_cpp_features.FeatureParserDelegate(), test_data) + features = feature_file_parser.Parse() + self.assertEqual(2, len(features)) + self.assertEqual('SOME_FEATURE', features[0].name) + self.assertEqual('"SomeFeature"', features[0].value) + self.assertEqual(1, len(features[0].comments.split('\n'))) + self.assertEqual('SOME_OTHER_FEATURE', features[1].name) + self.assertEqual('"SomeOtherFeature"', features[1].value) + self.assertEqual(2, len(features[1].comments.split('\n'))) + + def testWhitespace(self): + test_data = """ +// 1 line +BASE_FEATURE(kShort, "Short", base::FEATURE_DISABLED_BY_DEFAULT); + +// 2 lines +BASE_FEATURE(kTwoLineFeatureA, "TwoLineFeatureA", + base::FEATURE_DISABLED_BY_DEFAULT); +BASE_FEATURE(kTwoLineFeatureB, + "TwoLineFeatureB", base::FEATURE_DISABLED_BY_DEFAULT); + +// 3 lines +BASE_FEATURE(kFeatureWithAVeryLongNameThatWillHaveToWrap, + "FeatureWithAVeryLongNameThatWillHaveToWrap", + base::FEATURE_DISABLED_BY_DEFAULT); +""".split('\n') + feature_file_parser = java_cpp_utils.CppConstantParser( + java_cpp_features.FeatureParserDelegate(), test_data) + features = feature_file_parser.Parse() + self.assertEqual(4, len(features)) + self.assertEqual('SHORT', features[0].name) + self.assertEqual('"Short"', features[0].value) + self.assertEqual('TWO_LINE_FEATURE_A', features[1].name) + self.assertEqual('"TwoLineFeatureA"', features[1].value) + self.assertEqual('TWO_LINE_FEATURE_B', features[2].name) + self.assertEqual('"TwoLineFeatureB"', features[2].value) + self.assertEqual('FEATURE_WITH_A_VERY_LONG_NAME_THAT_WILL_HAVE_TO_WRAP', + features[3].name) + self.assertEqual('"FeatureWithAVeryLongNameThatWillHaveToWrap"', + features[3].value) + + def testCppSyntax(self): + test_data = """ +// Mismatched name +BASE_FEATURE(kMismatchedFeature, "MismatchedName", + base::FEATURE_DISABLED_BY_DEFAULT); + +namespace myfeature { +// In a namespace +BASE_FEATURE(kSomeFeature, "SomeFeature", + base::FEATURE_DISABLED_BY_DEFAULT); +} + +// Build config-specific base::Feature +#if BUILDFLAG(IS_ANDROID) +BASE_FEATURE(kAndroidOnlyFeature, "AndroidOnlyFeature", + base::FEATURE_DISABLED_BY_DEFAULT); +#endif + +// Value depends on build config +BASE_FEATURE(kMaybeEnabled, "MaybeEnabled", +#if BUILDFLAG(IS_ANDROID) + base::FEATURE_DISABLED_BY_DEFAULT +#else + base::FEATURE_ENABLED_BY_DEFAULT +#endif +); +""".split('\n') + feature_file_parser = java_cpp_utils.CppConstantParser( + java_cpp_features.FeatureParserDelegate(), test_data) + features = feature_file_parser.Parse() + self.assertEqual(4, len(features)) + self.assertEqual('MISMATCHED_FEATURE', features[0].name) + self.assertEqual('"MismatchedName"', features[0].value) + self.assertEqual('SOME_FEATURE', features[1].name) + self.assertEqual('"SomeFeature"', features[1].value) + self.assertEqual('ANDROID_ONLY_FEATURE', features[2].name) + self.assertEqual('"AndroidOnlyFeature"', features[2].value) + self.assertEqual('MAYBE_ENABLED', features[3].name) + self.assertEqual('"MaybeEnabled"', features[3].value) + + def testNotYetSupported(self): + # Negative test for cases we don't yet support, to ensure we don't misparse + # these until we intentionally add proper support. 
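+    # Both declarations below are expected to parse to nothing; the
+    # assertEqual(0, ...) at the end pins the current behavior so that
+    # adding support later forces a deliberate update of this test.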
+ test_data = """ +// Not currently supported: name depends on C++ directive +BASE_FEATURE(kNameDependsOnOs, +#if BUILDFLAG(IS_ANDROID) + "MaybeName1", +#else + "MaybeName2", +#endif + base::FEATURE_DISABLED_BY_DEFAULT); + +// Not currently supported: feature named with a constant instead of literal +BASE_FEATURE(kNamedAfterConstant, kNamedStringConstant, + base::FEATURE_DISABLED_BY_DEFAULT}; +""".split('\n') + feature_file_parser = java_cpp_utils.CppConstantParser( + java_cpp_features.FeatureParserDelegate(), test_data) + features = feature_file_parser.Parse() + self.assertEqual(0, len(features)) + + def testTreatWebViewLikeOneWord(self): + test_data = """ +BASE_FEATURE(kSomeWebViewFeature, "SomeWebViewFeature", + base::FEATURE_DISABLED_BY_DEFAULT); +BASE_FEATURE(kWebViewOtherFeature, "WebViewOtherFeature", + base::FEATURE_ENABLED_BY_DEFAULT); +BASE_FEATURE(kFeatureWithPluralWebViews, + "FeatureWithPluralWebViews", + base::FEATURE_ENABLED_BY_DEFAULT); +""".split('\n') + feature_file_parser = java_cpp_utils.CppConstantParser( + java_cpp_features.FeatureParserDelegate(), test_data) + features = feature_file_parser.Parse() + self.assertEqual('SOME_WEBVIEW_FEATURE', features[0].name) + self.assertEqual('"SomeWebViewFeature"', features[0].value) + self.assertEqual('WEBVIEW_OTHER_FEATURE', features[1].name) + self.assertEqual('"WebViewOtherFeature"', features[1].value) + self.assertEqual('FEATURE_WITH_PLURAL_WEBVIEWS', features[2].name) + self.assertEqual('"FeatureWithPluralWebViews"', features[2].value) + + def testSpecialCharacters(self): + test_data = r""" +BASE_FEATURE(kFeatureWithEscapes, "Weird\tfeature\"name\n", + base::FEATURE_DISABLED_BY_DEFAULT); +BASE_FEATURE(kFeatureWithEscapes2, + "Weird\tfeature\"name\n", + base::FEATURE_ENABLED_BY_DEFAULT); +""".split('\n') + feature_file_parser = java_cpp_utils.CppConstantParser( + java_cpp_features.FeatureParserDelegate(), test_data) + features = feature_file_parser.Parse() + self.assertEqual('FEATURE_WITH_ESCAPES', features[0].name) + self.assertEqual(r'"Weird\tfeature\"name\n"', features[0].value) + self.assertEqual('FEATURE_WITH_ESCAPES2', features[1].name) + self.assertEqual(r'"Weird\tfeature\"name\n"', features[1].value) + + +if __name__ == '__main__': + unittest.main() diff --git a/android/gyp/java_cpp_strings.py b/android/gyp/java_cpp_strings.py new file mode 100755 index 000000000000..c3d05de6c64d --- /dev/null +++ b/android/gyp/java_cpp_strings.py @@ -0,0 +1,105 @@ +#!/usr/bin/env python3 +# +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import os +import re +import sys +import zipfile + +from util import build_utils +from util import java_cpp_utils +import action_helpers # build_utils adds //build to sys.path. 
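+# zip_helpers (imported below) also lives in //build, so the sys.path
+# addition made by build_utils covers it too.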
+import zip_helpers
+
+
+class StringParserDelegate(java_cpp_utils.CppConstantParser.Delegate):
+  STRING_RE = re.compile(r'\s*const char k(.*)\[\]\s*=')
+  VALUE_RE = re.compile(r'\s*("(?:\"|[^"])*")\s*;')
+
+  def ExtractConstantName(self, line):
+    match = StringParserDelegate.STRING_RE.match(line)
+    return match.group(1) if match else None
+
+  def ExtractValue(self, line):
+    match = StringParserDelegate.VALUE_RE.search(line)
+    return match.group(1) if match else None
+
+  def CreateJavaConstant(self, name, value, comments):
+    return java_cpp_utils.JavaString(name, value, comments)
+
+
+def _GenerateOutput(template, source_paths, template_path, strings):
+  description_template = """
+    // The following string constants were inserted by
+    //     {SCRIPT_NAME}
+    // From
+    //     {SOURCE_PATHS}
+    // Into
+    //     {TEMPLATE_PATH}
+
+"""
+  values = {
+      'SCRIPT_NAME': java_cpp_utils.GetScriptName(),
+      'SOURCE_PATHS': ',\n    //     '.join(source_paths),
+      'TEMPLATE_PATH': template_path,
+  }
+  description = description_template.format(**values)
+  native_strings = '\n\n'.join(x.Format() for x in strings)
+
+  values = {
+      'NATIVE_STRINGS': description + native_strings,
+  }
+  return template.format(**values)
+
+
+def _ParseStringFile(path):
+  with open(path) as f:
+    string_file_parser = java_cpp_utils.CppConstantParser(
+        StringParserDelegate(), f.readlines())
+  return string_file_parser.Parse()
+
+
+def _Generate(source_paths, template_path):
+  with open(template_path) as f:
+    lines = f.readlines()
+
+  template = ''.join(lines)
+  package, class_name = java_cpp_utils.ParseTemplateFile(lines)
+  output_path = java_cpp_utils.GetJavaFilePath(package, class_name)
+  strings = []
+  for source_path in source_paths:
+    strings.extend(_ParseStringFile(source_path))
+
+  output = _GenerateOutput(template, source_paths, template_path, strings)
+  return output, output_path
+
+
+def _Main(argv):
+  parser = argparse.ArgumentParser()
+
+  parser.add_argument('--srcjar',
+                      required=True,
+                      help='The path at which to generate the .srcjar file')
+
+  parser.add_argument('--template',
+                      required=True,
+                      help='The template file with which to generate the Java '
+                      'class. Must have "{NATIVE_STRINGS}" somewhere in '
+                      'the template.')
+
+  parser.add_argument(
+      'inputs', nargs='+', help='Input file(s)', metavar='INPUTFILE')
+  args = parser.parse_args(argv)
+
+  with action_helpers.atomic_output(args.srcjar) as f:
+    with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar:
+      data, path = _Generate(args.inputs, args.template)
+      zip_helpers.add_to_zip_hermetic(srcjar, path, data=data)
+
+
+if __name__ == '__main__':
+  _Main(sys.argv[1:])
diff --git a/android/gyp/java_cpp_strings.pydeps b/android/gyp/java_cpp_strings.pydeps
new file mode 100644
index 000000000000..39b299e4b1da
--- /dev/null
+++ b/android/gyp/java_cpp_strings.pydeps
@@ -0,0 +1,9 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_strings.pydeps build/android/gyp/java_cpp_strings.py
+../../action_helpers.py
+../../gn_helpers.py
+../../zip_helpers.py
+java_cpp_strings.py
+util/__init__.py
+util/build_utils.py
+util/java_cpp_utils.py
diff --git a/android/gyp/java_cpp_strings_tests.py b/android/gyp/java_cpp_strings_tests.py
new file mode 100755
index 000000000000..793b2c310dee
--- /dev/null
+++ b/android/gyp/java_cpp_strings_tests.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python3
+
+# Copyright 2019 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
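+#
+# An illustrative input/output pair for the generator under test (the exact
+# generated formatting lives in java_cpp_utils.JavaString):
+#   const char kFooBar[] = "foo-bar";                 // C++ input
+#   public static final String FOO_BAR = "foo-bar";   // generated Java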
+ +"""Tests for java_cpp_strings.py. + +This test suite contains various tests for the C++ -> Java string generator. +""" + +import unittest + +import java_cpp_strings +from util import java_cpp_utils + + +class _TestStringsParser(unittest.TestCase): + + def testParseComments(self): + test_data = """ +/** + * This should be ignored as well. + */ + +// Comment followed by a blank line. + +// Comment followed by unrelated code. +int foo() { return 3; } + +// Real comment. +const char kASwitch[] = "a-value"; + +// Real comment that spans +// multiple lines. +const char kAnotherSwitch[] = "another-value"; + +// Comment followed by nothing. +""".split('\n') + string_file_parser = java_cpp_utils.CppConstantParser( + java_cpp_strings.StringParserDelegate(), test_data) + strings = string_file_parser.Parse() + self.assertEqual(2, len(strings)) + self.assertEqual('A_SWITCH', strings[0].name) + self.assertEqual('"a-value"', strings[0].value) + self.assertEqual(1, len(strings[0].comments.split('\n'))) + self.assertEqual('ANOTHER_SWITCH', strings[1].name) + self.assertEqual('"another-value"', strings[1].value) + self.assertEqual(2, len(strings[1].comments.split('\n'))) + + def testStringValues(self): + test_data = r""" +// Single line string constants. +const char kAString[] = "a-value"; +const char kNoComment[] = "no-comment"; + +namespace myfeature { +const char kMyFeatureNoComment[] = "myfeature.no-comment"; +} + +// Single line switch with a big space. +const char kAStringWithSpace[] = "a-value"; + +// Wrapped constant definition. +const char kAStringWithAVeryLongNameThatWillHaveToWrap[] = + "a-string-with-a-very-long-name-that-will-have-to-wrap"; + +// This one has no comment before it. + +const char kAStringWithAVeryLongNameThatWillHaveToWrap2[] = + "a-string-with-a-very-long-name-that-will-have-to-wrap2"; + +const char kStringWithEscapes[] = "tab\tquote\"newline\n"; +const char kStringWithEscapes2[] = + "tab\tquote\"newline\n"; + +const char kEmptyString[] = ""; + +// These are valid C++ but not currently supported by the script. 
+const char kInvalidLineBreak[] = + + "invalid-line-break"; + +const char kConcatenateMultipleStringLiterals[] = + "first line" + "second line"; +""".split('\n') + string_file_parser = java_cpp_utils.CppConstantParser( + java_cpp_strings.StringParserDelegate(), test_data) + strings = string_file_parser.Parse() + self.assertEqual(9, len(strings)) + self.assertEqual('A_STRING', strings[0].name) + self.assertEqual('"a-value"', strings[0].value) + self.assertEqual('NO_COMMENT', strings[1].name) + self.assertEqual('"no-comment"', strings[1].value) + self.assertEqual('MY_FEATURE_NO_COMMENT', strings[2].name) + self.assertEqual('"myfeature.no-comment"', strings[2].value) + self.assertEqual('A_STRING_WITH_SPACE', strings[3].name) + self.assertEqual('"a-value"', strings[3].value) + self.assertEqual('A_STRING_WITH_A_VERY_LONG_NAME_THAT_WILL_HAVE_TO_WRAP', + strings[4].name) + self.assertEqual('"a-string-with-a-very-long-name-that-will-have-to-wrap"', + strings[4].value) + self.assertEqual('A_STRING_WITH_A_VERY_LONG_NAME_THAT_WILL_HAVE_TO_WRAP2', + strings[5].name) + self.assertEqual('"a-string-with-a-very-long-name-that-will-have-to-wrap2"', + strings[5].value) + self.assertEqual('STRING_WITH_ESCAPES', strings[6].name) + self.assertEqual(r'"tab\tquote\"newline\n"', strings[6].value) + self.assertEqual('STRING_WITH_ESCAPES2', strings[7].name) + self.assertEqual(r'"tab\tquote\"newline\n"', strings[7].value) + self.assertEqual('EMPTY_STRING', strings[8].name) + self.assertEqual('""', strings[8].value) + + def testTreatWebViewLikeOneWord(self): + test_data = """ +const char kSomeWebViewSwitch[] = "some-webview-switch"; +const char kWebViewOtherSwitch[] = "webview-other-switch"; +const char kSwitchWithPluralWebViews[] = "switch-with-plural-webviews"; +""".split('\n') + string_file_parser = java_cpp_utils.CppConstantParser( + java_cpp_strings.StringParserDelegate(), test_data) + strings = string_file_parser.Parse() + self.assertEqual('SOME_WEBVIEW_SWITCH', strings[0].name) + self.assertEqual('"some-webview-switch"', strings[0].value) + self.assertEqual('WEBVIEW_OTHER_SWITCH', strings[1].name) + self.assertEqual('"webview-other-switch"', strings[1].value) + self.assertEqual('SWITCH_WITH_PLURAL_WEBVIEWS', strings[2].name) + self.assertEqual('"switch-with-plural-webviews"', strings[2].value) + + def testTemplateParsing(self): + test_data = """ +// Copyright 2019 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package my.java.package; + +public any sort of class MyClass {{ + +{NATIVE_STRINGS} + +}} +""".split('\n') + package, class_name = java_cpp_utils.ParseTemplateFile(test_data) + self.assertEqual('my.java.package', package) + self.assertEqual('MyClass', class_name) + + +if __name__ == '__main__': + unittest.main() diff --git a/android/gyp/java_google_api_keys.py b/android/gyp/java_google_api_keys.py new file mode 100755 index 000000000000..4e4fa1998b18 --- /dev/null +++ b/android/gyp/java_google_api_keys.py @@ -0,0 +1,118 @@ +#!/usr/bin/env python3 +# +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Generates a Java file with API keys. 
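+#
+# At least one of --out (a .java file) or --srcjar must be passed. An
+# illustrative invocation (the output path here is made up):
+#   java_google_api_keys.py --srcjar gen/google_api_keys.srcjar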
+ +import argparse +import os +import string +import sys +import zipfile + +from util import build_utils +import zip_helpers + +sys.path.append( + os.path.abspath(os.path.join(sys.path[0], '../../../google_apis'))) +import google_api_keys + + +PACKAGE = 'org.chromium.chrome' +CLASSNAME = 'GoogleAPIKeys' + + +def GetScriptName(): + return os.path.relpath(__file__, build_utils.DIR_SOURCE_ROOT) + + +def GenerateOutput(constant_definitions): + template = string.Template(""" +// Copyright 2015 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file is autogenerated by +// ${SCRIPT_NAME} +// From +// ${SOURCE_PATH} + +package ${PACKAGE}; + +public class ${CLASS_NAME} { +${CONSTANT_ENTRIES} +} +""") + + constant_template = string.Template( + ' public static final String ${NAME} = "${VALUE}";') + constant_entries_list = [] + for constant_name, constant_value in constant_definitions.items(): + values = { + 'NAME': constant_name, + 'VALUE': constant_value, + } + constant_entries_list.append(constant_template.substitute(values)) + constant_entries_string = '\n'.join(constant_entries_list) + + values = { + 'CLASS_NAME': CLASSNAME, + 'CONSTANT_ENTRIES': constant_entries_string, + 'PACKAGE': PACKAGE, + 'SCRIPT_NAME': GetScriptName(), + 'SOURCE_PATH': 'google_api_keys/google_api_keys.h', + } + return template.substitute(values) + + +def _DoWriteJavaOutput(output_path, constant_definition): + folder = os.path.dirname(output_path) + if folder and not os.path.exists(folder): + os.makedirs(folder) + with open(output_path, 'w') as out_file: + out_file.write(GenerateOutput(constant_definition)) + + +def _DoWriteJarOutput(output_path, constant_definition): + folder = os.path.dirname(output_path) + if folder and not os.path.exists(folder): + os.makedirs(folder) + with zipfile.ZipFile(output_path, 'w') as srcjar: + path = '%s/%s' % (PACKAGE.replace('.', '/'), CLASSNAME + '.java') + data = GenerateOutput(constant_definition) + zip_helpers.add_to_zip_hermetic(srcjar, path, data=data) + + +def _DoMain(argv): + parser = argparse.ArgumentParser() + parser.add_argument("--out", help="Path for java output.") + parser.add_argument("--srcjar", help="Path for srcjar output.") + options = parser.parse_args(argv) + if not options.out and not options.srcjar: + parser.print_help() + sys.exit(-1) + + values = {} + values['GOOGLE_API_KEY'] = google_api_keys.GetAPIKey() + values['GOOGLE_API_KEY_ANDROID_NON_STABLE'] = ( + google_api_keys.GetAPIKeyAndroidNonStable()) + values['GOOGLE_CLIENT_ID_MAIN'] = google_api_keys.GetClientID('MAIN') + values['GOOGLE_CLIENT_SECRET_MAIN'] = google_api_keys.GetClientSecret('MAIN') + values['GOOGLE_CLIENT_ID_REMOTING'] = google_api_keys.GetClientID('REMOTING') + values['GOOGLE_CLIENT_SECRET_REMOTING'] = google_api_keys.GetClientSecret( + 'REMOTING') + values['GOOGLE_CLIENT_ID_REMOTING_HOST'] = google_api_keys.GetClientID( + 'REMOTING_HOST') + values['GOOGLE_CLIENT_SECRET_REMOTING_HOST'] = (google_api_keys. 
+ GetClientSecret('REMOTING_HOST')) + + if options.out: + _DoWriteJavaOutput(options.out, values) + if options.srcjar: + _DoWriteJarOutput(options.srcjar, values) + + +if __name__ == '__main__': + _DoMain(sys.argv[1:]) diff --git a/android/gyp/java_google_api_keys.pydeps b/android/gyp/java_google_api_keys.pydeps new file mode 100644 index 000000000000..6c027a19d944 --- /dev/null +++ b/android/gyp/java_google_api_keys.pydeps @@ -0,0 +1,8 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_google_api_keys.pydeps build/android/gyp/java_google_api_keys.py +../../../google_apis/google_api_keys.py +../../gn_helpers.py +../../zip_helpers.py +java_google_api_keys.py +util/__init__.py +util/build_utils.py diff --git a/android/gyp/java_google_api_keys_tests.py b/android/gyp/java_google_api_keys_tests.py new file mode 100755 index 000000000000..0610178d8582 --- /dev/null +++ b/android/gyp/java_google_api_keys_tests.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python3 +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Tests for java_google_api_keys.py. + +This test suite contains various tests for the C++ -> Java Google API Keys +generator. +""" + +import unittest + +import java_google_api_keys + + +class TestJavaGoogleAPIKeys(unittest.TestCase): + def testOutput(self): + definition = {'E1': 'abc', 'E2': 'defgh'} + output = java_google_api_keys.GenerateOutput(definition) + expected = """ +// Copyright 2015 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file is autogenerated by +// %s +// From +// google_api_keys/google_api_keys.h + +package org.chromium.chrome; + +public class GoogleAPIKeys { + public static final String E1 = "abc"; + public static final String E2 = "defgh"; +} +""" + self.assertEqual(expected % java_google_api_keys.GetScriptName(), output) + + +if __name__ == '__main__': + unittest.main() diff --git a/android/gyp/javac_output_processor.py b/android/gyp/javac_output_processor.py new file mode 100755 index 000000000000..6faf5de5abc9 --- /dev/null +++ b/android/gyp/javac_output_processor.py @@ -0,0 +1,216 @@ +#!/usr/bin/env python3 +# +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Contains helper class for processing javac output.""" + +import dataclasses +import os +import pathlib +import re +import sys +import traceback +from typing import List + +from util import build_utils + +sys.path.insert( + 0, + os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'colorama', 'src')) +import colorama +sys.path.insert( + 0, + os.path.join(build_utils.DIR_SOURCE_ROOT, 'tools', 'android', + 'modularization', 'convenience')) +import lookup_dep + + +def ReplaceGmsPackageIfNeeded(target_name: str) -> str: + if target_name.startswith( + ('//third_party/android_deps:google_play_services_', + '//clank/third_party/google3:google_play_services_')): + return f'$google_play_services_package:{target_name.split(":")[1]}' + return target_name + + +def _DisambiguateDeps(class_entries: List[lookup_dep.ClassEntry]): + def filter_if_not_empty(entries, filter_func): + filtered_entries = [e for e in entries if filter_func(e)] + return filtered_entries or entries + + # When some deps are preferred, ignore all other potential deps. 
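+  # Each filter below is a soft preference: filter_if_not_empty() only
+  # narrows the candidate list when at least one entry survives, so no
+  # single rule can empty the suggestions.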
+ class_entries = filter_if_not_empty(class_entries, lambda e: e.preferred_dep) + + # E.g. javax_annotation_jsr250_api_java. + class_entries = filter_if_not_empty(class_entries, + lambda e: 'jsr' in e.target) + + # Avoid suggesting subtargets when regular targets exist. + class_entries = filter_if_not_empty(class_entries, + lambda e: '__' not in e.target) + + # Swap out GMS package names if needed. + class_entries = [ + dataclasses.replace(e, target=ReplaceGmsPackageIfNeeded(e.target)) + for e in class_entries + ] + + # Convert to dict and then use list to get the keys back to remove dups and + # keep order the same as before. + class_entries = list({e: True for e in class_entries}) + + return class_entries + + +class JavacOutputProcessor: + def __init__(self, target_name): + self._target_name = self._RemoveSuffixesIfPresent( + ["__compile_java", "__errorprone", "__header"], target_name) + self._suggested_deps = set() + + # Example: ../../ui/android/java/src/org/chromium/ui/base/Clipboard.java:45: + fileline_prefix = ( + r'(?P(?P[-.\w/\\]+.java):(?P[0-9]+):)') + + self._warning_re = re.compile( + fileline_prefix + r'(?P warning: (?P.*))$') + self._error_re = re.compile(fileline_prefix + + r'(?P (?P.*))$') + self._marker_re = re.compile(r'\s*(?P\^)\s*$') + + self._symbol_not_found_re_list = [ + # Example: + # error: package org.chromium.components.url_formatter does not exist + re.compile(fileline_prefix + + r'( error: package [\w.]+ does not exist)$'), + # Example: error: cannot find symbol + re.compile(fileline_prefix + r'( error: cannot find symbol)$'), + # Example: error: symbol not found org.chromium.url.GURL + re.compile(fileline_prefix + r'( error: symbol not found [\w.]+)$'), + ] + + # Example: import org.chromium.url.GURL; + self._import_re = re.compile(r'\s*import (?P[\w\.]+);$') + + self._warning_color = [ + 'full_message', colorama.Fore.YELLOW + colorama.Style.DIM + ] + self._error_color = [ + 'full_message', colorama.Fore.MAGENTA + colorama.Style.BRIGHT + ] + self._marker_color = ['marker', colorama.Fore.BLUE + colorama.Style.BRIGHT] + + self._class_lookup_index = None + + colorama.init() + + def Process(self, lines): + """ Processes javac output. + + - Applies colors to output. + - Suggests GN dep to add for 'unresolved symbol in Java import' errors. + """ + lines = self._ElaborateLinesForUnknownSymbol(iter(lines)) + for line in lines: + yield self._ApplyColors(line) + if self._suggested_deps: + + def yellow(text): + return colorama.Fore.YELLOW + text + colorama.Fore.RESET + + # Show them in quotes so they can be copy/pasted into BUILD.gn files. + yield yellow('Hint:') + ' One or more errors due to missing GN deps.' + yield (yellow('Hint:') + ' Try adding the following to ' + + yellow(self._target_name)) + for dep in sorted(self._suggested_deps): + yield ' "{}",'.format(dep) + + def _ElaborateLinesForUnknownSymbol(self, lines): + """ Elaborates passed-in javac output for unresolved symbols. + + Looks for unresolved symbols in imports. + Adds: + - Line with GN target which cannot compile. + - Mention of unresolved class if not present in error message. + - Line with suggestion of GN dep to add. + + Args: + lines: Generator with javac input. + Returns: + Generator with processed output. 
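+
+    Example (illustrative): an 'error: cannot find symbol' line followed by
+    'import org.chromium.url.GURL;' records a suggestion here, and Process()
+    later emits it as a Hint line naming the GN dep providing that class.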
+ """ + previous_line = next(lines, None) + line = next(lines, None) + while previous_line != None: + try: + self._LookForUnknownSymbol(previous_line, line) + except Exception: + elaborated_lines = ['Error in _LookForUnknownSymbol ---'] + elaborated_lines += traceback.format_exc().splitlines() + elaborated_lines += ['--- end _LookForUnknownSymbol error'] + for elaborated_line in elaborated_lines: + yield elaborated_line + + yield previous_line + previous_line = line + line = next(lines, None) + + def _ApplyColors(self, line): + """Adds colors to passed-in line and returns processed line.""" + if self._warning_re.match(line): + line = self._Colorize(line, self._warning_re, self._warning_color) + elif self._error_re.match(line): + line = self._Colorize(line, self._error_re, self._error_color) + elif self._marker_re.match(line): + line = self._Colorize(line, self._marker_re, self._marker_color) + return line + + def _LookForUnknownSymbol(self, line, next_line): + if not next_line: + return + + import_re_match = self._import_re.match(next_line) + if not import_re_match: + return + + for regex in self._symbol_not_found_re_list: + if regex.match(line): + break + else: + return + + if self._class_lookup_index is None: + self._class_lookup_index = lookup_dep.ClassLookupIndex( + pathlib.Path(os.getcwd()), + should_build=False, + ) + + class_to_lookup = import_re_match.group('imported_class') + suggested_deps = self._class_lookup_index.match(class_to_lookup) + + if not suggested_deps: + return + + suggested_deps = _DisambiguateDeps(suggested_deps) + suggested_deps_str = ', '.join(s.target for s in suggested_deps) + + if len(suggested_deps) > 1: + suggested_deps_str = 'one of: ' + suggested_deps_str + + self._suggested_deps.add(suggested_deps_str) + + @staticmethod + def _RemoveSuffixesIfPresent(suffixes, text): + for suffix in suffixes: + if text.endswith(suffix): + return text[:-len(suffix)] + return text + + @staticmethod + def _Colorize(line, regex, color): + match = regex.match(line) + start = match.start(color[0]) + end = match.end(color[0]) + return (line[:start] + color[1] + line[start:end] + colorama.Fore.RESET + + colorama.Style.RESET_ALL + line[end:]) diff --git a/android/gyp/jinja_template.py b/android/gyp/jinja_template.py new file mode 100755 index 000000000000..4a242683a6f5 --- /dev/null +++ b/android/gyp/jinja_template.py @@ -0,0 +1,163 @@ +#!/usr/bin/env python3 +# +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Renders one or more template files using the Jinja template engine.""" + +import codecs +import argparse +import os +import sys + +from util import build_utils +from util import resource_utils +import action_helpers # build_utils adds //build to sys.path. 
+import zip_helpers
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+from pylib.constants import host_paths
+
+# Import jinja2 from third_party/jinja2
+sys.path.append(os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party'))
+import jinja2  # pylint: disable=F0401
+
+
+class _RecordingFileSystemLoader(jinja2.FileSystemLoader):
+  def __init__(self, searchpath):
+    jinja2.FileSystemLoader.__init__(self, searchpath)
+    self.loaded_templates = set()
+
+  def get_source(self, environment, template):
+    contents, filename, uptodate = jinja2.FileSystemLoader.get_source(
+        self, environment, template)
+    self.loaded_templates.add(os.path.relpath(filename))
+    return contents, filename, uptodate
+
+
+class JinjaProcessor:
+  """Allows easy rendering of jinja templates with input file tracking."""
+  def __init__(self, loader_base_dir, variables=None):
+    self.loader_base_dir = loader_base_dir
+    self.variables = variables or {}
+    self.loader = _RecordingFileSystemLoader(loader_base_dir)
+    self.env = jinja2.Environment(loader=self.loader)
+    self.env.undefined = jinja2.StrictUndefined
+    self.env.line_comment_prefix = '##'
+    self.env.trim_blocks = True
+    self.env.lstrip_blocks = True
+    self._template_cache = {}  # Map of path -> Template
+
+  def Render(self, input_filename, variables=None):
+    input_rel_path = os.path.relpath(input_filename, self.loader_base_dir)
+    template = self._template_cache.get(input_rel_path)
+    if not template:
+      template = self.env.get_template(input_rel_path)
+      self._template_cache[input_rel_path] = template
+    return template.render(variables or self.variables)
+
+  def GetLoadedTemplates(self):
+    return list(self.loader.loaded_templates)
+
+
+def _ProcessFile(processor, input_filename, output_filename):
+  output = processor.Render(input_filename)
+
+  # If |output| matches the existing file content, skip the write so that
+  # ninja's restat will avoid rebuilding things that depend on it.
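+  # (ninja re-stats outputs after an action completes; leaving the file's
+  # mtime untouched here is what prevents downstream rebuilds.)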
+  if os.path.isfile(output_filename):
+    with codecs.open(output_filename, 'r', 'utf-8') as f:
+      if f.read() == output:
+        return
+
+  with codecs.open(output_filename, 'w', 'utf-8') as output_file:
+    output_file.write(output)
+
+
+def _ProcessFiles(processor, input_filenames, inputs_base_dir, outputs_zip):
+  with build_utils.TempDir() as temp_dir:
+    path_info = resource_utils.ResourceInfoFile()
+    for input_filename in input_filenames:
+      relpath = os.path.relpath(os.path.abspath(input_filename),
+                                os.path.abspath(inputs_base_dir))
+      if relpath.startswith(os.pardir):
+        raise Exception('input file %s is not contained in inputs base dir %s'
+                        % (input_filename, inputs_base_dir))
+
+      output_filename = os.path.join(temp_dir, relpath)
+      parent_dir = os.path.dirname(output_filename)
+      build_utils.MakeDirectory(parent_dir)
+      _ProcessFile(processor, input_filename, output_filename)
+      path_info.AddMapping(relpath, input_filename)
+
+    path_info.Write(outputs_zip + '.info')
+    with action_helpers.atomic_output(outputs_zip) as f:
+      zip_helpers.zip_directory(f, temp_dir)
+
+
+def _ParseVariables(variables_arg, error_func):
+  variables = {}
+  for v in action_helpers.parse_gn_list(variables_arg):
+    if '=' not in v:
+      error_func('--variables argument must contain "=": ' + v)
+    name, _, value = v.partition('=')
+    variables[name] = value
+  return variables
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--inputs', required=True,
+                      help='GN-list of template files to process.')
+  parser.add_argument('--includes', default='',
+                      help="GN-list of files that get {% include %}'ed.")
+  parser.add_argument('--output', help='The output file to generate. Valid '
+                      'only if there is a single input.')
+  parser.add_argument('--outputs-zip', help='A zip file for the processed '
+                      'templates. Required if there are multiple inputs.')
+  parser.add_argument('--inputs-base-dir', help='A common ancestor directory '
+                      'of the inputs. Each output\'s path in the output zip '
+                      'will match the relative path from INPUTS_BASE_DIR to '
+                      'the input. Required if --outputs-zip is given.')
+  parser.add_argument('--loader-base-dir', help='Base path used by the '
+                      'template loader. Must be a common ancestor directory of '
+                      'the inputs. Defaults to DIR_SOURCE_ROOT.',
+                      default=host_paths.DIR_SOURCE_ROOT)
+  parser.add_argument('--variables', help='Variables to be made available in '
+                      'the template processing environment, as a GYP list '
+                      '(e.g. 
--variables "channel=beta mstone=39")', default='')
+  parser.add_argument('--check-includes', action='store_true',
+                      help='Enable inputs and includes checks.')
+  options = parser.parse_args()
+
+  inputs = action_helpers.parse_gn_list(options.inputs)
+  includes = action_helpers.parse_gn_list(options.includes)
+
+  if (options.output is None) == (options.outputs_zip is None):
+    parser.error('Exactly one of --output and --outputs-zip must be given')
+  if options.output and len(inputs) != 1:
+    parser.error('--output cannot be used with multiple inputs')
+  if options.outputs_zip and not options.inputs_base_dir:
+    parser.error('--inputs-base-dir must be given when --outputs-zip is used')
+
+  variables = _ParseVariables(options.variables, parser.error)
+  processor = JinjaProcessor(options.loader_base_dir, variables=variables)
+
+  if options.output:
+    _ProcessFile(processor, inputs[0], options.output)
+  else:
+    _ProcessFiles(processor, inputs, options.inputs_base_dir,
+                  options.outputs_zip)
+
+  if options.check_includes:
+    all_inputs = set(processor.GetLoadedTemplates())
+    all_inputs.difference_update(inputs)
+    all_inputs.difference_update(includes)
+    if all_inputs:
+      raise Exception('Found files not listed via --includes:\n' +
+                      '\n'.join(sorted(all_inputs)))
+
+
+if __name__ == '__main__':
+  main()
diff --git a/android/gyp/jinja_template.pydeps b/android/gyp/jinja_template.pydeps
new file mode 100644
index 000000000000..1eafd884a28a
--- /dev/null
+++ b/android/gyp/jinja_template.pydeps
@@ -0,0 +1,43 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/jinja_template.pydeps build/android/gyp/jinja_template.py
+../../../third_party/catapult/devil/devil/__init__.py
+../../../third_party/catapult/devil/devil/android/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../../third_party/catapult/devil/devil/constants/__init__.py
+../../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../../third_party/jinja2/__init__.py
+../../../third_party/jinja2/_identifier.py
+../../../third_party/jinja2/async_utils.py
+../../../third_party/jinja2/bccache.py
+../../../third_party/jinja2/compiler.py
+../../../third_party/jinja2/defaults.py
+../../../third_party/jinja2/environment.py
+../../../third_party/jinja2/exceptions.py
+../../../third_party/jinja2/filters.py
+../../../third_party/jinja2/idtracking.py
+../../../third_party/jinja2/lexer.py
+../../../third_party/jinja2/loaders.py
+../../../third_party/jinja2/nodes.py
+../../../third_party/jinja2/optimizer.py
+../../../third_party/jinja2/parser.py
+../../../third_party/jinja2/runtime.py
+../../../third_party/jinja2/tests.py
+../../../third_party/jinja2/utils.py
+../../../third_party/jinja2/visitor.py
+../../../third_party/markupsafe/__init__.py
+../../../third_party/markupsafe/_compat.py
+../../../third_party/markupsafe/_native.py
+../../action_helpers.py
+../../gn_helpers.py
+../../zip_helpers.py
+../pylib/__init__.py
+../pylib/constants/__init__.py
+../pylib/constants/host_paths.py
+jinja_template.py
+util/__init__.py
+util/build_utils.py
+util/resource_utils.py
diff --git a/android/gyp/lint.py b/android/gyp/lint.py
new file mode 100755
index 000000000000..ae26a18085df
--- /dev/null
+++ b/android/gyp/lint.py
@@ -0,0 +1,530 @@
+#!/usr/bin/env python3
+#
+# Copyright 2013 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Runs Android's lint tool."""
+
+import argparse
+import logging
+import os
+import shutil
+import sys
+import time
+from xml.dom import minidom
+from xml.etree import ElementTree
+
+from util import build_utils
+from util import manifest_utils
+from util import server_utils
+import action_helpers  # build_utils adds //build to sys.path.
+
+_LINT_MD_URL = 'https://chromium.googlesource.com/chromium/src/+/main/build/android/docs/lint.md'  # pylint: disable=line-too-long
+
+# These checks are not useful for chromium.
+_DISABLED_ALWAYS = [
+    "AppCompatResource",  # Lint does not correctly detect our appcompat lib.
+    "Assert",  # R8 --force-enable-assertions is used to enable java asserts.
+    "InflateParams",  # Null is ok when inflating views for dialogs.
+    "InlinedApi",  # Constants are copied so they are always available.
+    "LintBaseline",  # Don't warn about using baseline.xml files.
+    "MissingInflatedId",  # False positives https://crbug.com/1394222
+    "MissingApplicationIcon",  # False positive for non-production targets.
+    "ObsoleteLintCustomCheck",  # We have no control over custom lint checks.
+    "SwitchIntDef",  # Many C++ enums are not used at all in java.
+    "Typos",  # Strings are committed in English first and later translated.
+    "UniqueConstants",  # Chromium enums allow aliases.
+    "UnusedAttribute",  # Chromium apks have various minSdkVersion values.
+]
+
+# These checks are not useful for test targets and add an unnecessary burden
+# to suppress them.
+_DISABLED_FOR_TESTS = [
+    # We should not require test strings.xml files to explicitly add
+    # translatable=false since they are not translated and not used in
+    # production.
+    "MissingTranslation",
+    # Test strings.xml files often have simple names and are not translatable,
+    # so it may conflict with a production string and cause this error.
+    "Untranslatable",
+    # Test targets often use the same strings target and resources target as
+    # the production targets but may not use all of them.
+    "UnusedResources",
+    # TODO(wnwen): Turn this back on since to crash it would require running on
+    #     a device with all the various minSdkVersions.
+    # Real NewApi violations crash the app, so the only ones that lint catches
+    # but tests still succeed are false positives.
+    "NewApi",
+    # Tests should be allowed to access these methods/classes.
+    "VisibleForTests",
+]
+
+_RES_ZIP_DIR = 'RESZIPS'
+_SRCJAR_DIR = 'SRCJARS'
+_AAR_DIR = 'AARS'
+
+
+def _SrcRelative(path):
+  """Returns relative path to top-level src dir."""
+  return os.path.relpath(path, build_utils.DIR_SOURCE_ROOT)
+
+
+def _GenerateProjectFile(android_manifest,
+                         android_sdk_root,
+                         cache_dir,
+                         sources=None,
+                         classpath=None,
+                         srcjar_sources=None,
+                         resource_sources=None,
+                         custom_lint_jars=None,
+                         custom_annotation_zips=None,
+                         android_sdk_version=None,
+                         baseline_path=None):
+  project = ElementTree.Element('project')
+  root = ElementTree.SubElement(project, 'root')
+  # Run lint from output directory: crbug.com/1115594
+  root.set('dir', os.getcwd())
+  sdk = ElementTree.SubElement(project, 'sdk')
+  # Lint requires that the sdk path be an absolute path.
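+  # For reference, the project file built here comes out roughly like this
+  # (directory and file names illustrative):
+  #   <project>
+  #     <root dir="out/Debug"/>
+  #     <sdk dir="/absolute/path/to/android_sdk"/>
+  #     <cache dir="cache"/>
+  #     <module name="main" android="true" library="false">
+  #       <manifest file="AndroidManifest.xml"/>
+  #       <src file="Foo.java"/>
+  #       <classpath file="bar.jar"/>
+  #     </module>
+  #   </project>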
+  sdk.set('dir', os.path.abspath(android_sdk_root))
+  if baseline_path is not None:
+    baseline = ElementTree.SubElement(project, 'baseline')
+    baseline.set('file', baseline_path)
+  cache = ElementTree.SubElement(project, 'cache')
+  cache.set('dir', cache_dir)
+  main_module = ElementTree.SubElement(project, 'module')
+  main_module.set('name', 'main')
+  main_module.set('android', 'true')
+  main_module.set('library', 'false')
+  if android_sdk_version:
+    main_module.set('compile_sdk_version', android_sdk_version)
+  manifest = ElementTree.SubElement(main_module, 'manifest')
+  manifest.set('file', android_manifest)
+  if srcjar_sources:
+    for srcjar_file in srcjar_sources:
+      src = ElementTree.SubElement(main_module, 'src')
+      src.set('file', srcjar_file)
+  if sources:
+    for source in sources:
+      src = ElementTree.SubElement(main_module, 'src')
+      src.set('file', source)
+  if classpath:
+    for file_path in classpath:
+      classpath_element = ElementTree.SubElement(main_module, 'classpath')
+      classpath_element.set('file', file_path)
+  if resource_sources:
+    for resource_file in resource_sources:
+      resource = ElementTree.SubElement(main_module, 'resource')
+      resource.set('file', resource_file)
+  if custom_lint_jars:
+    for lint_jar in custom_lint_jars:
+      lint = ElementTree.SubElement(main_module, 'lint-checks')
+      lint.set('file', lint_jar)
+  if custom_annotation_zips:
+    for annotation_zip in custom_annotation_zips:
+      annotation = ElementTree.SubElement(main_module, 'annotations')
+      annotation.set('file', annotation_zip)
+  return project
+
+
+def _RetrieveBackportedMethods(backported_methods_path):
+  with open(backported_methods_path) as f:
+    methods = f.read().splitlines()
+  # Methods look like:
+  #   java/util/Set#of(Ljava/lang/Object;)Ljava/util/Set;
+  # But error message looks like:
+  #   Call requires API level R (current min is 21): java.util.Set#of [NewApi]
+  methods = (m.replace('/', '\\.') for m in methods)
+  methods = (m[:m.index('(')] for m in methods)
+  return sorted(set(methods))
+
+
+def _GenerateConfigXmlTree(orig_config_path, backported_methods):
+  if orig_config_path:
+    root_node = ElementTree.parse(orig_config_path).getroot()
+  else:
+    root_node = ElementTree.fromstring('<lint/>')
+
+  issue_node = ElementTree.SubElement(root_node, 'issue')
+  issue_node.attrib['id'] = 'NewApi'
+  ignore_node = ElementTree.SubElement(issue_node, 'ignore')
+  ignore_node.attrib['regexp'] = '|'.join(backported_methods)
+  return root_node
+
+
+def _GenerateAndroidManifest(original_manifest_path, extra_manifest_paths,
+                             min_sdk_version, android_sdk_version):
+  # Set minSdkVersion in the manifest to the correct value.
+  doc, manifest, app_node = manifest_utils.ParseManifest(
+      original_manifest_path)
+
+  # TODO(crbug.com/1126301): Should this be done using manifest merging?
+  # Add anything in the application node of the extra manifests to the main
+  # manifest to prevent unused resource errors.
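+  # e.g. <activity>, <service>, <provider> and <receiver> entries declared by
+  # the extra manifests end up under the main manifest's <application> node.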
+ for path in extra_manifest_paths: + _, _, extra_app_node = manifest_utils.ParseManifest(path) + for node in extra_app_node: + app_node.append(node) + + uses_sdk = manifest.find('./uses-sdk') + if uses_sdk is None: + uses_sdk = ElementTree.Element('uses-sdk') + manifest.insert(0, uses_sdk) + uses_sdk.set('{%s}minSdkVersion' % manifest_utils.ANDROID_NAMESPACE, + min_sdk_version) + uses_sdk.set('{%s}targetSdkVersion' % manifest_utils.ANDROID_NAMESPACE, + android_sdk_version) + return doc + + +def _WriteXmlFile(root, path): + logging.info('Writing xml file %s', path) + build_utils.MakeDirectory(os.path.dirname(path)) + with action_helpers.atomic_output(path) as f: + # Although we can write it just with ElementTree.tostring, using minidom + # makes it a lot easier to read as a human (also on code search). + f.write( + minidom.parseString(ElementTree.tostring( + root, encoding='utf-8')).toprettyxml(indent=' ').encode('utf-8')) + + +def _RunLint(create_cache, + custom_lint_jar_path, + lint_jar_path, + backported_methods_path, + config_path, + manifest_path, + extra_manifest_paths, + sources, + classpath, + cache_dir, + android_sdk_version, + aars, + srcjars, + min_sdk_version, + resource_sources, + resource_zips, + android_sdk_root, + lint_gen_dir, + baseline, + testonly_target=False, + warnings_as_errors=False): + logging.info('Lint starting') + + if create_cache: + # Occasionally lint may crash due to re-using intermediate files from older + # lint runs. See https://crbug.com/1258178 for context. + logging.info('Clearing cache dir %s before creating cache.', cache_dir) + shutil.rmtree(cache_dir, ignore_errors=True) + os.makedirs(cache_dir) + + if baseline and not os.path.exists(baseline): + # Generating new baselines is only done locally, and requires more memory to + # avoid OOMs. + creating_baseline = True + lint_xmx = '4G' + else: + creating_baseline = False + lint_xmx = '2G' + + # All paths in lint are based off of relative paths from root with root as the + # prefix. Path variable substitution is based off of prefix matching so custom + # path variables need to match exactly in order to show up in baseline files. + # e.g. lint_path=path/to/output/dir/../../file/in/src + root_path = os.getcwd() # This is usually the output directory. + pathvar_src = os.path.join( + root_path, os.path.relpath(build_utils.DIR_SOURCE_ROOT, start=root_path)) + + cmd = build_utils.JavaCmd(xmx=lint_xmx) + [ + '-cp', + '{}:{}'.format(lint_jar_path, custom_lint_jar_path), + 'org.chromium.build.CustomLint', + '--sdk-home', + android_sdk_root, + '--jdk-home', + build_utils.JAVA_HOME, + '--path-variables', + f'SRC={pathvar_src}', + '--quiet', # Silences lint's "." progress updates. + '--stacktrace', # Prints full stacktraces for internal lint errors. 
+ '--disable', + ','.join(_DISABLED_ALWAYS), + ] + + if testonly_target: + cmd.extend(['--disable', ','.join(_DISABLED_FOR_TESTS)]) + + if not manifest_path: + manifest_path = os.path.join(build_utils.DIR_SOURCE_ROOT, 'build', + 'android', 'AndroidManifest.xml') + + logging.info('Generating config.xml') + backported_methods = _RetrieveBackportedMethods(backported_methods_path) + config_xml_node = _GenerateConfigXmlTree(config_path, backported_methods) + generated_config_path = os.path.join(lint_gen_dir, 'config.xml') + _WriteXmlFile(config_xml_node, generated_config_path) + cmd.extend(['--config', generated_config_path]) + + logging.info('Generating Android manifest file') + android_manifest_tree = _GenerateAndroidManifest(manifest_path, + extra_manifest_paths, + min_sdk_version, + android_sdk_version) + # Include the rebased manifest_path in the lint generated path so that it is + # clear in error messages where the original AndroidManifest.xml came from. + lint_android_manifest_path = os.path.join(lint_gen_dir, manifest_path) + _WriteXmlFile(android_manifest_tree.getroot(), lint_android_manifest_path) + + resource_root_dir = os.path.join(lint_gen_dir, _RES_ZIP_DIR) + # These are zip files with generated resources (e. g. strings from GRD). + logging.info('Extracting resource zips') + for resource_zip in resource_zips: + # Use a consistent root and name rather than a temporary file so that + # suppressions can be local to the lint target and the resource target. + resource_dir = os.path.join(resource_root_dir, resource_zip) + shutil.rmtree(resource_dir, True) + os.makedirs(resource_dir) + resource_sources.extend( + build_utils.ExtractAll(resource_zip, path=resource_dir)) + + logging.info('Extracting aars') + aar_root_dir = os.path.join(lint_gen_dir, _AAR_DIR) + custom_lint_jars = [] + custom_annotation_zips = [] + if aars: + for aar in aars: + # Use relative source for aar files since they are not generated. + aar_dir = os.path.join(aar_root_dir, + os.path.splitext(_SrcRelative(aar))[0]) + shutil.rmtree(aar_dir, True) + os.makedirs(aar_dir) + aar_files = build_utils.ExtractAll(aar, path=aar_dir) + for f in aar_files: + if f.endswith('lint.jar'): + custom_lint_jars.append(f) + elif f.endswith('annotations.zip'): + custom_annotation_zips.append(f) + + logging.info('Extracting srcjars') + srcjar_root_dir = os.path.join(lint_gen_dir, _SRCJAR_DIR) + srcjar_sources = [] + if srcjars: + for srcjar in srcjars: + # Use path without extensions since otherwise the file name includes + # .srcjar and lint treats it as a srcjar. + srcjar_dir = os.path.join(srcjar_root_dir, os.path.splitext(srcjar)[0]) + shutil.rmtree(srcjar_dir, True) + os.makedirs(srcjar_dir) + # Sadly lint's srcjar support is broken since it only considers the first + # srcjar. Until we roll a lint version with that fixed, we need to extract + # it ourselves. + srcjar_sources.extend(build_utils.ExtractAll(srcjar, path=srcjar_dir)) + + logging.info('Generating project file') + project_file_root = _GenerateProjectFile(lint_android_manifest_path, + android_sdk_root, cache_dir, sources, + classpath, srcjar_sources, + resource_sources, custom_lint_jars, + custom_annotation_zips, + android_sdk_version, baseline) + + project_xml_path = os.path.join(lint_gen_dir, 'project.xml') + _WriteXmlFile(project_file_root, project_xml_path) + cmd += ['--project', project_xml_path] + + # This filter is necessary for JDK11. 
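+  # When run under JDK11, lint's use of reflection makes the JVM print
+  # "WARNING: An illegal reflective access operation has occurred" (plus
+  # several follow-up lines) to stderr; the filter below strips those so they
+  # are not mistaken for lint findings.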
+ stderr_filter = build_utils.FilterReflectiveAccessJavaWarnings + stdout_filter = lambda x: build_utils.FilterLines(x, 'No issues found') + + start = time.time() + logging.debug('Lint command %s', ' '.join(cmd)) + failed = True + + if creating_baseline and not warnings_as_errors: + # Allow error code 6 when creating a baseline: ERRNO_CREATED_BASELINE + fail_func = lambda returncode, _: returncode not in (0, 6) + else: + fail_func = lambda returncode, _: returncode != 0 + + try: + failed = bool( + build_utils.CheckOutput(cmd, + print_stdout=True, + stdout_filter=stdout_filter, + stderr_filter=stderr_filter, + fail_on_output=warnings_as_errors, + fail_func=fail_func)) + finally: + # When not treating warnings as errors, display the extra footer. + is_debug = os.environ.get('LINT_DEBUG', '0') != '0' + + if failed: + print('- For more help with lint in Chrome:', _LINT_MD_URL) + if is_debug: + print('- DEBUG MODE: Here is the project.xml: {}'.format( + _SrcRelative(project_xml_path))) + else: + print('- Run with LINT_DEBUG=1 to enable lint configuration debugging') + + end = time.time() - start + logging.info('Lint command took %ss', end) + if not is_debug: + shutil.rmtree(aar_root_dir, ignore_errors=True) + shutil.rmtree(resource_root_dir, ignore_errors=True) + shutil.rmtree(srcjar_root_dir, ignore_errors=True) + os.unlink(project_xml_path) + + logging.info('Lint completed') + + +def _ParseArgs(argv): + parser = argparse.ArgumentParser() + action_helpers.add_depfile_arg(parser) + parser.add_argument('--target-name', help='Fully qualified GN target name.') + parser.add_argument('--skip-build-server', + action='store_true', + help='Avoid using the build server.') + parser.add_argument('--use-build-server', + action='store_true', + help='Always use the build server.') + parser.add_argument('--lint-jar-path', + required=True, + help='Path to the lint jar.') + parser.add_argument('--custom-lint-jar-path', + required=True, + help='Path to our custom lint jar.') + parser.add_argument('--backported-methods', + help='Path to backported methods file created by R8.') + parser.add_argument('--cache-dir', + required=True, + help='Path to the directory in which the android cache ' + 'directory tree should be stored.') + parser.add_argument('--config-path', help='Path to lint suppressions file.') + parser.add_argument('--lint-gen-dir', + required=True, + help='Path to store generated xml files.') + parser.add_argument('--stamp', help='Path to stamp upon success.') + parser.add_argument('--android-sdk-version', + help='Version (API level) of the Android SDK used for ' + 'building.') + parser.add_argument('--min-sdk-version', + required=True, + help='Minimal SDK version to lint against.') + parser.add_argument('--android-sdk-root', + required=True, + help='Lint needs an explicit path to the android sdk.') + parser.add_argument('--testonly', + action='store_true', + help='If set, some checks like UnusedResources will be ' + 'disabled since they are not helpful for test ' + 'targets.') + parser.add_argument('--create-cache', + action='store_true', + help='Whether this invocation is just warming the cache.') + parser.add_argument('--warnings-as-errors', + action='store_true', + help='Treat all warnings as errors.') + parser.add_argument('--sources', + help='A list of files containing java and kotlin source ' + 'files.') + parser.add_argument('--aars', help='GN list of included aars.') + parser.add_argument('--srcjars', help='GN list of included srcjars.') + parser.add_argument('--manifest-path', + help='Path to 
original AndroidManifest.xml') + parser.add_argument('--extra-manifest-paths', + action='append', + help='GYP-list of manifest paths to merge into the ' + 'original AndroidManifest.xml') + parser.add_argument('--resource-sources', + default=[], + action='append', + help='GYP-list of resource sources files, similar to ' + 'java sources files, but for resource files.') + parser.add_argument('--resource-zips', + default=[], + action='append', + help='GYP-list of resource zips, zip files of generated ' + 'resource files.') + parser.add_argument('--classpath', + help='List of jars to add to the classpath.') + parser.add_argument('--baseline', + help='Baseline file to ignore existing errors and fail ' + 'on new errors.') + + args = parser.parse_args(build_utils.ExpandFileArgs(argv)) + args.sources = action_helpers.parse_gn_list(args.sources) + args.aars = action_helpers.parse_gn_list(args.aars) + args.srcjars = action_helpers.parse_gn_list(args.srcjars) + args.resource_sources = action_helpers.parse_gn_list(args.resource_sources) + args.extra_manifest_paths = action_helpers.parse_gn_list( + args.extra_manifest_paths) + args.resource_zips = action_helpers.parse_gn_list(args.resource_zips) + args.classpath = action_helpers.parse_gn_list(args.classpath) + + if args.baseline: + assert os.path.basename(args.baseline) == 'lint-baseline.xml', ( + 'The baseline file needs to be named "lint-baseline.xml" in order for ' + 'the autoroller to find and update it whenever lint is rolled to a new ' + 'version.') + + return args + + +def main(): + build_utils.InitLogging('LINT_DEBUG') + args = _ParseArgs(sys.argv[1:]) + + # TODO(wnwen): Consider removing lint cache now that there are only two lint + # invocations. + # Avoid parallelizing cache creation since lint runs without the cache defeat + # the purpose of creating the cache in the first place. 
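+  # MaybeRunCommand() can delegate this invocation to a persistent local build
+  # server process; when it does so it returns True and the script exits early
+  # here, with the stamp file handled by the delegated run.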
+ if (not args.create_cache and not args.skip_build_server + and server_utils.MaybeRunCommand(name=args.target_name, + argv=sys.argv, + stamp_file=args.stamp, + force=args.use_build_server)): + return + + sources = [] + for sources_file in args.sources: + sources.extend(build_utils.ReadSourcesList(sources_file)) + resource_sources = [] + for resource_sources_file in args.resource_sources: + resource_sources.extend(build_utils.ReadSourcesList(resource_sources_file)) + + possible_depfile_deps = (args.srcjars + args.resource_zips + sources + + resource_sources + [ + args.baseline, + args.manifest_path, + ]) + depfile_deps = [p for p in possible_depfile_deps if p] + + _RunLint(args.create_cache, + args.custom_lint_jar_path, + args.lint_jar_path, + args.backported_methods, + args.config_path, + args.manifest_path, + args.extra_manifest_paths, + sources, + args.classpath, + args.cache_dir, + args.android_sdk_version, + args.aars, + args.srcjars, + args.min_sdk_version, + resource_sources, + args.resource_zips, + args.android_sdk_root, + args.lint_gen_dir, + args.baseline, + testonly_target=args.testonly, + warnings_as_errors=args.warnings_as_errors) + logging.info('Creating stamp file') + build_utils.Touch(args.stamp) + + if args.depfile: + action_helpers.write_depfile(args.depfile, args.stamp, depfile_deps) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/android/gyp/lint.pydeps b/android/gyp/lint.pydeps new file mode 100644 index 000000000000..84bafde4795d --- /dev/null +++ b/android/gyp/lint.pydeps @@ -0,0 +1,9 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/lint.pydeps build/android/gyp/lint.py +../../action_helpers.py +../../gn_helpers.py +lint.py +util/__init__.py +util/build_utils.py +util/manifest_utils.py +util/server_utils.py diff --git a/android/gyp/merge_manifest.py b/android/gyp/merge_manifest.py new file mode 100755 index 000000000000..a9c2535ae851 --- /dev/null +++ b/android/gyp/merge_manifest.py @@ -0,0 +1,153 @@ +#!/usr/bin/env python3 + +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Merges dependency Android manifests into a root manifest.""" + +import argparse +import collections +import contextlib +import os +import sys +import tempfile +import xml.etree.ElementTree as ElementTree + +from util import build_utils +from util import manifest_utils +import action_helpers # build_utils adds //build to sys.path. 
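+# For reference, main() below ends up invoking the merger with a command line
+# roughly like (paths and values illustrative):
+#   java -cp manifest-merger.jar com.android.manifmerger.Merger \
+#       --out merged/AndroidManifest.xml --main root/AndroidManifest.xml \
+#       --libs dep1/AndroidManifest.xml:dep2/AndroidManifest.xml \
+#       --property MIN_SDK_VERSION=24 --property TARGET_SDK_VERSION=33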
+ +_MANIFEST_MERGER_MAIN_CLASS = 'com.android.manifmerger.Merger' + + +@contextlib.contextmanager +def _ProcessMainManifest(manifest_path, min_sdk_version, target_sdk_version, + max_sdk_version, manifest_package): + """Patches the main Android manifest""" + doc, manifest, _ = manifest_utils.ParseManifest(manifest_path) + manifest_utils.SetUsesSdk(manifest, target_sdk_version, min_sdk_version, + max_sdk_version) + assert manifest_utils.GetPackage(manifest) or manifest_package, \ + 'Must set manifest package in GN or in AndroidManifest.xml' + if manifest_package: + manifest.set('package', manifest_package) + tmp_prefix = manifest_path.replace(os.path.sep, '-') + with tempfile.NamedTemporaryFile(prefix=tmp_prefix) as patched_manifest: + manifest_utils.SaveManifest(doc, patched_manifest.name) + yield patched_manifest.name, manifest_utils.GetPackage(manifest) + + +@contextlib.contextmanager +def _ProcessOtherManifest(manifest_path, target_sdk_version, + seen_package_names): + """Patches non-main AndroidManifest.xml if necessary.""" + # 1. Ensure targetSdkVersion is set to the expected value to avoid + # spurious permissions being added (b/222331337). + # 2. Ensure all manifests have a unique package name so that the merger + # does not fail when this happens. + doc, manifest, _ = manifest_utils.ParseManifest(manifest_path) + + changed_api = manifest_utils.SetTargetApiIfUnset(manifest, target_sdk_version) + + package_name = manifest_utils.GetPackage(manifest) + package_count = seen_package_names[package_name] + seen_package_names[package_name] += 1 + if package_count > 0: + manifest.set('package', f'{package_name}_{package_count}') + + if package_count > 0 or changed_api: + tmp_prefix = manifest_path.replace(os.path.sep, '-') + with tempfile.NamedTemporaryFile(prefix=tmp_prefix) as patched_manifest: + manifest_utils.SaveManifest(doc, patched_manifest.name) + yield patched_manifest.name + else: + yield manifest_path + + +def main(argv): + argv = build_utils.ExpandFileArgs(argv) + parser = argparse.ArgumentParser(description=__doc__) + action_helpers.add_depfile_arg(parser) + parser.add_argument('--manifest-merger-jar', + help='Path to SDK\'s manifest merger jar.', + required=True) + parser.add_argument('--root-manifest', + help='Root manifest which to merge into', + required=True) + parser.add_argument('--output', help='Output manifest path', required=True) + parser.add_argument('--extras', + help='GN list of additional manifest to merge') + parser.add_argument( + '--min-sdk-version', + required=True, + help='android:minSdkVersion for merging.') + parser.add_argument( + '--target-sdk-version', + required=True, + help='android:targetSdkVersion for merging.') + parser.add_argument( + '--max-sdk-version', help='android:maxSdkVersion for merging.') + parser.add_argument( + '--manifest-package', + help='Package name of the merged AndroidManifest.xml.') + parser.add_argument('--warnings-as-errors', + action='store_true', + help='Treat all warnings as errors.') + args = parser.parse_args(argv) + + with action_helpers.atomic_output(args.output) as output: + cmd = build_utils.JavaCmd() + [ + '-cp', + args.manifest_merger_jar, + _MANIFEST_MERGER_MAIN_CLASS, + '--out', + output.name, + '--property', + 'MIN_SDK_VERSION=' + args.min_sdk_version, + '--property', + 'TARGET_SDK_VERSION=' + args.target_sdk_version, + ] + + if args.max_sdk_version: + cmd += [ + '--property', + 'MAX_SDK_VERSION=' + args.max_sdk_version, + ] + + extras = action_helpers.parse_gn_list(args.extras) + + with contextlib.ExitStack() as 
stack: + root_manifest, package = stack.enter_context( + _ProcessMainManifest(args.root_manifest, args.min_sdk_version, + args.target_sdk_version, args.max_sdk_version, + args.manifest_package)) + if extras: + seen_package_names = collections.Counter() + extras_processed = [ + stack.enter_context( + _ProcessOtherManifest(e, args.target_sdk_version, + seen_package_names)) for e in extras + ] + cmd += ['--libs', ':'.join(extras_processed)] + cmd += [ + '--main', + root_manifest, + '--property', + 'PACKAGE=' + package, + '--remove-tools-declarations', + ] + build_utils.CheckOutput( + cmd, + # https://issuetracker.google.com/issues/63514300: + # The merger doesn't set a nonzero exit code for failures. + fail_func=lambda returncode, stderr: returncode != 0 or build_utils. + IsTimeStale(output.name, [root_manifest] + extras), + fail_on_output=args.warnings_as_errors) + + if args.depfile: + action_helpers.write_depfile(args.depfile, args.output, inputs=extras) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/android/gyp/merge_manifest.pydeps b/android/gyp/merge_manifest.pydeps new file mode 100644 index 000000000000..003690ff3029 --- /dev/null +++ b/android/gyp/merge_manifest.pydeps @@ -0,0 +1,8 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/merge_manifest.pydeps build/android/gyp/merge_manifest.py +../../action_helpers.py +../../gn_helpers.py +merge_manifest.py +util/__init__.py +util/build_utils.py +util/manifest_utils.py diff --git a/android/gyp/nocompile_test.py b/android/gyp/nocompile_test.py new file mode 100755 index 000000000000..c3b02d2c961f --- /dev/null +++ b/android/gyp/nocompile_test.py @@ -0,0 +1,212 @@ +#!/usr/bin/env python3 +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Checks that compiling targets in BUILD.gn file fails.""" + +import argparse +import json +import os +import subprocess +import re +import sys +from util import build_utils + +_CHROMIUM_SRC = os.path.normpath(os.path.join(__file__, '..', '..', '..', '..')) +_NINJA_PATH = os.path.join(_CHROMIUM_SRC, 'third_party', 'ninja', 'ninja') + +# Relative to _CHROMIUM_SRC +_GN_SRC_REL_PATH = os.path.join('buildtools', 'linux64', 'gn') + +# Regex for determining whether compile failed because 'gn gen' needs to be run. +_GN_GEN_REGEX = re.compile(r'ninja: (error|fatal):') + + +def _raise_command_exception(args, returncode, output): + """Raises an exception whose message describes a command failure. + + Args: + args: shell command-line (as passed to subprocess.Popen()) + returncode: status code. + output: command output. + Raises: + a new Exception. + """ + message = 'Command failed with status {}: {}\n' \ + 'Output:-----------------------------------------\n{}\n' \ + '------------------------------------------------\n'.format( + returncode, args, output) + raise Exception(message) + + +def _run_command(args, cwd=None): + """Runs shell command. Raises exception if command fails.""" + p = subprocess.Popen(args, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + cwd=cwd) + pout, _ = p.communicate() + if p.returncode != 0: + _raise_command_exception(args, p.returncode, pout) + + +def _run_command_get_failure_output(args): + """Runs shell command. + + Returns: + Command output if command fails, None if command succeeds. 
+ """ + p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + pout, _ = p.communicate() + + if p.returncode == 0: + return None + + # For Python3 only: + if isinstance(pout, bytes) and sys.version_info >= (3, ): + pout = pout.decode('utf-8') + return '' if pout is None else pout + + +def _copy_and_append_gn_args(src_args_path, dest_args_path, extra_args): + """Copies args.gn. + + Args: + src_args_path: args.gn file to copy. + dest_args_path: Copy file destination. + extra_args: Text to append to args.gn after copy. + """ + with open(src_args_path) as f_in, open(dest_args_path, 'w') as f_out: + f_out.write(f_in.read()) + f_out.write('\n') + f_out.write('\n'.join(extra_args)) + + +def _find_regex_in_test_failure_output(test_output, regex): + """Searches for regex in test output. + + Args: + test_output: test output. + regex: regular expression to search for. + Returns: + Whether the regular expression was found in the part of the test output + after the 'FAILED' message. + + If the regex does not contain '\n': + the first 5 lines after the 'FAILED' message (including the text on the + line after the 'FAILED' message) is searched. + Otherwise: + the entire test output after the 'FAILED' message is searched. + """ + if test_output is None: + return False + + failed_index = test_output.find('FAILED') + if failed_index < 0: + return False + + failure_message = test_output[failed_index:] + if regex.find('\n') >= 0: + return re.search(regex, failure_message) + + return _search_regex_in_list(failure_message.split('\n')[:5], regex) + + +def _search_regex_in_list(value, regex): + for line in value: + if re.search(regex, line): + return True + return False + + +def _do_build_get_failure_output(gn_path, gn_cmd, options): + # Extract directory from test target. As all of the test targets are declared + # in the same BUILD.gn file, it does not matter which test target is used. + target_dir = gn_path.rsplit(':', 1)[0] + + if gn_cmd is not None: + gn_args = [ + _GN_SRC_REL_PATH, '--root-target=' + target_dir, gn_cmd, + os.path.relpath(options.out_dir, _CHROMIUM_SRC) + ] + _run_command(gn_args, cwd=_CHROMIUM_SRC) + + ninja_args = [_NINJA_PATH, '-C', options.out_dir, gn_path] + return _run_command_get_failure_output(ninja_args) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--gn-args-path', + required=True, + help='Path to args.gn file.') + parser.add_argument('--test-configs-path', + required=True, + help='Path to file with test configurations') + parser.add_argument('--out-dir', + required=True, + help='Path to output directory to use for compilation.') + parser.add_argument('--stamp', help='Path to touch.') + options = parser.parse_args() + + with open(options.test_configs_path) as f: + # Escape '\' in '\.' now. This avoids having to do the escaping in the test + # specification. + config_text = f.read().replace(r'\.', r'\\.') + test_configs = json.loads(config_text) + + if not os.path.exists(options.out_dir): + os.makedirs(options.out_dir) + + out_gn_args_path = os.path.join(options.out_dir, 'args.gn') + extra_gn_args = [ + 'enable_android_nocompile_tests = true', + 'treat_warnings_as_errors = true', + # GOMA does not work with non-standard output directories. 
+ 'use_goma = false', + ] + _copy_and_append_gn_args(options.gn_args_path, out_gn_args_path, + extra_gn_args) + + ran_gn_gen = False + did_clean_build = False + error_messages = [] + for config in test_configs: + # Strip leading '//' + gn_path = config['target'][2:] + expect_regex = config['expect_regex'] + + test_output = _do_build_get_failure_output(gn_path, None, options) + + # 'gn gen' takes > 1s to run. Only run 'gn gen' if it is needed for compile. + if (test_output + and _search_regex_in_list(test_output.split('\n'), _GN_GEN_REGEX)): + assert not ran_gn_gen + ran_gn_gen = True + test_output = _do_build_get_failure_output(gn_path, 'gen', options) + + if (not _find_regex_in_test_failure_output(test_output, expect_regex) + and not did_clean_build): + # Ensure the failure is not due to incremental build. + did_clean_build = True + test_output = _do_build_get_failure_output(gn_path, 'clean', options) + + if not _find_regex_in_test_failure_output(test_output, expect_regex): + if test_output is None: + # Purpose of quotes at beginning of message is to make it clear that + # "Compile successful." is not a compiler log message. + test_output = '""\nCompile successful.' + error_message = '//{} failed.\nExpected compile output pattern:\n'\ + '{}\nActual compile output:\n{}'.format( + gn_path, expect_regex, test_output) + error_messages.append(error_message) + + if error_messages: + raise Exception('\n'.join(error_messages)) + + if options.stamp: + build_utils.Touch(options.stamp) + + +if __name__ == '__main__': + main() diff --git a/android/gyp/optimize_resources.py b/android/gyp/optimize_resources.py new file mode 100755 index 000000000000..f1be4ccf1a9c --- /dev/null +++ b/android/gyp/optimize_resources.py @@ -0,0 +1,152 @@ +#!/usr/bin/env python3 +# +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import logging +import os +import sys + +from util import build_utils +import action_helpers # build_utils adds //build to sys.path. + + +def _ParseArgs(args): + """Parses command line options. + + Returns: + An options object as from argparse.ArgumentParser.parse_args() + """ + parser = argparse.ArgumentParser() + parser.add_argument('--aapt2-path', + required=True, + help='Path to the Android aapt2 tool.') + parser.add_argument( + '--short-resource-paths', + action='store_true', + help='Whether to shorten resource paths inside the apk or module.') + parser.add_argument( + '--strip-resource-names', + action='store_true', + help='Whether to strip resource names from the resource table of the apk ' + 'or module.') + parser.add_argument('--proto-path', + required=True, + help='Input proto format resources APK.') + parser.add_argument('--resources-config-paths', + default='[]', + help='GN list of paths to aapt2 resources config files.') + parser.add_argument('--r-text-in', + required=True, + help='Path to R.txt. 
Used to exclude id/ resources.')
+  parser.add_argument(
+      '--resources-path-map-out-path',
+      help='Path to file produced by aapt2 that maps original resource paths '
+      'to shortened resource paths inside the apk or module.')
+  parser.add_argument('--optimized-proto-path',
+                      required=True,
+                      help='Output for `aapt2 optimize`.')
+  options = parser.parse_args(args)
+
+  options.resources_config_paths = action_helpers.parse_gn_list(
+      options.resources_config_paths)
+
+  if options.resources_path_map_out_path and not options.short_resource_paths:
+    parser.error(
+        '--resources-path-map-out-path requires --short-resource-paths')
+  return options
+
+
+def _CombineResourceConfigs(resources_config_paths, out_config_path):
+  with open(out_config_path, 'w') as out_config:
+    for config_path in resources_config_paths:
+      with open(config_path) as config:
+        out_config.write(config.read())
+        out_config.write('\n')
+
+
+def _ExtractNonCollapsableResources(rtxt_path):
+  """Extracts resources that should not be collapsed from the R.txt file.
+
+  Resources of type ID are references to UI elements/views. They are used by
+  UI automation testing frameworks. They are kept in so that they don't break
+  tests, even though they may not actually be used during runtime. See
+  https://crbug.com/900993
+  App icons (aka mipmaps) are sometimes referenced by other apps by name so
+  must be kept as well. See https://b/161564466
+
+  Args:
+    rtxt_path: Path to R.txt file with all the resources.
+  Returns:
+    List of resources in the form of <resource_type>/<resource_name>.
+  """
+  resources = []
+  _NO_COLLAPSE_TYPES = ['id', 'mipmap']
+  with open(rtxt_path) as rtxt:
+    for line in rtxt:
+      for resource_type in _NO_COLLAPSE_TYPES:
+        if ' {} '.format(resource_type) in line:
+          resource_name = line.split()[2]
+          resources.append('{}/{}'.format(resource_type, resource_name))
+  return resources
+
+
+def _OptimizeApk(output, options, temp_dir, unoptimized_path, r_txt_path):
+  """Optimize intermediate .ap_ file with aapt2.
+
+  Args:
+    output: Path to write to.
+    options: The command-line options.
+    temp_dir: A temporary directory.
+    unoptimized_path: path of the apk to optimize.
+    r_txt_path: path to the R.txt file of the unoptimized apk.
+  """
+  optimize_command = [
+      options.aapt2_path,
+      'optimize',
+      unoptimized_path,
+      '-o',
+      output,
+  ]
+
+  # Optimize the resources.pb file by obfuscating resource names and only
+  # allowing usage via R.java constants.
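+  # The entries appended to the aapt2 resources config below look like, e.g.
+  # (resource names illustrative):
+  #   id/toolbar#no_collapse
+  #   mipmap/app_icon#no_collapse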
+ if options.strip_resource_names: + no_collapse_resources = _ExtractNonCollapsableResources(r_txt_path) + gen_config_path = os.path.join(temp_dir, 'aapt2.config') + if options.resources_config_paths: + _CombineResourceConfigs(options.resources_config_paths, gen_config_path) + with open(gen_config_path, 'a') as config: + for resource in no_collapse_resources: + config.write('{}#no_collapse\n'.format(resource)) + + optimize_command += [ + '--collapse-resource-names', + '--resources-config-path', + gen_config_path, + ] + + if options.short_resource_paths: + optimize_command += ['--shorten-resource-paths'] + if options.resources_path_map_out_path: + optimize_command += [ + '--resource-path-shortening-map', options.resources_path_map_out_path + ] + + logging.debug('Running aapt2 optimize') + build_utils.CheckOutput(optimize_command, + print_stdout=False, + print_stderr=False) + + +def main(args): + options = _ParseArgs(args) + with build_utils.TempDir() as temp_dir: + _OptimizeApk(options.optimized_proto_path, options, temp_dir, + options.proto_path, options.r_text_in) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/android/gyp/optimize_resources.pydeps b/android/gyp/optimize_resources.pydeps new file mode 100644 index 000000000000..be3e8e7135d4 --- /dev/null +++ b/android/gyp/optimize_resources.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/optimize_resources.pydeps build/android/gyp/optimize_resources.py +../../action_helpers.py +../../gn_helpers.py +optimize_resources.py +util/__init__.py +util/build_utils.py diff --git a/android/gyp/prepare_resources.py b/android/gyp/prepare_resources.py new file mode 100755 index 000000000000..e86711c3b765 --- /dev/null +++ b/android/gyp/prepare_resources.py @@ -0,0 +1,209 @@ +#!/usr/bin/env python3 +# +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Process Android resource directories to generate .resources.zip and R.txt +files.""" + +import argparse +import os +import shutil +import sys +import zipfile + +from util import build_utils +from util import jar_info_utils +from util import md5_check +from util import resources_parser +from util import resource_utils +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers + + +def _ParseArgs(args): + """Parses command line options. + + Returns: + An options object as from argparse.ArgumentParser.parse_args() + """ + parser = argparse.ArgumentParser(description=__doc__) + action_helpers.add_depfile_arg(parser) + + parser.add_argument('--res-sources-path', + required=True, + help='Path to a list of input resources for this target.') + + parser.add_argument( + '--r-text-in', + help='Path to pre-existing R.txt. 
Its resource IDs override those found ' + 'in the generated R.txt when generating R.java.') + + parser.add_argument( + '--allow-missing-resources', + action='store_true', + help='Do not fail if some resources exist in the res/ dir but are not ' + 'listed in the sources.') + + parser.add_argument( + '--resource-zip-out', + help='Path to a zip archive containing all resources from ' + '--resource-dirs, merged into a single directory tree.') + + parser.add_argument('--r-text-out', + help='Path to store the generated R.txt file.') + + parser.add_argument('--strip-drawables', + action="store_true", + help='Remove drawables from the resources.') + + options = parser.parse_args(args) + + with open(options.res_sources_path) as f: + options.sources = f.read().splitlines() + options.resource_dirs = resource_utils.DeduceResourceDirsFromFileList( + options.sources) + + return options + + +def _CheckAllFilesListed(resource_files, resource_dirs): + resource_files = set(resource_files) + missing_files = [] + for path, _ in resource_utils.IterResourceFilesInDirectories(resource_dirs): + if path not in resource_files: + missing_files.append(path) + + if missing_files: + sys.stderr.write('Error: Found files not listed in the sources list of ' + 'the BUILD.gn target:\n') + for path in missing_files: + sys.stderr.write('{}\n'.format(path)) + sys.exit(1) + + +def _ZipResources(resource_dirs, zip_path, ignore_pattern): + # ignore_pattern is a string of ':' delimited list of globs used to ignore + # files that should not be part of the final resource zip. + files_to_zip = [] + path_info = resource_utils.ResourceInfoFile() + for index, resource_dir in enumerate(resource_dirs): + attributed_aar = None + if not resource_dir.startswith('..'): + aar_source_info_path = os.path.join( + os.path.dirname(resource_dir), 'source.info') + if os.path.exists(aar_source_info_path): + attributed_aar = jar_info_utils.ReadAarSourceInfo(aar_source_info_path) + + for path, archive_path in resource_utils.IterResourceFilesInDirectories( + [resource_dir], ignore_pattern): + attributed_path = path + if attributed_aar: + attributed_path = os.path.join(attributed_aar, 'res', + path[len(resource_dir) + 1:]) + # Use the non-prefixed archive_path in the .info file. + path_info.AddMapping(archive_path, attributed_path) + + resource_dir_name = os.path.basename(resource_dir) + archive_path = '{}_{}/{}'.format(index, resource_dir_name, archive_path) + files_to_zip.append((archive_path, path)) + + path_info.Write(zip_path + '.info') + + with zipfile.ZipFile(zip_path, 'w') as z: + # This magic comment signals to resource_utils.ExtractDeps that this zip is + # not just the contents of a single res dir, without the encapsulating res/ + # (like the outputs of android_generated_resources targets), but instead has + # the contents of possibly multiple res/ dirs each within an encapsulating + # directory within the zip. + z.comment = resource_utils.MULTIPLE_RES_MAGIC_STRING + zip_helpers.add_files_to_zip(files_to_zip, z) + + +def _GenerateRTxt(options, r_txt_path): + """Generate R.txt file. + + Args: + options: The command-line options tuple. + r_txt_path: Locates where the R.txt file goes. 
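+
+  An R.txt entry has the form "int <type> <name> <id>", e.g. (name and id
+  value illustrative): int string app_name 0x7f140001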
+ """ + ignore_pattern = resource_utils.AAPT_IGNORE_PATTERN + if options.strip_drawables: + ignore_pattern += ':*drawable*' + + resources_parser.RTxtGenerator(options.resource_dirs, + ignore_pattern).WriteRTxtFile(r_txt_path) + + +def _OnStaleMd5(options): + with resource_utils.BuildContext() as build: + if options.sources and not options.allow_missing_resources: + _CheckAllFilesListed(options.sources, options.resource_dirs) + if options.r_text_in: + r_txt_path = options.r_text_in + else: + _GenerateRTxt(options, build.r_txt_path) + r_txt_path = build.r_txt_path + + if options.r_text_out: + shutil.copyfile(r_txt_path, options.r_text_out) + + if options.resource_zip_out: + ignore_pattern = resource_utils.AAPT_IGNORE_PATTERN + if options.strip_drawables: + ignore_pattern += ':*drawable*' + _ZipResources(options.resource_dirs, options.resource_zip_out, + ignore_pattern) + + +def main(args): + args = build_utils.ExpandFileArgs(args) + options = _ParseArgs(args) + + # Order of these must match order specified in GN so that the correct one + # appears first in the depfile. + output_paths = [ + options.resource_zip_out, + options.resource_zip_out + '.info', + options.r_text_out, + ] + + input_paths = [options.res_sources_path] + if options.r_text_in: + input_paths += [options.r_text_in] + + # Resource files aren't explicitly listed in GN. Listing them in the depfile + # ensures the target will be marked stale when resource files are removed. + depfile_deps = [] + resource_names = [] + for resource_dir in options.resource_dirs: + for resource_file in build_utils.FindInDirectory(resource_dir, '*'): + # Don't list the empty .keep file in depfile. Since it doesn't end up + # included in the .zip, it can lead to -w 'dupbuild=err' ninja errors + # if ever moved. + if not resource_file.endswith(os.path.join('empty', '.keep')): + input_paths.append(resource_file) + depfile_deps.append(resource_file) + resource_names.append(os.path.relpath(resource_file, resource_dir)) + + # Resource filenames matter to the output, so add them to strings as well. + # This matters if a file is renamed but not changed (http://crbug.com/597126). + input_strings = sorted(resource_names) + [ + options.strip_drawables, + ] + + # Since android_resources targets like *__all_dfm_resources depend on java + # targets that they do not need (in reality it only needs the transitive + # resource targets that those java targets depend on), md5_check is used to + # prevent outputs from being re-written when real inputs have not changed. 
+ md5_check.CallAndWriteDepfileIfStale(lambda: _OnStaleMd5(options), + options, + input_paths=input_paths, + input_strings=input_strings, + output_paths=output_paths, + depfile_deps=depfile_deps) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/android/gyp/prepare_resources.pydeps b/android/gyp/prepare_resources.pydeps new file mode 100644 index 000000000000..5c7c4410a237 --- /dev/null +++ b/android/gyp/prepare_resources.pydeps @@ -0,0 +1,35 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/prepare_resources.pydeps build/android/gyp/prepare_resources.py +../../../third_party/jinja2/__init__.py +../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py +../../../third_party/jinja2/bccache.py +../../../third_party/jinja2/compiler.py +../../../third_party/jinja2/defaults.py +../../../third_party/jinja2/environment.py +../../../third_party/jinja2/exceptions.py +../../../third_party/jinja2/filters.py +../../../third_party/jinja2/idtracking.py +../../../third_party/jinja2/lexer.py +../../../third_party/jinja2/loaders.py +../../../third_party/jinja2/nodes.py +../../../third_party/jinja2/optimizer.py +../../../third_party/jinja2/parser.py +../../../third_party/jinja2/runtime.py +../../../third_party/jinja2/tests.py +../../../third_party/jinja2/utils.py +../../../third_party/jinja2/visitor.py +../../../third_party/markupsafe/__init__.py +../../../third_party/markupsafe/_compat.py +../../../third_party/markupsafe/_native.py +../../action_helpers.py +../../gn_helpers.py +../../print_python_deps.py +../../zip_helpers.py +prepare_resources.py +util/__init__.py +util/build_utils.py +util/jar_info_utils.py +util/md5_check.py +util/resource_utils.py +util/resources_parser.py diff --git a/android/gyp/process_native_prebuilt.py b/android/gyp/process_native_prebuilt.py new file mode 100755 index 000000000000..060adae81400 --- /dev/null +++ b/android/gyp/process_native_prebuilt.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python3 +# +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import os +import shutil +import sys + +from util import build_utils +import action_helpers # build_utils adds //build to sys.path. + + +def main(args): + parser = argparse.ArgumentParser(args) + parser.add_argument('--strip-path', required=True, help='') + parser.add_argument('--input-path', required=True, help='') + parser.add_argument('--stripped-output-path', required=True, help='') + parser.add_argument('--unstripped-output-path', required=True, help='') + options = parser.parse_args(args) + + # eu-strip's output keeps mode from source file which might not be writable + # thus it fails to override its output on the next run. AtomicOutput fixes + # the issue. 
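+  # The resulting command amounts to, e.g. (library name illustrative):
+  #   eu-strip libfoo.so -o <atomic tmp file>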
+  with action_helpers.atomic_output(options.stripped_output_path) as out:
+    cmd = [
+        options.strip_path,
+        options.input_path,
+        '-o',
+        out.name,
+    ]
+    build_utils.CheckOutput(cmd)
+  shutil.copyfile(options.input_path, options.unstripped_output_path)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/android/gyp/process_native_prebuilt.pydeps b/android/gyp/process_native_prebuilt.pydeps
new file mode 100644
index 000000000000..baf9eff7a28d
--- /dev/null
+++ b/android/gyp/process_native_prebuilt.pydeps
@@ -0,0 +1,7 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/process_native_prebuilt.pydeps build/android/gyp/process_native_prebuilt.py
+../../action_helpers.py
+../../gn_helpers.py
+process_native_prebuilt.py
+util/__init__.py
+util/build_utils.py
diff --git a/android/gyp/proguard.py b/android/gyp/proguard.py
new file mode 100755
index 000000000000..579501c7ded3
--- /dev/null
+++ b/android/gyp/proguard.py
@@ -0,0 +1,691 @@
+#!/usr/bin/env python3
+#
+# Copyright 2013 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import logging
+import os
+import pathlib
+import re
+import shutil
+import sys
+import zipfile
+
+import dex
+from util import build_utils
+from util import diff_utils
+import action_helpers  # build_utils adds //build to sys.path.
+import zip_helpers
+
+_BLOCKLISTED_EXPECTATION_PATHS = [
+    # A separate expectation file is created for these files.
+    'clank/third_party/google3/pg_confs/',
+]
+
+_DUMP_DIR_NAME = 'r8inputs_dir'
+
+
+def _ParseOptions():
+  args = build_utils.ExpandFileArgs(sys.argv[1:])
+  parser = argparse.ArgumentParser()
+  action_helpers.add_depfile_arg(parser)
+  parser.add_argument('--r8-path',
+                      required=True,
+                      help='Path to the R8.jar to use.')
+  parser.add_argument('--input-paths',
+                      action='append',
+                      required=True,
+                      help='GN-list of .jar files to optimize.')
+  parser.add_argument('--output-path', help='Path to the generated .jar file.')
+  parser.add_argument(
+      '--proguard-configs',
+      action='append',
+      required=True,
+      help='GN-list of configuration files.')
+  parser.add_argument(
+      '--apply-mapping', help='Path to ProGuard mapping to apply.')
+  parser.add_argument(
+      '--mapping-output',
+      required=True,
+      help='Path for ProGuard to output mapping file to.')
+  parser.add_argument(
+      '--extra-mapping-output-paths',
+      help='GN-list of additional paths to copy output mapping file to.')
+  parser.add_argument(
+      '--classpath',
+      action='append',
+      help='GN-list of .jar files to include as libraries.')
+  parser.add_argument('--main-dex-rules-path',
+                      action='append',
+                      help='Path to main dex rules for multidex.')
+  parser.add_argument(
+      '--min-api', help='Minimum Android API level compatibility.')
+  parser.add_argument('--enable-obfuscation',
+                      action='store_true',
+                      help='Minify symbol names')
+  parser.add_argument(
+      '--verbose', '-v', action='store_true', help='Print all ProGuard output')
+  parser.add_argument(
+      '--repackage-classes',
+      help='Package that all optimized classes are put into.')
+  parser.add_argument(
+      '--disable-checks',
+      action='store_true',
+      help='Disable -checkdiscard directives and missing symbols check')
+  parser.add_argument('--source-file', help='Value for source file attribute.')
+  parser.add_argument('--package-name',
+                      help='Goes into a comment in the mapping file.')
+  parser.add_argument(
+      '--force-enable-assertions',
+      action='store_true',
+      help='Forcefully enable javac generated assertion code.')
+  parser.add_argument('--assertion-handler',
+                      help='The class name of the assertion handler class.')
+  parser.add_argument(
+      '--feature-jars',
+      action='append',
+      help='GN list of paths to jars which comprise the corresponding '
+      'feature.')
+  parser.add_argument(
+      '--dex-dest',
+      action='append',
+      dest='dex_dests',
+      help='Destination for dex file of the corresponding feature.')
+  parser.add_argument(
+      '--feature-name',
+      action='append',
+      dest='feature_names',
+      help='The name of the feature module.')
+  parser.add_argument(
+      '--uses-split',
+      action='append',
+      help='List of name pairs separated by : mapping a feature module to a '
+      'dependent feature module.')
+  parser.add_argument(
+      '--keep-rules-targets-regex',
+      metavar='KEEP_RULES_REGEX',
+      help='If passed, outputs keep rules for references from all other '
+      'inputs to the subset of inputs that satisfy the KEEP_RULES_REGEX.')
+  parser.add_argument(
+      '--keep-rules-output-path',
+      help='Output path to the keep rules for references to the '
+      '--keep-rules-targets-regex inputs from the rest of the inputs.')
+  parser.add_argument('--warnings-as-errors',
+                      action='store_true',
+                      help='Treat all warnings as errors.')
+  parser.add_argument('--show-desugar-default-interface-warnings',
+                      action='store_true',
+                      help='Enable desugaring warnings.')
+  parser.add_argument('--dump-inputs',
+                      action='store_true',
+                      help='Use when filing R8 bugs to capture inputs.'
+                      ' Stores inputs to r8inputs.zip')
+  parser.add_argument(
+      '--dump-unknown-refs',
+      action='store_true',
+      help='Log all reasons why API modelling cannot determine API level')
+  parser.add_argument(
+      '--stamp',
+      help='File to touch upon success. Mutually exclusive with --output-path')
+  parser.add_argument('--desugared-library-keep-rule-output',
+                      help='Path to desugared library keep rule output file.')
+
+  diff_utils.AddCommandLineFlags(parser)
+  options = parser.parse_args(args)
+
+  if options.feature_names:
+    if options.output_path:
+      parser.error('Feature splits cannot specify an output in GN.')
+    if not options.actual_file and not options.stamp:
+      parser.error('Feature splits require a stamp file as output.')
+  elif not options.output_path:
+    parser.error('Output path required when feature splits aren\'t used')
+
+  if bool(options.keep_rules_targets_regex) != bool(
+      options.keep_rules_output_path):
+    parser.error('You must pass both --keep-rules-targets-regex and '
+                 '--keep-rules-output-path')
+
+  if options.force_enable_assertions and options.assertion_handler:
+    parser.error('Cannot use both --force-enable-assertions and '
+                 '--assertion-handler')
+
+  options.classpath = action_helpers.parse_gn_list(options.classpath)
+  options.proguard_configs = action_helpers.parse_gn_list(
+      options.proguard_configs)
+  options.input_paths = action_helpers.parse_gn_list(options.input_paths)
+  options.extra_mapping_output_paths = action_helpers.parse_gn_list(
+      options.extra_mapping_output_paths)
+
+  if options.feature_names:
+    if 'base' not in options.feature_names:
+      parser.error('"base" feature required when feature arguments are used.')
+    if len(options.feature_names) != len(options.feature_jars) or len(
+        options.feature_names) != len(options.dex_dests):
+      parser.error('Invalid feature argument lengths.')
+
+    options.feature_jars = [
+        action_helpers.parse_gn_list(x) for x in options.feature_jars
+    ]
+
+  split_map = {}
+  if options.uses_split:
+    for split_pair in options.uses_split:
+      child, parent = split_pair.split(':')
+      for name in
(child, parent): + if name not in options.feature_names: + parser.error('"%s" referenced in --uses-split not present.' % name) + split_map[child] = parent + options.uses_split = split_map + + return options + + +class _SplitContext: + def __init__(self, name, output_path, input_jars, work_dir, parent_name=None): + self.name = name + self.parent_name = parent_name + self.input_jars = set(input_jars) + self.final_output_path = output_path + self.staging_dir = os.path.join(work_dir, name) + os.mkdir(self.staging_dir) + + def CreateOutput(self): + found_files = build_utils.FindInDirectory(self.staging_dir) + if not found_files: + raise Exception('Missing dex outputs in {}'.format(self.staging_dir)) + + if self.final_output_path.endswith('.dex'): + if len(found_files) != 1: + raise Exception('Expected exactly 1 dex file output, found: {}'.format( + '\t'.join(found_files))) + shutil.move(found_files[0], self.final_output_path) + return + + # Add to .jar using Python rather than having R8 output to a .zip directly + # in order to disable compression of the .jar, saving ~500ms. + tmp_jar_output = self.staging_dir + '.jar' + zip_helpers.add_files_to_zip(found_files, + tmp_jar_output, + base_dir=self.staging_dir) + shutil.move(tmp_jar_output, self.final_output_path) + + +def _OptimizeWithR8(options, + config_paths, + libraries, + dynamic_config_data, + print_stdout=False): + with build_utils.TempDir() as tmp_dir: + if dynamic_config_data: + dynamic_config_path = os.path.join(tmp_dir, 'dynamic_config.flags') + with open(dynamic_config_path, 'w') as f: + f.write(dynamic_config_data) + config_paths = config_paths + [dynamic_config_path] + + tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt') + # If there is no output (no classes are kept), this prevents this script + # from failing. + build_utils.Touch(tmp_mapping_path) + + tmp_output = os.path.join(tmp_dir, 'r8out') + os.mkdir(tmp_output) + + split_contexts_by_name = {} + if options.feature_names: + for name, dest_dex, input_jars in zip(options.feature_names, + options.dex_dests, + options.feature_jars): + parent_name = options.uses_split.get(name) + if parent_name is None and name != 'base': + parent_name = 'base' + split_context = _SplitContext(name, + dest_dex, + input_jars, + tmp_output, + parent_name=parent_name) + split_contexts_by_name[name] = split_context + else: + # Base context will get populated via "extra_jars" below. + split_contexts_by_name['base'] = _SplitContext('base', + options.output_path, [], + tmp_output) + base_context = split_contexts_by_name['base'] + + # R8 OOMs with the default xmx=1G. + cmd = build_utils.JavaCmd(xmx='2G') + [ + # Allows -whyareyounotinlining, which we don't have by default, but + # which is useful for one-off queries. + '-Dcom.android.tools.r8.experimental.enablewhyareyounotinlining=1', + # Restricts horizontal class merging to apply only to classes that + # share a .java file (nested classes). 
+        '-Dcom.android.tools.r8.enableSameFilePolicy=1',
+    ]
+    if options.dump_inputs:
+      cmd += [f'-Dcom.android.tools.r8.dumpinputtodirectory={_DUMP_DIR_NAME}']
+    if options.dump_unknown_refs:
+      cmd += ['-Dcom.android.tools.r8.reportUnknownApiReferences=1']
+    cmd += [
+        '-cp',
+        options.r8_path,
+        'com.android.tools.r8.R8',
+        '--no-data-resources',
+        '--map-id-template',
+        f'{options.source_file} ({options.package_name})',
+        '--source-file-template',
+        options.source_file,
+        '--output',
+        base_context.staging_dir,
+        '--pg-map-output',
+        tmp_mapping_path,
+    ]
+
+    if options.disable_checks:
+      cmd += ['--map-diagnostics:CheckDiscardDiagnostic', 'error', 'none']
+    cmd += ['--map-diagnostics', 'info', 'warning']
+    # An "error" level diagnostic causes r8 to return an error exit code. Doing
+    # this allows our filter to decide what should/shouldn't break our build.
+    cmd += ['--map-diagnostics', 'error', 'warning']
+
+    if options.min_api:
+      cmd += ['--min-api', options.min_api]
+
+    if options.assertion_handler:
+      cmd += ['--force-assertions-handler:' + options.assertion_handler]
+    elif options.force_enable_assertions:
+      cmd += ['--force-enable-assertions']
+
+    for lib in libraries:
+      cmd += ['--lib', lib]
+
+    for config_file in config_paths:
+      cmd += ['--pg-conf', config_file]
+
+    if options.main_dex_rules_path:
+      for main_dex_rule in options.main_dex_rules_path:
+        cmd += ['--main-dex-rules', main_dex_rule]
+
+    # Add any extra inputs to the base context (e.g. desugar runtime).
+    extra_jars = set(options.input_paths)
+    for split_context in split_contexts_by_name.values():
+      extra_jars -= split_context.input_jars
+    base_context.input_jars.update(extra_jars)
+
+    for split_context in split_contexts_by_name.values():
+      if split_context is base_context:
+        continue
+      for in_jar in sorted(split_context.input_jars):
+        cmd += ['--feature', in_jar, split_context.staging_dir]
+
+    cmd += sorted(base_context.input_jars)
+
+    try:
+      stderr_filter = dex.CreateStderrFilter(
+          options.show_desugar_default_interface_warnings)
+      logging.debug('Running R8')
+      build_utils.CheckOutput(cmd,
+                              print_stdout=print_stdout,
+                              stderr_filter=stderr_filter,
+                              fail_on_output=options.warnings_as_errors)
+    except build_utils.CalledProcessError as e:
+      # Do not output command line because it is massive and makes the actual
+      # error message hard to find.
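+      # e.output still carries R8's own stdout/stderr, so the underlying
+      # diagnostics are preserved in what gets written below.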
+      sys.stderr.write(e.output)
+      sys.exit(1)
+
+    logging.debug('Collecting outputs')
+    base_context.CreateOutput()
+    for split_context in split_contexts_by_name.values():
+      if split_context is not base_context:
+        split_context.CreateOutput()
+
+    shutil.move(tmp_mapping_path, options.mapping_output)
+    return split_contexts_by_name
+
+
+def _OutputKeepRules(r8_path, input_paths, classpath, targets_re_string,
+                     keep_rules_output):
+
+  cmd = build_utils.JavaCmd() + [
+      '-cp', r8_path, 'com.android.tools.r8.tracereferences.TraceReferences',
+      '--map-diagnostics:MissingDefinitionsDiagnostic', 'error', 'warning',
+      '--keep-rules', '--output', keep_rules_output
+  ]
+  targets_re = re.compile(targets_re_string)
+  for path in input_paths:
+    if targets_re.search(path):
+      cmd += ['--target', path]
+    else:
+      cmd += ['--source', path]
+  for path in classpath:
+    cmd += ['--lib', path]
+
+  build_utils.CheckOutput(cmd, print_stderr=False, fail_on_output=False)
+
+
+def _CheckForMissingSymbols(r8_path, dex_files, classpath, warnings_as_errors,
+                            dump_inputs, error_title):
+  cmd = build_utils.JavaCmd()
+
+  if dump_inputs:
+    cmd += [f'-Dcom.android.tools.r8.dumpinputtodirectory={_DUMP_DIR_NAME}']
+
+  cmd += [
+      '-cp', r8_path, 'com.android.tools.r8.tracereferences.TraceReferences',
+      '--map-diagnostics:MissingDefinitionsDiagnostic', 'error', 'warning',
+      '--check'
+  ]
+
+  for path in classpath:
+    cmd += ['--lib', path]
+  for path in dex_files:
+    cmd += ['--source', path]
+
+  failed_holder = [False]
+
+  def stderr_filter(stderr):
+    ignored_lines = [
+        # Summary contains warning count, which our filtering makes wrong.
+        'Warning: Tracereferences found',
+
+        # TODO(agrieve): Create interface jars for these missing classes rather
+        # than allowlisting here.
+        'dalvik.system',
+        'libcore.io',
+        'sun.misc.Unsafe',
+
+        # Found in: com/facebook/fbui/textlayoutbuilder/StaticLayoutHelper
+        'android.text.StaticLayout.',
+        # TODO(crbug/1426964): Remove once chrome builds with Android U SDK.
+        'android.adservices.measurement',
+
+        # Explicitly guarded by try (NoClassDefFoundError) in Flogger's
+        # PlatformProvider.
+        'com.google.common.flogger.backend.google.GooglePlatform',
+        'com.google.common.flogger.backend.system.DefaultPlatform',
+
+        # TODO(agrieve): Exclude these only when use_jacoco_coverage=true.
+        'java.lang.instrument.ClassFileTransformer',
+        'java.lang.instrument.IllegalClassFormatException',
+        'java.lang.instrument.Instrumentation',
+        'java.lang.management.ManagementFactory',
+        'javax.management.MBeanServer',
+        'javax.management.ObjectInstance',
+        'javax.management.ObjectName',
+        'javax.management.StandardMBean',
+
+        # Explicitly guarded by try (NoClassDefFoundError) in Firebase's
+        # KotlinDetector: com.google.firebase.platforminfo.KotlinDetector.
+        'kotlin.KotlinVersion',
+
+        # TODO(agrieve): Remove once we move to Android U SDK.
+        'android.window.BackEvent',
+        'android.window.OnBackAnimationCallback',
+    ]
+
+    had_unfiltered_items = ' ' in stderr
+    stderr = build_utils.FilterLines(
+        stderr, '|'.join(re.escape(x) for x in ignored_lines))
+    if stderr:
+      if 'Missing' in stderr:
+        failed_holder[0] = True
+        stderr = 'TraceReferences failed: ' + error_title + """
+Tip: Build with:
+  is_java_debug=false
+  treat_warnings_as_errors=false
+  enable_proguard_obfuscation=false
+and then use dexdump to see which class(es) reference them.
+
+E.g.:
+third_party/android_sdk/public/build-tools/*/dexdump -d \
+out/Release/apks/YourApk.apk > dex.txt
+""" + stderr
+
+      if 'FragmentActivity' in stderr:
+        stderr += """
+You may need to update build configs to run FragmentActivityReplacer for
+additional targets. See
+https://chromium.googlesource.com/chromium/src.git/+/main/docs/ui/android/bytecode_rewriting.md.
+"""
+    elif had_unfiltered_items:
+      # Left only with empty headings. All indented items were filtered out.
+      stderr = ''
+    return stderr
+
+  try:
+    build_utils.CheckOutput(cmd,
+                            print_stdout=True,
+                            stderr_filter=stderr_filter,
+                            fail_on_output=warnings_as_errors)
+  except build_utils.CalledProcessError as e:
+    # Do not output command line because it is massive and makes the actual
+    # error message hard to find.
+    sys.stderr.write(e.output)
+    sys.exit(1)
+  return failed_holder[0]
+
+
+def _CombineConfigs(configs,
+                    dynamic_config_data,
+                    embedded_configs,
+                    exclude_generated=False):
+  # Sort in this way so //clank versions of the same libraries will sort
+  # to the same spot in the file.
+  def sort_key(path):
+    return tuple(reversed(path.split(os.path.sep)))
+
+  def format_config_contents(path, contents):
+    formatted_contents = []
+    if not contents.strip():
+      return []
+
+    # Fix up line endings (third_party configs can have Windows endings).
+    contents = contents.replace('\r', '')
+    # Remove numbers from generated rule comments to make the file more
+    # diff'able.
+    contents = re.sub(r' #generated:\d+', '', contents)
+    formatted_contents.append('# File: ' + path)
+    formatted_contents.append(contents)
+    formatted_contents.append('')
+    return formatted_contents
+
+  ret = []
+  for config in sorted(configs, key=sort_key):
+    if exclude_generated and config.endswith('.resources.proguard.txt'):
+      continue
+
+    # Exclude some configs from expectations.
+    if any(entry in config for entry in _BLOCKLISTED_EXPECTATION_PATHS):
+      continue
+
+    with open(config) as config_file:
+      contents = config_file.read().rstrip()
+
+    ret.extend(format_config_contents(config, contents))
+
+  for path, contents in sorted(embedded_configs.items()):
+    ret.extend(format_config_contents(path, contents))
+
+  if dynamic_config_data:
+    ret.append('# File: //build/android/gyp/proguard.py (generated rules)')
+    ret.append(dynamic_config_data)
+    ret.append('')
+  return '\n'.join(ret)
+
+
+def _CreateDynamicConfig(options):
+  ret = []
+  if options.enable_obfuscation:
+    ret.append("-repackageclasses ''")
+  else:
+    ret.append("-dontobfuscate")
+
+  if options.apply_mapping:
+    ret.append("-applymapping '%s'" % options.apply_mapping)
+
+  return '\n'.join(ret)
+
+
+def _ExtractEmbeddedConfigs(jar_path, embedded_configs):
+  with zipfile.ZipFile(jar_path) as z:
+    proguard_names = []
+    r8_names = []
+    for info in z.infolist():
+      if info.is_dir():
+        continue
+      if info.filename.startswith('META-INF/proguard/'):
+        proguard_names.append(info.filename)
+      elif info.filename.startswith('META-INF/com.android.tools/r8/'):
+        r8_names.append(info.filename)
+      elif info.filename.startswith('META-INF/com.android.tools/r8-from'):
+        # Assume our version of R8 is always the latest.
+        if '-upto-' not in info.filename:
+          r8_names.append(info.filename)
+
+    # Give preference to r8-from-*, then r8/, then proguard/.
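+    # For example, a jar carrying both META-INF/com.android.tools/r8/a.pro
+    # and META-INF/proguard/a.pro contributes only the former: r8_names (which
+    # also collects the applicable r8-from-* entries) wins whenever non-empty.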
+    active = r8_names or proguard_names
+    for filename in active:
+      config_path = '{}:{}'.format(jar_path, filename)
+      embedded_configs[config_path] = z.read(filename).decode('utf-8').rstrip()
+
+
+def _ContainsDebuggingConfig(config_str):
+  debugging_configs = ('-whyareyoukeeping', '-whyareyounotinlining')
+  return any(config in config_str for config in debugging_configs)
+
+
+def _MaybeWriteStampAndDepFile(options, inputs):
+  output = options.output_path
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+    output = options.stamp
+  if options.depfile:
+    action_helpers.write_depfile(options.depfile, output, inputs=inputs)
+
+
+def _IterParentContexts(context_name, split_contexts_by_name):
+  while context_name:
+    context = split_contexts_by_name[context_name]
+    yield context
+    context_name = context.parent_name
+
+
+def _DoTraceReferencesChecks(options, split_contexts_by_name):
+  # Set of all contexts that are a parent to another.
+  parent_splits_context_names = {
+      c.parent_name
+      for c in split_contexts_by_name.values() if c.parent_name
+  }
+  context_sets = [
+      list(_IterParentContexts(n, split_contexts_by_name))
+      for n in parent_splits_context_names
+  ]
+  # Visit them in order of: base, base+chrome, base+chrome+thing.
+  context_sets.sort(key=lambda x: (len(x), x[0].name))
+
+  # Ensure there are no missing references when considering all dex files.
+  error_title = 'DEX contains references to non-existent symbols after R8.'
+  dex_files = sorted(c.final_output_path
+                     for c in split_contexts_by_name.values())
+  if _CheckForMissingSymbols(options.r8_path, dex_files, options.classpath,
+                             options.warnings_as_errors, options.dump_inputs,
+                             error_title):
+    # Failed but didn't raise due to warnings_as_errors=False.
+    return
+
+  for context_set in context_sets:
+    # Ensure there are no references from base -> chrome module, or from
+    # chrome -> feature modules.
+    error_title = (f'DEX within module "{context_set[0].name}" contains '
+                   'reference(s) to symbols within child splits')
+    dex_files = [c.final_output_path for c in context_set]
+    # Each check currently takes about 3 seconds on a fast dev machine, and we
+    # run 3 of them (all, base, base+chrome).
+    # We could run them concurrently, to shave off 5-6 seconds, but would need
+    # to make sure that the order is maintained.
+    if _CheckForMissingSymbols(options.r8_path, dex_files, options.classpath,
+                               options.warnings_as_errors, options.dump_inputs,
+                               error_title):
+      # Failed but didn't raise due to warnings_as_errors=False.
+      return
+
+
+def _Run(options):
+  # ProGuard configs that are derived from flags.
+  logging.debug('Preparing configs')
+  dynamic_config_data = _CreateDynamicConfig(options)
+
+  logging.debug('Looking for embedded configs')
+  # If a jar is part of the input, there is no need to also pass it as a
+  # library jar.
+  libraries = [p for p in options.classpath if p not in options.input_paths]
+
+  embedded_configs = {}
+  for jar_path in options.input_paths + libraries:
+    _ExtractEmbeddedConfigs(jar_path, embedded_configs)
+
+  # Combine all ProGuard configs (files, embedded, and flag-derived) into one.
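+  # The merged text is what the expectations diff below is checked against,
+  # and it also decides whether R8's stdout gets printed (it does whenever
+  # debugging directives such as -whyareyoukeeping are present).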
+  merged_configs = _CombineConfigs(options.proguard_configs,
+                                   dynamic_config_data,
+                                   embedded_configs,
+                                   exclude_generated=True)
+  print_stdout = _ContainsDebuggingConfig(merged_configs) or options.verbose
+
+  depfile_inputs = options.proguard_configs + options.input_paths + libraries
+  if options.expected_file:
+    diff_utils.CheckExpectations(merged_configs, options)
+    if options.only_verify_expectations:
+      action_helpers.write_depfile(options.depfile,
+                                   options.actual_file,
+                                   inputs=depfile_inputs)
+      return
+
+  if options.keep_rules_output_path:
+    _OutputKeepRules(options.r8_path, options.input_paths, options.classpath,
+                     options.keep_rules_targets_regex,
+                     options.keep_rules_output_path)
+    return
+
+  split_contexts_by_name = _OptimizeWithR8(options, options.proguard_configs,
+                                           libraries, dynamic_config_data,
+                                           print_stdout)
+
+  if not options.disable_checks:
+    logging.debug('Running tracereferences')
+    _DoTraceReferencesChecks(options, split_contexts_by_name)
+
+  for output in options.extra_mapping_output_paths:
+    shutil.copy(options.mapping_output, output)
+
+  if options.apply_mapping:
+    depfile_inputs.append(options.apply_mapping)
+
+  _MaybeWriteStampAndDepFile(options, depfile_inputs)
+
+
+def main():
+  build_utils.InitLogging('PROGUARD_DEBUG')
+  options = _ParseOptions()
+
+  if options.dump_inputs:
+    # Dumping inputs causes output to be emitted; avoid failing due to stdout.
+    options.warnings_as_errors = False
+    # Use dumpinputtodirectory instead of dumpinputtofile to avoid failing the
+    # build and keep running tracereferences.
+    dump_dir_name = _DUMP_DIR_NAME
+    dump_dir_path = pathlib.Path(dump_dir_name)
+    if dump_dir_path.exists():
+      shutil.rmtree(dump_dir_path)
+    # The directory needs to exist before r8 adds the zip files to it.
+    dump_dir_path.mkdir()
+
+  # This ensures that the final outputs are zipped and easily uploaded to a bug.
+  try:
+    _Run(options)
+  finally:
+    if options.dump_inputs:
+      zip_helpers.zip_directory('r8inputs.zip', _DUMP_DIR_NAME)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/android/gyp/proguard.pydeps b/android/gyp/proguard.pydeps
new file mode 100644
index 000000000000..7ee251b8ebf0
--- /dev/null
+++ b/android/gyp/proguard.pydeps
@@ -0,0 +1,12 @@
+# Generated by running:
+# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/proguard.pydeps build/android/gyp/proguard.py
+../../action_helpers.py
+../../gn_helpers.py
+../../print_python_deps.py
+../../zip_helpers.py
+dex.py
+proguard.py
+util/__init__.py
+util/build_utils.py
+util/diff_utils.py
+util/md5_check.py
diff --git a/android/gyp/proto/Configuration_pb2.py b/android/gyp/proto/Configuration_pb2.py
new file mode 100644
index 000000000000..859183089a2f
--- /dev/null
+++ b/android/gyp/proto/Configuration_pb2.py
@@ -0,0 +1,697 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: frameworks/base/tools/aapt2/Configuration.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='frameworks/base/tools/aapt2/Configuration.proto', + package='aapt.pb', + syntax='proto3', + serialized_options=_b('\n\020com.android.aapt'), + serialized_pb=_b('\n/frameworks/base/tools/aapt2/Configuration.proto\x12\x07\x61\x61pt.pb\"\xd9\x14\n\rConfiguration\x12\x0b\n\x03mcc\x18\x01 \x01(\r\x12\x0b\n\x03mnc\x18\x02 \x01(\r\x12\x0e\n\x06locale\x18\x03 \x01(\t\x12@\n\x10layout_direction\x18\x04 \x01(\x0e\x32&.aapt.pb.Configuration.LayoutDirection\x12\x14\n\x0cscreen_width\x18\x05 \x01(\r\x12\x15\n\rscreen_height\x18\x06 \x01(\r\x12\x17\n\x0fscreen_width_dp\x18\x07 \x01(\r\x12\x18\n\x10screen_height_dp\x18\x08 \x01(\r\x12 \n\x18smallest_screen_width_dp\x18\t \x01(\r\x12\x43\n\x12screen_layout_size\x18\n \x01(\x0e\x32\'.aapt.pb.Configuration.ScreenLayoutSize\x12\x43\n\x12screen_layout_long\x18\x0b \x01(\x0e\x32\'.aapt.pb.Configuration.ScreenLayoutLong\x12\x38\n\x0cscreen_round\x18\x0c \x01(\x0e\x32\".aapt.pb.Configuration.ScreenRound\x12?\n\x10wide_color_gamut\x18\r \x01(\x0e\x32%.aapt.pb.Configuration.WideColorGamut\x12\'\n\x03hdr\x18\x0e \x01(\x0e\x32\x1a.aapt.pb.Configuration.Hdr\x12\x37\n\x0borientation\x18\x0f \x01(\x0e\x32\".aapt.pb.Configuration.Orientation\x12\x37\n\x0cui_mode_type\x18\x10 \x01(\x0e\x32!.aapt.pb.Configuration.UiModeType\x12\x39\n\rui_mode_night\x18\x11 \x01(\x0e\x32\".aapt.pb.Configuration.UiModeNight\x12\x0f\n\x07\x64\x65nsity\x18\x12 \x01(\r\x12\x37\n\x0btouchscreen\x18\x13 \x01(\x0e\x32\".aapt.pb.Configuration.Touchscreen\x12\x36\n\x0bkeys_hidden\x18\x14 \x01(\x0e\x32!.aapt.pb.Configuration.KeysHidden\x12\x31\n\x08keyboard\x18\x15 \x01(\x0e\x32\x1f.aapt.pb.Configuration.Keyboard\x12\x34\n\nnav_hidden\x18\x16 \x01(\x0e\x32 .aapt.pb.Configuration.NavHidden\x12\x35\n\nnavigation\x18\x17 \x01(\x0e\x32!.aapt.pb.Configuration.Navigation\x12\x13\n\x0bsdk_version\x18\x18 \x01(\r\x12\x0f\n\x07product\x18\x19 
\x01(\t\"a\n\x0fLayoutDirection\x12\x1a\n\x16LAYOUT_DIRECTION_UNSET\x10\x00\x12\x18\n\x14LAYOUT_DIRECTION_LTR\x10\x01\x12\x18\n\x14LAYOUT_DIRECTION_RTL\x10\x02\"\xaa\x01\n\x10ScreenLayoutSize\x12\x1c\n\x18SCREEN_LAYOUT_SIZE_UNSET\x10\x00\x12\x1c\n\x18SCREEN_LAYOUT_SIZE_SMALL\x10\x01\x12\x1d\n\x19SCREEN_LAYOUT_SIZE_NORMAL\x10\x02\x12\x1c\n\x18SCREEN_LAYOUT_SIZE_LARGE\x10\x03\x12\x1d\n\x19SCREEN_LAYOUT_SIZE_XLARGE\x10\x04\"m\n\x10ScreenLayoutLong\x12\x1c\n\x18SCREEN_LAYOUT_LONG_UNSET\x10\x00\x12\x1b\n\x17SCREEN_LAYOUT_LONG_LONG\x10\x01\x12\x1e\n\x1aSCREEN_LAYOUT_LONG_NOTLONG\x10\x02\"X\n\x0bScreenRound\x12\x16\n\x12SCREEN_ROUND_UNSET\x10\x00\x12\x16\n\x12SCREEN_ROUND_ROUND\x10\x01\x12\x19\n\x15SCREEN_ROUND_NOTROUND\x10\x02\"h\n\x0eWideColorGamut\x12\x1a\n\x16WIDE_COLOR_GAMUT_UNSET\x10\x00\x12\x1b\n\x17WIDE_COLOR_GAMUT_WIDECG\x10\x01\x12\x1d\n\x19WIDE_COLOR_GAMUT_NOWIDECG\x10\x02\"3\n\x03Hdr\x12\r\n\tHDR_UNSET\x10\x00\x12\x0e\n\nHDR_HIGHDR\x10\x01\x12\r\n\tHDR_LOWDR\x10\x02\"h\n\x0bOrientation\x12\x15\n\x11ORIENTATION_UNSET\x10\x00\x12\x14\n\x10ORIENTATION_PORT\x10\x01\x12\x14\n\x10ORIENTATION_LAND\x10\x02\x12\x16\n\x12ORIENTATION_SQUARE\x10\x03\"\xd7\x01\n\nUiModeType\x12\x16\n\x12UI_MODE_TYPE_UNSET\x10\x00\x12\x17\n\x13UI_MODE_TYPE_NORMAL\x10\x01\x12\x15\n\x11UI_MODE_TYPE_DESK\x10\x02\x12\x14\n\x10UI_MODE_TYPE_CAR\x10\x03\x12\x1b\n\x17UI_MODE_TYPE_TELEVISION\x10\x04\x12\x1a\n\x16UI_MODE_TYPE_APPLIANCE\x10\x05\x12\x16\n\x12UI_MODE_TYPE_WATCH\x10\x06\x12\x1a\n\x16UI_MODE_TYPE_VRHEADSET\x10\x07\"[\n\x0bUiModeNight\x12\x17\n\x13UI_MODE_NIGHT_UNSET\x10\x00\x12\x17\n\x13UI_MODE_NIGHT_NIGHT\x10\x01\x12\x1a\n\x16UI_MODE_NIGHT_NOTNIGHT\x10\x02\"m\n\x0bTouchscreen\x12\x15\n\x11TOUCHSCREEN_UNSET\x10\x00\x12\x17\n\x13TOUCHSCREEN_NOTOUCH\x10\x01\x12\x16\n\x12TOUCHSCREEN_STYLUS\x10\x02\x12\x16\n\x12TOUCHSCREEN_FINGER\x10\x03\"v\n\nKeysHidden\x12\x15\n\x11KEYS_HIDDEN_UNSET\x10\x00\x12\x1b\n\x17KEYS_HIDDEN_KEYSEXPOSED\x10\x01\x12\x1a\n\x16KEYS_HIDDEN_KEYSHIDDEN\x10\x02\x12\x18\n\x14KEYS_HIDDEN_KEYSSOFT\x10\x03\"`\n\x08Keyboard\x12\x12\n\x0eKEYBOARD_UNSET\x10\x00\x12\x13\n\x0fKEYBOARD_NOKEYS\x10\x01\x12\x13\n\x0fKEYBOARD_QWERTY\x10\x02\x12\x16\n\x12KEYBOARD_TWELVEKEY\x10\x03\"V\n\tNavHidden\x12\x14\n\x10NAV_HIDDEN_UNSET\x10\x00\x12\x19\n\x15NAV_HIDDEN_NAVEXPOSED\x10\x01\x12\x18\n\x14NAV_HIDDEN_NAVHIDDEN\x10\x02\"}\n\nNavigation\x12\x14\n\x10NAVIGATION_UNSET\x10\x00\x12\x14\n\x10NAVIGATION_NONAV\x10\x01\x12\x13\n\x0fNAVIGATION_DPAD\x10\x02\x12\x18\n\x14NAVIGATION_TRACKBALL\x10\x03\x12\x14\n\x10NAVIGATION_WHEEL\x10\x04\x42\x12\n\x10\x63om.android.aaptb\x06proto3') +) + + + +_CONFIGURATION_LAYOUTDIRECTION = _descriptor.EnumDescriptor( + name='LayoutDirection', + full_name='aapt.pb.Configuration.LayoutDirection', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='LAYOUT_DIRECTION_UNSET', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LAYOUT_DIRECTION_LTR', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LAYOUT_DIRECTION_RTL', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=1119, + serialized_end=1216, +) +_sym_db.RegisterEnumDescriptor(_CONFIGURATION_LAYOUTDIRECTION) + +_CONFIGURATION_SCREENLAYOUTSIZE = _descriptor.EnumDescriptor( + name='ScreenLayoutSize', + full_name='aapt.pb.Configuration.ScreenLayoutSize', + filename=None, + file=DESCRIPTOR, + values=[ + 
_descriptor.EnumValueDescriptor( + name='SCREEN_LAYOUT_SIZE_UNSET', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SCREEN_LAYOUT_SIZE_SMALL', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SCREEN_LAYOUT_SIZE_NORMAL', index=2, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SCREEN_LAYOUT_SIZE_LARGE', index=3, number=3, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SCREEN_LAYOUT_SIZE_XLARGE', index=4, number=4, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=1219, + serialized_end=1389, +) +_sym_db.RegisterEnumDescriptor(_CONFIGURATION_SCREENLAYOUTSIZE) + +_CONFIGURATION_SCREENLAYOUTLONG = _descriptor.EnumDescriptor( + name='ScreenLayoutLong', + full_name='aapt.pb.Configuration.ScreenLayoutLong', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='SCREEN_LAYOUT_LONG_UNSET', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SCREEN_LAYOUT_LONG_LONG', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SCREEN_LAYOUT_LONG_NOTLONG', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=1391, + serialized_end=1500, +) +_sym_db.RegisterEnumDescriptor(_CONFIGURATION_SCREENLAYOUTLONG) + +_CONFIGURATION_SCREENROUND = _descriptor.EnumDescriptor( + name='ScreenRound', + full_name='aapt.pb.Configuration.ScreenRound', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='SCREEN_ROUND_UNSET', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SCREEN_ROUND_ROUND', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SCREEN_ROUND_NOTROUND', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=1502, + serialized_end=1590, +) +_sym_db.RegisterEnumDescriptor(_CONFIGURATION_SCREENROUND) + +_CONFIGURATION_WIDECOLORGAMUT = _descriptor.EnumDescriptor( + name='WideColorGamut', + full_name='aapt.pb.Configuration.WideColorGamut', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='WIDE_COLOR_GAMUT_UNSET', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='WIDE_COLOR_GAMUT_WIDECG', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='WIDE_COLOR_GAMUT_NOWIDECG', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=1592, + serialized_end=1696, +) +_sym_db.RegisterEnumDescriptor(_CONFIGURATION_WIDECOLORGAMUT) + +_CONFIGURATION_HDR = _descriptor.EnumDescriptor( + name='Hdr', + full_name='aapt.pb.Configuration.Hdr', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='HDR_UNSET', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='HDR_HIGHDR', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='HDR_LOWDR', index=2, number=2, + serialized_options=None, + type=None), + ], + 
containing_type=None, + serialized_options=None, + serialized_start=1698, + serialized_end=1749, +) +_sym_db.RegisterEnumDescriptor(_CONFIGURATION_HDR) + +_CONFIGURATION_ORIENTATION = _descriptor.EnumDescriptor( + name='Orientation', + full_name='aapt.pb.Configuration.Orientation', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='ORIENTATION_UNSET', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ORIENTATION_PORT', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ORIENTATION_LAND', index=2, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ORIENTATION_SQUARE', index=3, number=3, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=1751, + serialized_end=1855, +) +_sym_db.RegisterEnumDescriptor(_CONFIGURATION_ORIENTATION) + +_CONFIGURATION_UIMODETYPE = _descriptor.EnumDescriptor( + name='UiModeType', + full_name='aapt.pb.Configuration.UiModeType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='UI_MODE_TYPE_UNSET', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UI_MODE_TYPE_NORMAL', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UI_MODE_TYPE_DESK', index=2, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UI_MODE_TYPE_CAR', index=3, number=3, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UI_MODE_TYPE_TELEVISION', index=4, number=4, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UI_MODE_TYPE_APPLIANCE', index=5, number=5, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UI_MODE_TYPE_WATCH', index=6, number=6, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UI_MODE_TYPE_VRHEADSET', index=7, number=7, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=1858, + serialized_end=2073, +) +_sym_db.RegisterEnumDescriptor(_CONFIGURATION_UIMODETYPE) + +_CONFIGURATION_UIMODENIGHT = _descriptor.EnumDescriptor( + name='UiModeNight', + full_name='aapt.pb.Configuration.UiModeNight', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='UI_MODE_NIGHT_UNSET', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UI_MODE_NIGHT_NIGHT', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UI_MODE_NIGHT_NOTNIGHT', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=2075, + serialized_end=2166, +) +_sym_db.RegisterEnumDescriptor(_CONFIGURATION_UIMODENIGHT) + +_CONFIGURATION_TOUCHSCREEN = _descriptor.EnumDescriptor( + name='Touchscreen', + full_name='aapt.pb.Configuration.Touchscreen', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='TOUCHSCREEN_UNSET', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TOUCHSCREEN_NOTOUCH', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TOUCHSCREEN_STYLUS', index=2, 
number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TOUCHSCREEN_FINGER', index=3, number=3, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=2168, + serialized_end=2277, +) +_sym_db.RegisterEnumDescriptor(_CONFIGURATION_TOUCHSCREEN) + +_CONFIGURATION_KEYSHIDDEN = _descriptor.EnumDescriptor( + name='KeysHidden', + full_name='aapt.pb.Configuration.KeysHidden', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='KEYS_HIDDEN_UNSET', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='KEYS_HIDDEN_KEYSEXPOSED', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='KEYS_HIDDEN_KEYSHIDDEN', index=2, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='KEYS_HIDDEN_KEYSSOFT', index=3, number=3, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=2279, + serialized_end=2397, +) +_sym_db.RegisterEnumDescriptor(_CONFIGURATION_KEYSHIDDEN) + +_CONFIGURATION_KEYBOARD = _descriptor.EnumDescriptor( + name='Keyboard', + full_name='aapt.pb.Configuration.Keyboard', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='KEYBOARD_UNSET', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='KEYBOARD_NOKEYS', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='KEYBOARD_QWERTY', index=2, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='KEYBOARD_TWELVEKEY', index=3, number=3, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=2399, + serialized_end=2495, +) +_sym_db.RegisterEnumDescriptor(_CONFIGURATION_KEYBOARD) + +_CONFIGURATION_NAVHIDDEN = _descriptor.EnumDescriptor( + name='NavHidden', + full_name='aapt.pb.Configuration.NavHidden', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NAV_HIDDEN_UNSET', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NAV_HIDDEN_NAVEXPOSED', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NAV_HIDDEN_NAVHIDDEN', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=2497, + serialized_end=2583, +) +_sym_db.RegisterEnumDescriptor(_CONFIGURATION_NAVHIDDEN) + +_CONFIGURATION_NAVIGATION = _descriptor.EnumDescriptor( + name='Navigation', + full_name='aapt.pb.Configuration.Navigation', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NAVIGATION_UNSET', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NAVIGATION_NONAV', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NAVIGATION_DPAD', index=2, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NAVIGATION_TRACKBALL', index=3, number=3, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NAVIGATION_WHEEL', index=4, number=4, + serialized_options=None, + type=None), + ], + containing_type=None, + 
serialized_options=None, + serialized_start=2585, + serialized_end=2710, +) +_sym_db.RegisterEnumDescriptor(_CONFIGURATION_NAVIGATION) + + +_CONFIGURATION = _descriptor.Descriptor( + name='Configuration', + full_name='aapt.pb.Configuration', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='mcc', full_name='aapt.pb.Configuration.mcc', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='mnc', full_name='aapt.pb.Configuration.mnc', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='locale', full_name='aapt.pb.Configuration.locale', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='layout_direction', full_name='aapt.pb.Configuration.layout_direction', index=3, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='screen_width', full_name='aapt.pb.Configuration.screen_width', index=4, + number=5, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='screen_height', full_name='aapt.pb.Configuration.screen_height', index=5, + number=6, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='screen_width_dp', full_name='aapt.pb.Configuration.screen_width_dp', index=6, + number=7, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='screen_height_dp', full_name='aapt.pb.Configuration.screen_height_dp', index=7, + number=8, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='smallest_screen_width_dp', full_name='aapt.pb.Configuration.smallest_screen_width_dp', index=8, + number=9, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='screen_layout_size', full_name='aapt.pb.Configuration.screen_layout_size', 
index=9, + number=10, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='screen_layout_long', full_name='aapt.pb.Configuration.screen_layout_long', index=10, + number=11, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='screen_round', full_name='aapt.pb.Configuration.screen_round', index=11, + number=12, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='wide_color_gamut', full_name='aapt.pb.Configuration.wide_color_gamut', index=12, + number=13, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='hdr', full_name='aapt.pb.Configuration.hdr', index=13, + number=14, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='orientation', full_name='aapt.pb.Configuration.orientation', index=14, + number=15, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='ui_mode_type', full_name='aapt.pb.Configuration.ui_mode_type', index=15, + number=16, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='ui_mode_night', full_name='aapt.pb.Configuration.ui_mode_night', index=16, + number=17, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='density', full_name='aapt.pb.Configuration.density', index=17, + number=18, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='touchscreen', full_name='aapt.pb.Configuration.touchscreen', index=18, + number=19, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='keys_hidden', full_name='aapt.pb.Configuration.keys_hidden', index=19, + number=20, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, 
+ message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='keyboard', full_name='aapt.pb.Configuration.keyboard', index=20, + number=21, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='nav_hidden', full_name='aapt.pb.Configuration.nav_hidden', index=21, + number=22, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='navigation', full_name='aapt.pb.Configuration.navigation', index=22, + number=23, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='sdk_version', full_name='aapt.pb.Configuration.sdk_version', index=23, + number=24, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='product', full_name='aapt.pb.Configuration.product', index=24, + number=25, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _CONFIGURATION_LAYOUTDIRECTION, + _CONFIGURATION_SCREENLAYOUTSIZE, + _CONFIGURATION_SCREENLAYOUTLONG, + _CONFIGURATION_SCREENROUND, + _CONFIGURATION_WIDECOLORGAMUT, + _CONFIGURATION_HDR, + _CONFIGURATION_ORIENTATION, + _CONFIGURATION_UIMODETYPE, + _CONFIGURATION_UIMODENIGHT, + _CONFIGURATION_TOUCHSCREEN, + _CONFIGURATION_KEYSHIDDEN, + _CONFIGURATION_KEYBOARD, + _CONFIGURATION_NAVHIDDEN, + _CONFIGURATION_NAVIGATION, + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=61, + serialized_end=2710, +) + +_CONFIGURATION.fields_by_name['layout_direction'].enum_type = _CONFIGURATION_LAYOUTDIRECTION +_CONFIGURATION.fields_by_name['screen_layout_size'].enum_type = _CONFIGURATION_SCREENLAYOUTSIZE +_CONFIGURATION.fields_by_name['screen_layout_long'].enum_type = _CONFIGURATION_SCREENLAYOUTLONG +_CONFIGURATION.fields_by_name['screen_round'].enum_type = _CONFIGURATION_SCREENROUND +_CONFIGURATION.fields_by_name['wide_color_gamut'].enum_type = _CONFIGURATION_WIDECOLORGAMUT +_CONFIGURATION.fields_by_name['hdr'].enum_type = _CONFIGURATION_HDR +_CONFIGURATION.fields_by_name['orientation'].enum_type = _CONFIGURATION_ORIENTATION +_CONFIGURATION.fields_by_name['ui_mode_type'].enum_type = _CONFIGURATION_UIMODETYPE +_CONFIGURATION.fields_by_name['ui_mode_night'].enum_type = _CONFIGURATION_UIMODENIGHT +_CONFIGURATION.fields_by_name['touchscreen'].enum_type = _CONFIGURATION_TOUCHSCREEN +_CONFIGURATION.fields_by_name['keys_hidden'].enum_type = _CONFIGURATION_KEYSHIDDEN +_CONFIGURATION.fields_by_name['keyboard'].enum_type = 
_CONFIGURATION_KEYBOARD +_CONFIGURATION.fields_by_name['nav_hidden'].enum_type = _CONFIGURATION_NAVHIDDEN +_CONFIGURATION.fields_by_name['navigation'].enum_type = _CONFIGURATION_NAVIGATION +_CONFIGURATION_LAYOUTDIRECTION.containing_type = _CONFIGURATION +_CONFIGURATION_SCREENLAYOUTSIZE.containing_type = _CONFIGURATION +_CONFIGURATION_SCREENLAYOUTLONG.containing_type = _CONFIGURATION +_CONFIGURATION_SCREENROUND.containing_type = _CONFIGURATION +_CONFIGURATION_WIDECOLORGAMUT.containing_type = _CONFIGURATION +_CONFIGURATION_HDR.containing_type = _CONFIGURATION +_CONFIGURATION_ORIENTATION.containing_type = _CONFIGURATION +_CONFIGURATION_UIMODETYPE.containing_type = _CONFIGURATION +_CONFIGURATION_UIMODENIGHT.containing_type = _CONFIGURATION +_CONFIGURATION_TOUCHSCREEN.containing_type = _CONFIGURATION +_CONFIGURATION_KEYSHIDDEN.containing_type = _CONFIGURATION +_CONFIGURATION_KEYBOARD.containing_type = _CONFIGURATION +_CONFIGURATION_NAVHIDDEN.containing_type = _CONFIGURATION +_CONFIGURATION_NAVIGATION.containing_type = _CONFIGURATION +DESCRIPTOR.message_types_by_name['Configuration'] = _CONFIGURATION +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Configuration = _reflection.GeneratedProtocolMessageType('Configuration', (_message.Message,), { + 'DESCRIPTOR' : _CONFIGURATION, + '__module__' : 'frameworks.base.tools.aapt2.Configuration_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Configuration) + }) +_sym_db.RegisterMessage(Configuration) + + +DESCRIPTOR._options = None +# @@protoc_insertion_point(module_scope) diff --git a/android/gyp/proto/README.md b/android/gyp/proto/README.md new file mode 100644 index 000000000000..685041087af2 --- /dev/null +++ b/android/gyp/proto/README.md @@ -0,0 +1,13 @@ +# Protos +These protos are generated from Resources.proto and Configuration.proto from the +Android repo. They are found in the frameworks/base/tools/aapt2/ directory. To +regenerate these if there are changes, run this command from the root of an +Android checkout: + + protoc --python_out=some_dir frameworks/base/tools/aapt2/Resources.proto \ + frameworks/base/tools/aapt2/Configuration.proto + +Then copy the resulting \*pb2.py files from some_dir here. To make sure +Resources_pb2.py is able to import Configuration_pb2.py, replace the +"from frameworks.base.tools.aapt2" portion of the import statement with +"from ." so it will instead be imported from the current directory. diff --git a/android/gyp/proto/Resources_pb2.py b/android/gyp/proto/Resources_pb2.py new file mode 100644 index 000000000000..3bbd7028b5bb --- /dev/null +++ b/android/gyp/proto/Resources_pb2.py @@ -0,0 +1,2779 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: frameworks/base/tools/aapt2/Resources.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from . 
import Configuration_pb2 as frameworks_dot_base_dot_tools_dot_aapt2_dot_Configuration__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='frameworks/base/tools/aapt2/Resources.proto', + package='aapt.pb', + syntax='proto3', + serialized_options=_b('\n\020com.android.aapt'), + serialized_pb=_b('\n+frameworks/base/tools/aapt2/Resources.proto\x12\x07\x61\x61pt.pb\x1a/frameworks/base/tools/aapt2/Configuration.proto\"\x1a\n\nStringPool\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\"<\n\x0eSourcePosition\x12\x13\n\x0bline_number\x18\x01 \x01(\r\x12\x15\n\rcolumn_number\x18\x02 \x01(\r\"E\n\x06Source\x12\x10\n\x08path_idx\x18\x01 \x01(\r\x12)\n\x08position\x18\x02 \x01(\x0b\x32\x17.aapt.pb.SourcePosition\"0\n\x0fToolFingerprint\x12\x0c\n\x04tool\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\"\xbb\x01\n\rResourceTable\x12(\n\x0bsource_pool\x18\x01 \x01(\x0b\x32\x13.aapt.pb.StringPool\x12!\n\x07package\x18\x02 \x03(\x0b\x32\x10.aapt.pb.Package\x12)\n\x0boverlayable\x18\x03 \x03(\x0b\x32\x14.aapt.pb.Overlayable\x12\x32\n\x10tool_fingerprint\x18\x04 \x03(\x0b\x32\x18.aapt.pb.ToolFingerprint\"\x17\n\tPackageId\x12\n\n\x02id\x18\x01 \x01(\r\"d\n\x07Package\x12&\n\npackage_id\x18\x01 \x01(\x0b\x32\x12.aapt.pb.PackageId\x12\x14\n\x0cpackage_name\x18\x02 \x01(\t\x12\x1b\n\x04type\x18\x03 \x03(\x0b\x32\r.aapt.pb.Type\"\x14\n\x06TypeId\x12\n\n\x02id\x18\x01 \x01(\r\"U\n\x04Type\x12 \n\x07type_id\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.TypeId\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x1d\n\x05\x65ntry\x18\x03 \x03(\x0b\x32\x0e.aapt.pb.Entry\"\x97\x01\n\nVisibility\x12(\n\x05level\x18\x01 \x01(\x0e\x32\x19.aapt.pb.Visibility.Level\x12\x1f\n\x06source\x18\x02 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x03 \x01(\t\"-\n\x05Level\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PRIVATE\x10\x01\x12\n\n\x06PUBLIC\x10\x02\"<\n\x08\x41llowNew\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\"K\n\x0bOverlayable\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1f\n\x06source\x18\x02 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\r\n\x05\x61\x63tor\x18\x03 \x01(\t\"\xf3\x01\n\x0fOverlayableItem\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12/\n\x06policy\x18\x03 \x03(\x0e\x32\x1f.aapt.pb.OverlayableItem.Policy\x12\x17\n\x0foverlayable_idx\x18\x04 \x01(\r\"d\n\x06Policy\x12\x08\n\x04NONE\x10\x00\x12\n\n\x06PUBLIC\x10\x01\x12\n\n\x06SYSTEM\x10\x02\x12\n\n\x06VENDOR\x10\x03\x12\x0b\n\x07PRODUCT\x10\x04\x12\r\n\tSIGNATURE\x10\x05\x12\x07\n\x03ODM\x10\x06\x12\x07\n\x03OEM\x10\x07\"\x15\n\x07\x45ntryId\x12\n\n\x02id\x18\x01 \x01(\r\"\xe8\x01\n\x05\x45ntry\x12\"\n\x08\x65ntry_id\x18\x01 \x01(\x0b\x32\x10.aapt.pb.EntryId\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\'\n\nvisibility\x18\x03 \x01(\x0b\x32\x13.aapt.pb.Visibility\x12$\n\tallow_new\x18\x04 \x01(\x0b\x32\x11.aapt.pb.AllowNew\x12\x32\n\x10overlayable_item\x18\x05 \x01(\x0b\x32\x18.aapt.pb.OverlayableItem\x12*\n\x0c\x63onfig_value\x18\x06 \x03(\x0b\x32\x14.aapt.pb.ConfigValue\"T\n\x0b\x43onfigValue\x12&\n\x06\x63onfig\x18\x01 \x01(\x0b\x32\x16.aapt.pb.Configuration\x12\x1d\n\x05value\x18\x02 \x01(\x0b\x32\x0e.aapt.pb.Value\"\xa1\x01\n\x05Value\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12\x0c\n\x04weak\x18\x03 \x01(\x08\x12\x1d\n\x04item\x18\x04 \x01(\x0b\x32\r.aapt.pb.ItemH\x00\x12\x30\n\x0e\x63ompound_value\x18\x05 \x01(\x0b\x32\x16.aapt.pb.CompoundValueH\x00\x42\x07\n\x05value\"\x8d\x02\n\x04Item\x12!\n\x03ref\x18\x01 
\x01(\x0b\x32\x12.aapt.pb.ReferenceH\x00\x12\x1e\n\x03str\x18\x02 \x01(\x0b\x32\x0f.aapt.pb.StringH\x00\x12%\n\x07raw_str\x18\x03 \x01(\x0b\x32\x12.aapt.pb.RawStringH\x00\x12+\n\nstyled_str\x18\x04 \x01(\x0b\x32\x15.aapt.pb.StyledStringH\x00\x12&\n\x04\x66ile\x18\x05 \x01(\x0b\x32\x16.aapt.pb.FileReferenceH\x00\x12\x19\n\x02id\x18\x06 \x01(\x0b\x32\x0b.aapt.pb.IdH\x00\x12\"\n\x04prim\x18\x07 \x01(\x0b\x32\x12.aapt.pb.PrimitiveH\x00\x42\x07\n\x05value\"\xca\x01\n\rCompoundValue\x12\"\n\x04\x61ttr\x18\x01 \x01(\x0b\x32\x12.aapt.pb.AttributeH\x00\x12\x1f\n\x05style\x18\x02 \x01(\x0b\x32\x0e.aapt.pb.StyleH\x00\x12\'\n\tstyleable\x18\x03 \x01(\x0b\x32\x12.aapt.pb.StyleableH\x00\x12\x1f\n\x05\x61rray\x18\x04 \x01(\x0b\x32\x0e.aapt.pb.ArrayH\x00\x12!\n\x06plural\x18\x05 \x01(\x0b\x32\x0f.aapt.pb.PluralH\x00\x42\x07\n\x05value\"\x18\n\x07\x42oolean\x12\r\n\x05value\x18\x01 \x01(\x08\"\xa9\x01\n\tReference\x12%\n\x04type\x18\x01 \x01(\x0e\x32\x17.aapt.pb.Reference.Type\x12\n\n\x02id\x18\x02 \x01(\r\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x0f\n\x07private\x18\x04 \x01(\x08\x12$\n\nis_dynamic\x18\x05 \x01(\x0b\x32\x10.aapt.pb.Boolean\"$\n\x04Type\x12\r\n\tREFERENCE\x10\x00\x12\r\n\tATTRIBUTE\x10\x01\"\x04\n\x02Id\"\x17\n\x06String\x12\r\n\x05value\x18\x01 \x01(\t\"\x1a\n\tRawString\x12\r\n\x05value\x18\x01 \x01(\t\"\x83\x01\n\x0cStyledString\x12\r\n\x05value\x18\x01 \x01(\t\x12(\n\x04span\x18\x02 \x03(\x0b\x32\x1a.aapt.pb.StyledString.Span\x1a:\n\x04Span\x12\x0b\n\x03tag\x18\x01 \x01(\t\x12\x12\n\nfirst_char\x18\x02 \x01(\r\x12\x11\n\tlast_char\x18\x03 \x01(\r\"\x85\x01\n\rFileReference\x12\x0c\n\x04path\x18\x01 \x01(\t\x12)\n\x04type\x18\x02 \x01(\x0e\x32\x1b.aapt.pb.FileReference.Type\";\n\x04Type\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03PNG\x10\x01\x12\x0e\n\nBINARY_XML\x10\x02\x12\r\n\tPROTO_XML\x10\x03\"\x83\x04\n\tPrimitive\x12\x31\n\nnull_value\x18\x01 \x01(\x0b\x32\x1b.aapt.pb.Primitive.NullTypeH\x00\x12\x33\n\x0b\x65mpty_value\x18\x02 \x01(\x0b\x32\x1c.aapt.pb.Primitive.EmptyTypeH\x00\x12\x15\n\x0b\x66loat_value\x18\x03 \x01(\x02H\x00\x12\x19\n\x0f\x64imension_value\x18\r \x01(\rH\x00\x12\x18\n\x0e\x66raction_value\x18\x0e \x01(\rH\x00\x12\x1b\n\x11int_decimal_value\x18\x06 \x01(\x05H\x00\x12\x1f\n\x15int_hexadecimal_value\x18\x07 \x01(\rH\x00\x12\x17\n\rboolean_value\x18\x08 \x01(\x08H\x00\x12\x1b\n\x11\x63olor_argb8_value\x18\t \x01(\rH\x00\x12\x1a\n\x10\x63olor_rgb8_value\x18\n \x01(\rH\x00\x12\x1b\n\x11\x63olor_argb4_value\x18\x0b \x01(\rH\x00\x12\x1a\n\x10\x63olor_rgb4_value\x18\x0c \x01(\rH\x00\x12(\n\x1a\x64imension_value_deprecated\x18\x04 \x01(\x02\x42\x02\x18\x01H\x00\x12\'\n\x19\x66raction_value_deprecated\x18\x05 \x01(\x02\x42\x02\x18\x01H\x00\x1a\n\n\x08NullType\x1a\x0b\n\tEmptyTypeB\r\n\x0boneof_value\"\x90\x03\n\tAttribute\x12\x14\n\x0c\x66ormat_flags\x18\x01 \x01(\r\x12\x0f\n\x07min_int\x18\x02 \x01(\x05\x12\x0f\n\x07max_int\x18\x03 \x01(\x05\x12)\n\x06symbol\x18\x04 \x03(\x0b\x32\x19.aapt.pb.Attribute.Symbol\x1ay\n\x06Symbol\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12 \n\x04name\x18\x03 \x01(\x0b\x32\x12.aapt.pb.Reference\x12\r\n\x05value\x18\x04 \x01(\r\x12\x0c\n\x04type\x18\x05 \x01(\r\"\xa4\x01\n\x0b\x46ormatFlags\x12\x08\n\x04NONE\x10\x00\x12\t\n\x03\x41NY\x10\xff\xff\x03\x12\r\n\tREFERENCE\x10\x01\x12\n\n\x06STRING\x10\x02\x12\x0b\n\x07INTEGER\x10\x04\x12\x0b\n\x07\x42OOLEAN\x10\x08\x12\t\n\x05\x43OLOR\x10\x10\x12\t\n\x05\x46LOAT\x10 
\x12\r\n\tDIMENSION\x10@\x12\r\n\x08\x46RACTION\x10\x80\x01\x12\n\n\x04\x45NUM\x10\x80\x80\x04\x12\x0b\n\x05\x46LAGS\x10\x80\x80\x08\"\xf1\x01\n\x05Style\x12\"\n\x06parent\x18\x01 \x01(\x0b\x32\x12.aapt.pb.Reference\x12&\n\rparent_source\x18\x02 \x01(\x0b\x32\x0f.aapt.pb.Source\x12#\n\x05\x65ntry\x18\x03 \x03(\x0b\x32\x14.aapt.pb.Style.Entry\x1aw\n\x05\x45ntry\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12\x1f\n\x03key\x18\x03 \x01(\x0b\x32\x12.aapt.pb.Reference\x12\x1b\n\x04item\x18\x04 \x01(\x0b\x32\r.aapt.pb.Item\"\x91\x01\n\tStyleable\x12\'\n\x05\x65ntry\x18\x01 \x03(\x0b\x32\x18.aapt.pb.Styleable.Entry\x1a[\n\x05\x45ntry\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12 \n\x04\x61ttr\x18\x03 \x01(\x0b\x32\x12.aapt.pb.Reference\"\x8a\x01\n\x05\x41rray\x12\'\n\x07\x65lement\x18\x01 \x03(\x0b\x32\x16.aapt.pb.Array.Element\x1aX\n\x07\x45lement\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12\x1b\n\x04item\x18\x03 \x01(\x0b\x32\r.aapt.pb.Item\"\xef\x01\n\x06Plural\x12$\n\x05\x65ntry\x18\x01 \x03(\x0b\x32\x15.aapt.pb.Plural.Entry\x1a|\n\x05\x45ntry\x12\x1f\n\x06source\x18\x01 \x01(\x0b\x32\x0f.aapt.pb.Source\x12\x0f\n\x07\x63omment\x18\x02 \x01(\t\x12$\n\x05\x61rity\x18\x03 \x01(\x0e\x32\x15.aapt.pb.Plural.Arity\x12\x1b\n\x04item\x18\x04 \x01(\x0b\x32\r.aapt.pb.Item\"A\n\x05\x41rity\x12\x08\n\x04ZERO\x10\x00\x12\x07\n\x03ONE\x10\x01\x12\x07\n\x03TWO\x10\x02\x12\x07\n\x03\x46\x45W\x10\x03\x12\x08\n\x04MANY\x10\x04\x12\t\n\x05OTHER\x10\x05\"r\n\x07XmlNode\x12&\n\x07\x65lement\x18\x01 \x01(\x0b\x32\x13.aapt.pb.XmlElementH\x00\x12\x0e\n\x04text\x18\x02 \x01(\tH\x00\x12\'\n\x06source\x18\x03 \x01(\x0b\x32\x17.aapt.pb.SourcePositionB\x06\n\x04node\"\xb2\x01\n\nXmlElement\x12\x34\n\x15namespace_declaration\x18\x01 \x03(\x0b\x32\x15.aapt.pb.XmlNamespace\x12\x15\n\rnamespace_uri\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12(\n\tattribute\x18\x04 \x03(\x0b\x32\x15.aapt.pb.XmlAttribute\x12\x1f\n\x05\x63hild\x18\x05 \x03(\x0b\x32\x10.aapt.pb.XmlNode\"T\n\x0cXmlNamespace\x12\x0e\n\x06prefix\x18\x01 \x01(\t\x12\x0b\n\x03uri\x18\x02 \x01(\t\x12\'\n\x06source\x18\x03 \x01(\x0b\x32\x17.aapt.pb.SourcePosition\"\xa6\x01\n\x0cXmlAttribute\x12\x15\n\rnamespace_uri\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\x12\'\n\x06source\x18\x04 \x01(\x0b\x32\x17.aapt.pb.SourcePosition\x12\x13\n\x0bresource_id\x18\x05 \x01(\r\x12$\n\rcompiled_item\x18\x06 \x01(\x0b\x32\r.aapt.pb.ItemB\x12\n\x10\x63om.android.aaptb\x06proto3') + , + dependencies=[frameworks_dot_base_dot_tools_dot_aapt2_dot_Configuration__pb2.DESCRIPTOR,]) + + + +_VISIBILITY_LEVEL = _descriptor.EnumDescriptor( + name='Level', + full_name='aapt.pb.Visibility.Level', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='UNKNOWN', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRIVATE', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PUBLIC', index=2, number=2, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=849, + serialized_end=894, +) +_sym_db.RegisterEnumDescriptor(_VISIBILITY_LEVEL) + +_OVERLAYABLEITEM_POLICY = _descriptor.EnumDescriptor( + name='Policy', + full_name='aapt.pb.OverlayableItem.Policy', + filename=None, + 
file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NONE', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PUBLIC', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SYSTEM', index=2, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='VENDOR', index=3, number=3, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRODUCT', index=4, number=4, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SIGNATURE', index=5, number=5, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ODM', index=6, number=6, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='OEM', index=7, number=7, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=1179, + serialized_end=1279, +) +_sym_db.RegisterEnumDescriptor(_OVERLAYABLEITEM_POLICY) + +_REFERENCE_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='aapt.pb.Reference.Type', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='REFERENCE', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ATTRIBUTE', index=1, number=1, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=2426, + serialized_end=2462, +) +_sym_db.RegisterEnumDescriptor(_REFERENCE_TYPE) + +_FILEREFERENCE_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='aapt.pb.FileReference.Type', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='UNKNOWN', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PNG', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BINARY_XML', index=2, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PROTO_XML', index=3, number=3, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=2732, + serialized_end=2791, +) +_sym_db.RegisterEnumDescriptor(_FILEREFERENCE_TYPE) + +_ATTRIBUTE_FORMATFLAGS = _descriptor.EnumDescriptor( + name='FormatFlags', + full_name='aapt.pb.Attribute.FormatFlags', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NONE', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ANY', index=1, number=65535, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REFERENCE', index=2, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='STRING', index=3, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='INTEGER', index=4, number=4, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BOOLEAN', index=5, number=8, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='COLOR', index=6, number=16, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FLOAT', index=7, number=32, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DIMENSION', index=8, number=64, + 
serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FRACTION', index=9, number=128, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ENUM', index=10, number=65536, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FLAGS', index=11, number=131072, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=3548, + serialized_end=3712, +) +_sym_db.RegisterEnumDescriptor(_ATTRIBUTE_FORMATFLAGS) + +_PLURAL_ARITY = _descriptor.EnumDescriptor( + name='Arity', + full_name='aapt.pb.Plural.Arity', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='ZERO', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ONE', index=1, number=1, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TWO', index=2, number=2, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FEW', index=3, number=3, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MANY', index=4, number=4, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='OTHER', index=5, number=5, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=4422, + serialized_end=4487, +) +_sym_db.RegisterEnumDescriptor(_PLURAL_ARITY) + + +_STRINGPOOL = _descriptor.Descriptor( + name='StringPool', + full_name='aapt.pb.StringPool', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='data', full_name='aapt.pb.StringPool.data', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=105, + serialized_end=131, +) + + +_SOURCEPOSITION = _descriptor.Descriptor( + name='SourcePosition', + full_name='aapt.pb.SourcePosition', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='line_number', full_name='aapt.pb.SourcePosition.line_number', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='column_number', full_name='aapt.pb.SourcePosition.column_number', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=133, + serialized_end=193, +) + + +_SOURCE = _descriptor.Descriptor( + name='Source', + full_name='aapt.pb.Source', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='path_idx', 
full_name='aapt.pb.Source.path_idx', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='position', full_name='aapt.pb.Source.position', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=195, + serialized_end=264, +) + + +_TOOLFINGERPRINT = _descriptor.Descriptor( + name='ToolFingerprint', + full_name='aapt.pb.ToolFingerprint', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='tool', full_name='aapt.pb.ToolFingerprint.tool', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='version', full_name='aapt.pb.ToolFingerprint.version', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=266, + serialized_end=314, +) + + +_RESOURCETABLE = _descriptor.Descriptor( + name='ResourceTable', + full_name='aapt.pb.ResourceTable', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='source_pool', full_name='aapt.pb.ResourceTable.source_pool', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='package', full_name='aapt.pb.ResourceTable.package', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='overlayable', full_name='aapt.pb.ResourceTable.overlayable', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='tool_fingerprint', full_name='aapt.pb.ResourceTable.tool_fingerprint', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], 
+ enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=317, + serialized_end=504, +) + + +_PACKAGEID = _descriptor.Descriptor( + name='PackageId', + full_name='aapt.pb.PackageId', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='aapt.pb.PackageId.id', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=506, + serialized_end=529, +) + + +_PACKAGE = _descriptor.Descriptor( + name='Package', + full_name='aapt.pb.Package', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='package_id', full_name='aapt.pb.Package.package_id', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='package_name', full_name='aapt.pb.Package.package_name', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='type', full_name='aapt.pb.Package.type', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=531, + serialized_end=631, +) + + +_TYPEID = _descriptor.Descriptor( + name='TypeId', + full_name='aapt.pb.TypeId', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='aapt.pb.TypeId.id', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=633, + serialized_end=653, +) + + +_TYPE = _descriptor.Descriptor( + name='Type', + full_name='aapt.pb.Type', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='type_id', full_name='aapt.pb.Type.type_id', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='name', full_name='aapt.pb.Type.name', index=1, + 
number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='entry', full_name='aapt.pb.Type.entry', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=655, + serialized_end=740, +) + + +_VISIBILITY = _descriptor.Descriptor( + name='Visibility', + full_name='aapt.pb.Visibility', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='level', full_name='aapt.pb.Visibility.level', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='source', full_name='aapt.pb.Visibility.source', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='comment', full_name='aapt.pb.Visibility.comment', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _VISIBILITY_LEVEL, + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=743, + serialized_end=894, +) + + +_ALLOWNEW = _descriptor.Descriptor( + name='AllowNew', + full_name='aapt.pb.AllowNew', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='source', full_name='aapt.pb.AllowNew.source', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='comment', full_name='aapt.pb.AllowNew.comment', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=896, + serialized_end=956, +) + + +_OVERLAYABLE = _descriptor.Descriptor( + name='Overlayable', + full_name='aapt.pb.Overlayable', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='aapt.pb.Overlayable.name', index=0, + 
number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='source', full_name='aapt.pb.Overlayable.source', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='actor', full_name='aapt.pb.Overlayable.actor', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=958, + serialized_end=1033, +) + + +_OVERLAYABLEITEM = _descriptor.Descriptor( + name='OverlayableItem', + full_name='aapt.pb.OverlayableItem', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='source', full_name='aapt.pb.OverlayableItem.source', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='comment', full_name='aapt.pb.OverlayableItem.comment', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='policy', full_name='aapt.pb.OverlayableItem.policy', index=2, + number=3, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='overlayable_idx', full_name='aapt.pb.OverlayableItem.overlayable_idx', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _OVERLAYABLEITEM_POLICY, + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1036, + serialized_end=1279, +) + + +_ENTRYID = _descriptor.Descriptor( + name='EntryId', + full_name='aapt.pb.EntryId', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='aapt.pb.EntryId.id', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + 
is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1281, + serialized_end=1302, +) + + +_ENTRY = _descriptor.Descriptor( + name='Entry', + full_name='aapt.pb.Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='entry_id', full_name='aapt.pb.Entry.entry_id', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='name', full_name='aapt.pb.Entry.name', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='visibility', full_name='aapt.pb.Entry.visibility', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='allow_new', full_name='aapt.pb.Entry.allow_new', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='overlayable_item', full_name='aapt.pb.Entry.overlayable_item', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='config_value', full_name='aapt.pb.Entry.config_value', index=5, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1305, + serialized_end=1537, +) + + +_CONFIGVALUE = _descriptor.Descriptor( + name='ConfigValue', + full_name='aapt.pb.ConfigValue', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='config', full_name='aapt.pb.ConfigValue.config', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='aapt.pb.ConfigValue.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + 
serialized_start=1539, + serialized_end=1623, +) + + +_VALUE = _descriptor.Descriptor( + name='Value', + full_name='aapt.pb.Value', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='source', full_name='aapt.pb.Value.source', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='comment', full_name='aapt.pb.Value.comment', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='weak', full_name='aapt.pb.Value.weak', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='item', full_name='aapt.pb.Value.item', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='compound_value', full_name='aapt.pb.Value.compound_value', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='value', full_name='aapt.pb.Value.value', + index=0, containing_type=None, fields=[]), + ], + serialized_start=1626, + serialized_end=1787, +) + + +_ITEM = _descriptor.Descriptor( + name='Item', + full_name='aapt.pb.Item', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='ref', full_name='aapt.pb.Item.ref', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='str', full_name='aapt.pb.Item.str', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='raw_str', full_name='aapt.pb.Item.raw_str', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='styled_str', full_name='aapt.pb.Item.styled_str', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='file', full_name='aapt.pb.Item.file', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='id', full_name='aapt.pb.Item.id', index=5, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='prim', full_name='aapt.pb.Item.prim', index=6, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='value', full_name='aapt.pb.Item.value', + index=0, containing_type=None, fields=[]), + ], + serialized_start=1790, + serialized_end=2059, +) + + +_COMPOUNDVALUE = _descriptor.Descriptor( + name='CompoundValue', + full_name='aapt.pb.CompoundValue', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='attr', full_name='aapt.pb.CompoundValue.attr', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='style', full_name='aapt.pb.CompoundValue.style', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='styleable', full_name='aapt.pb.CompoundValue.styleable', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='array', full_name='aapt.pb.CompoundValue.array', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='plural', full_name='aapt.pb.CompoundValue.plural', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='value', 
full_name='aapt.pb.CompoundValue.value', + index=0, containing_type=None, fields=[]), + ], + serialized_start=2062, + serialized_end=2264, +) + + +_BOOLEAN = _descriptor.Descriptor( + name='Boolean', + full_name='aapt.pb.Boolean', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='aapt.pb.Boolean.value', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2266, + serialized_end=2290, +) + + +_REFERENCE = _descriptor.Descriptor( + name='Reference', + full_name='aapt.pb.Reference', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='type', full_name='aapt.pb.Reference.type', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='id', full_name='aapt.pb.Reference.id', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='name', full_name='aapt.pb.Reference.name', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='private', full_name='aapt.pb.Reference.private', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='is_dynamic', full_name='aapt.pb.Reference.is_dynamic', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _REFERENCE_TYPE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2293, + serialized_end=2462, +) + + +_ID = _descriptor.Descriptor( + name='Id', + full_name='aapt.pb.Id', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2464, + serialized_end=2468, +) + + +_STRING = _descriptor.Descriptor( + name='String', + full_name='aapt.pb.String', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='aapt.pb.String.value', index=0, + number=1, 
type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2470, + serialized_end=2493, +) + + +_RAWSTRING = _descriptor.Descriptor( + name='RawString', + full_name='aapt.pb.RawString', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='aapt.pb.RawString.value', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2495, + serialized_end=2521, +) + + +_STYLEDSTRING_SPAN = _descriptor.Descriptor( + name='Span', + full_name='aapt.pb.StyledString.Span', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='tag', full_name='aapt.pb.StyledString.Span.tag', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='first_char', full_name='aapt.pb.StyledString.Span.first_char', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='last_char', full_name='aapt.pb.StyledString.Span.last_char', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2597, + serialized_end=2655, +) + +_STYLEDSTRING = _descriptor.Descriptor( + name='StyledString', + full_name='aapt.pb.StyledString', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='aapt.pb.StyledString.value', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='span', full_name='aapt.pb.StyledString.span', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_STYLEDSTRING_SPAN, 
], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2524, + serialized_end=2655, +) + + +_FILEREFERENCE = _descriptor.Descriptor( + name='FileReference', + full_name='aapt.pb.FileReference', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='aapt.pb.FileReference.path', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='type', full_name='aapt.pb.FileReference.type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FILEREFERENCE_TYPE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2658, + serialized_end=2791, +) + + +_PRIMITIVE_NULLTYPE = _descriptor.Descriptor( + name='NullType', + full_name='aapt.pb.Primitive.NullType', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3271, + serialized_end=3281, +) + +_PRIMITIVE_EMPTYTYPE = _descriptor.Descriptor( + name='EmptyType', + full_name='aapt.pb.Primitive.EmptyType', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3283, + serialized_end=3294, +) + +_PRIMITIVE = _descriptor.Descriptor( + name='Primitive', + full_name='aapt.pb.Primitive', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='null_value', full_name='aapt.pb.Primitive.null_value', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='empty_value', full_name='aapt.pb.Primitive.empty_value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='float_value', full_name='aapt.pb.Primitive.float_value', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='dimension_value', full_name='aapt.pb.Primitive.dimension_value', index=3, + number=13, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='fraction_value', full_name='aapt.pb.Primitive.fraction_value', index=4, + number=14, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='int_decimal_value', full_name='aapt.pb.Primitive.int_decimal_value', index=5, + number=6, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='int_hexadecimal_value', full_name='aapt.pb.Primitive.int_hexadecimal_value', index=6, + number=7, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='boolean_value', full_name='aapt.pb.Primitive.boolean_value', index=7, + number=8, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='color_argb8_value', full_name='aapt.pb.Primitive.color_argb8_value', index=8, + number=9, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='color_rgb8_value', full_name='aapt.pb.Primitive.color_rgb8_value', index=9, + number=10, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='color_argb4_value', full_name='aapt.pb.Primitive.color_argb4_value', index=10, + number=11, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='color_rgb4_value', full_name='aapt.pb.Primitive.color_rgb4_value', index=11, + number=12, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='dimension_value_deprecated', full_name='aapt.pb.Primitive.dimension_value_deprecated', index=12, + number=4, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=_b('\030\001'), file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='fraction_value_deprecated', full_name='aapt.pb.Primitive.fraction_value_deprecated', index=13, + number=5, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=_b('\030\001'), file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_PRIMITIVE_NULLTYPE, _PRIMITIVE_EMPTYTYPE, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='oneof_value', full_name='aapt.pb.Primitive.oneof_value', + index=0, containing_type=None, fields=[]), + ], + serialized_start=2794, + serialized_end=3309, +) + + +_ATTRIBUTE_SYMBOL = _descriptor.Descriptor( + name='Symbol', + full_name='aapt.pb.Attribute.Symbol', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='source', full_name='aapt.pb.Attribute.Symbol.source', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='comment', full_name='aapt.pb.Attribute.Symbol.comment', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='name', full_name='aapt.pb.Attribute.Symbol.name', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='aapt.pb.Attribute.Symbol.value', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='type', full_name='aapt.pb.Attribute.Symbol.type', index=4, + number=5, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3424, + serialized_end=3545, +) + +_ATTRIBUTE = _descriptor.Descriptor( + name='Attribute', + full_name='aapt.pb.Attribute', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='format_flags', full_name='aapt.pb.Attribute.format_flags', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='min_int', full_name='aapt.pb.Attribute.min_int', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_int', full_name='aapt.pb.Attribute.max_int', 
index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='symbol', full_name='aapt.pb.Attribute.symbol', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_ATTRIBUTE_SYMBOL, ], + enum_types=[ + _ATTRIBUTE_FORMATFLAGS, + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3312, + serialized_end=3712, +) + + +_STYLE_ENTRY = _descriptor.Descriptor( + name='Entry', + full_name='aapt.pb.Style.Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='source', full_name='aapt.pb.Style.Entry.source', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='comment', full_name='aapt.pb.Style.Entry.comment', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='key', full_name='aapt.pb.Style.Entry.key', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='item', full_name='aapt.pb.Style.Entry.item', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3837, + serialized_end=3956, +) + +_STYLE = _descriptor.Descriptor( + name='Style', + full_name='aapt.pb.Style', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='aapt.pb.Style.parent', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='parent_source', full_name='aapt.pb.Style.parent_source', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='entry', full_name='aapt.pb.Style.entry', index=2, + number=3, type=11, cpp_type=10, label=3, + 
has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_STYLE_ENTRY, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3715, + serialized_end=3956, +) + + +_STYLEABLE_ENTRY = _descriptor.Descriptor( + name='Entry', + full_name='aapt.pb.Styleable.Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='source', full_name='aapt.pb.Styleable.Entry.source', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='comment', full_name='aapt.pb.Styleable.Entry.comment', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='attr', full_name='aapt.pb.Styleable.Entry.attr', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4013, + serialized_end=4104, +) + +_STYLEABLE = _descriptor.Descriptor( + name='Styleable', + full_name='aapt.pb.Styleable', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='entry', full_name='aapt.pb.Styleable.entry', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_STYLEABLE_ENTRY, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3959, + serialized_end=4104, +) + + +_ARRAY_ELEMENT = _descriptor.Descriptor( + name='Element', + full_name='aapt.pb.Array.Element', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='source', full_name='aapt.pb.Array.Element.source', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='comment', full_name='aapt.pb.Array.Element.comment', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='item', full_name='aapt.pb.Array.Element.item', index=2, + 
number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4157, + serialized_end=4245, +) + +_ARRAY = _descriptor.Descriptor( + name='Array', + full_name='aapt.pb.Array', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='element', full_name='aapt.pb.Array.element', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_ARRAY_ELEMENT, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4107, + serialized_end=4245, +) + + +_PLURAL_ENTRY = _descriptor.Descriptor( + name='Entry', + full_name='aapt.pb.Plural.Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='source', full_name='aapt.pb.Plural.Entry.source', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='comment', full_name='aapt.pb.Plural.Entry.comment', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='arity', full_name='aapt.pb.Plural.Entry.arity', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='item', full_name='aapt.pb.Plural.Entry.item', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4296, + serialized_end=4420, +) + +_PLURAL = _descriptor.Descriptor( + name='Plural', + full_name='aapt.pb.Plural', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='entry', full_name='aapt.pb.Plural.entry', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_PLURAL_ENTRY, ], + enum_types=[ + _PLURAL_ARITY, + ], + serialized_options=None, + is_extendable=False, + 
syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4248, + serialized_end=4487, +) + + +_XMLNODE = _descriptor.Descriptor( + name='XmlNode', + full_name='aapt.pb.XmlNode', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='element', full_name='aapt.pb.XmlNode.element', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='text', full_name='aapt.pb.XmlNode.text', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='source', full_name='aapt.pb.XmlNode.source', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='node', full_name='aapt.pb.XmlNode.node', + index=0, containing_type=None, fields=[]), + ], + serialized_start=4489, + serialized_end=4603, +) + + +_XMLELEMENT = _descriptor.Descriptor( + name='XmlElement', + full_name='aapt.pb.XmlElement', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='namespace_declaration', full_name='aapt.pb.XmlElement.namespace_declaration', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='namespace_uri', full_name='aapt.pb.XmlElement.namespace_uri', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='name', full_name='aapt.pb.XmlElement.name', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='attribute', full_name='aapt.pb.XmlElement.attribute', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='child', full_name='aapt.pb.XmlElement.child', index=4, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + 
], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4606, + serialized_end=4784, +) + + +_XMLNAMESPACE = _descriptor.Descriptor( + name='XmlNamespace', + full_name='aapt.pb.XmlNamespace', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='prefix', full_name='aapt.pb.XmlNamespace.prefix', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='uri', full_name='aapt.pb.XmlNamespace.uri', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='source', full_name='aapt.pb.XmlNamespace.source', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4786, + serialized_end=4870, +) + + +_XMLATTRIBUTE = _descriptor.Descriptor( + name='XmlAttribute', + full_name='aapt.pb.XmlAttribute', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='namespace_uri', full_name='aapt.pb.XmlAttribute.namespace_uri', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='name', full_name='aapt.pb.XmlAttribute.name', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='aapt.pb.XmlAttribute.value', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='source', full_name='aapt.pb.XmlAttribute.source', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='resource_id', full_name='aapt.pb.XmlAttribute.resource_id', index=4, + number=5, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + 
_descriptor.FieldDescriptor( + name='compiled_item', full_name='aapt.pb.XmlAttribute.compiled_item', index=5, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4873, + serialized_end=5039, +) + +_SOURCE.fields_by_name['position'].message_type = _SOURCEPOSITION +_RESOURCETABLE.fields_by_name['source_pool'].message_type = _STRINGPOOL +_RESOURCETABLE.fields_by_name['package'].message_type = _PACKAGE +_RESOURCETABLE.fields_by_name['overlayable'].message_type = _OVERLAYABLE +_RESOURCETABLE.fields_by_name['tool_fingerprint'].message_type = _TOOLFINGERPRINT +_PACKAGE.fields_by_name['package_id'].message_type = _PACKAGEID +_PACKAGE.fields_by_name['type'].message_type = _TYPE +_TYPE.fields_by_name['type_id'].message_type = _TYPEID +_TYPE.fields_by_name['entry'].message_type = _ENTRY +_VISIBILITY.fields_by_name['level'].enum_type = _VISIBILITY_LEVEL +_VISIBILITY.fields_by_name['source'].message_type = _SOURCE +_VISIBILITY_LEVEL.containing_type = _VISIBILITY +_ALLOWNEW.fields_by_name['source'].message_type = _SOURCE +_OVERLAYABLE.fields_by_name['source'].message_type = _SOURCE +_OVERLAYABLEITEM.fields_by_name['source'].message_type = _SOURCE +_OVERLAYABLEITEM.fields_by_name['policy'].enum_type = _OVERLAYABLEITEM_POLICY +_OVERLAYABLEITEM_POLICY.containing_type = _OVERLAYABLEITEM +_ENTRY.fields_by_name['entry_id'].message_type = _ENTRYID +_ENTRY.fields_by_name['visibility'].message_type = _VISIBILITY +_ENTRY.fields_by_name['allow_new'].message_type = _ALLOWNEW +_ENTRY.fields_by_name['overlayable_item'].message_type = _OVERLAYABLEITEM +_ENTRY.fields_by_name['config_value'].message_type = _CONFIGVALUE +_CONFIGVALUE.fields_by_name['config'].message_type = frameworks_dot_base_dot_tools_dot_aapt2_dot_Configuration__pb2._CONFIGURATION +_CONFIGVALUE.fields_by_name['value'].message_type = _VALUE +_VALUE.fields_by_name['source'].message_type = _SOURCE +_VALUE.fields_by_name['item'].message_type = _ITEM +_VALUE.fields_by_name['compound_value'].message_type = _COMPOUNDVALUE +_VALUE.oneofs_by_name['value'].fields.append( + _VALUE.fields_by_name['item']) +_VALUE.fields_by_name['item'].containing_oneof = _VALUE.oneofs_by_name['value'] +_VALUE.oneofs_by_name['value'].fields.append( + _VALUE.fields_by_name['compound_value']) +_VALUE.fields_by_name['compound_value'].containing_oneof = _VALUE.oneofs_by_name['value'] +_ITEM.fields_by_name['ref'].message_type = _REFERENCE +_ITEM.fields_by_name['str'].message_type = _STRING +_ITEM.fields_by_name['raw_str'].message_type = _RAWSTRING +_ITEM.fields_by_name['styled_str'].message_type = _STYLEDSTRING +_ITEM.fields_by_name['file'].message_type = _FILEREFERENCE +_ITEM.fields_by_name['id'].message_type = _ID +_ITEM.fields_by_name['prim'].message_type = _PRIMITIVE +_ITEM.oneofs_by_name['value'].fields.append( + _ITEM.fields_by_name['ref']) +_ITEM.fields_by_name['ref'].containing_oneof = _ITEM.oneofs_by_name['value'] +_ITEM.oneofs_by_name['value'].fields.append( + _ITEM.fields_by_name['str']) +_ITEM.fields_by_name['str'].containing_oneof = _ITEM.oneofs_by_name['value'] +_ITEM.oneofs_by_name['value'].fields.append( + _ITEM.fields_by_name['raw_str']) +_ITEM.fields_by_name['raw_str'].containing_oneof = 
_ITEM.oneofs_by_name['value'] +_ITEM.oneofs_by_name['value'].fields.append( + _ITEM.fields_by_name['styled_str']) +_ITEM.fields_by_name['styled_str'].containing_oneof = _ITEM.oneofs_by_name['value'] +_ITEM.oneofs_by_name['value'].fields.append( + _ITEM.fields_by_name['file']) +_ITEM.fields_by_name['file'].containing_oneof = _ITEM.oneofs_by_name['value'] +_ITEM.oneofs_by_name['value'].fields.append( + _ITEM.fields_by_name['id']) +_ITEM.fields_by_name['id'].containing_oneof = _ITEM.oneofs_by_name['value'] +_ITEM.oneofs_by_name['value'].fields.append( + _ITEM.fields_by_name['prim']) +_ITEM.fields_by_name['prim'].containing_oneof = _ITEM.oneofs_by_name['value'] +_COMPOUNDVALUE.fields_by_name['attr'].message_type = _ATTRIBUTE +_COMPOUNDVALUE.fields_by_name['style'].message_type = _STYLE +_COMPOUNDVALUE.fields_by_name['styleable'].message_type = _STYLEABLE +_COMPOUNDVALUE.fields_by_name['array'].message_type = _ARRAY +_COMPOUNDVALUE.fields_by_name['plural'].message_type = _PLURAL +_COMPOUNDVALUE.oneofs_by_name['value'].fields.append( + _COMPOUNDVALUE.fields_by_name['attr']) +_COMPOUNDVALUE.fields_by_name['attr'].containing_oneof = _COMPOUNDVALUE.oneofs_by_name['value'] +_COMPOUNDVALUE.oneofs_by_name['value'].fields.append( + _COMPOUNDVALUE.fields_by_name['style']) +_COMPOUNDVALUE.fields_by_name['style'].containing_oneof = _COMPOUNDVALUE.oneofs_by_name['value'] +_COMPOUNDVALUE.oneofs_by_name['value'].fields.append( + _COMPOUNDVALUE.fields_by_name['styleable']) +_COMPOUNDVALUE.fields_by_name['styleable'].containing_oneof = _COMPOUNDVALUE.oneofs_by_name['value'] +_COMPOUNDVALUE.oneofs_by_name['value'].fields.append( + _COMPOUNDVALUE.fields_by_name['array']) +_COMPOUNDVALUE.fields_by_name['array'].containing_oneof = _COMPOUNDVALUE.oneofs_by_name['value'] +_COMPOUNDVALUE.oneofs_by_name['value'].fields.append( + _COMPOUNDVALUE.fields_by_name['plural']) +_COMPOUNDVALUE.fields_by_name['plural'].containing_oneof = _COMPOUNDVALUE.oneofs_by_name['value'] +_REFERENCE.fields_by_name['type'].enum_type = _REFERENCE_TYPE +_REFERENCE.fields_by_name['is_dynamic'].message_type = _BOOLEAN +_REFERENCE_TYPE.containing_type = _REFERENCE +_STYLEDSTRING_SPAN.containing_type = _STYLEDSTRING +_STYLEDSTRING.fields_by_name['span'].message_type = _STYLEDSTRING_SPAN +_FILEREFERENCE.fields_by_name['type'].enum_type = _FILEREFERENCE_TYPE +_FILEREFERENCE_TYPE.containing_type = _FILEREFERENCE +_PRIMITIVE_NULLTYPE.containing_type = _PRIMITIVE +_PRIMITIVE_EMPTYTYPE.containing_type = _PRIMITIVE +_PRIMITIVE.fields_by_name['null_value'].message_type = _PRIMITIVE_NULLTYPE +_PRIMITIVE.fields_by_name['empty_value'].message_type = _PRIMITIVE_EMPTYTYPE +_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append( + _PRIMITIVE.fields_by_name['null_value']) +_PRIMITIVE.fields_by_name['null_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value'] +_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append( + _PRIMITIVE.fields_by_name['empty_value']) +_PRIMITIVE.fields_by_name['empty_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value'] +_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append( + _PRIMITIVE.fields_by_name['float_value']) +_PRIMITIVE.fields_by_name['float_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value'] +_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append( + _PRIMITIVE.fields_by_name['dimension_value']) +_PRIMITIVE.fields_by_name['dimension_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value'] +_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append( + 
_PRIMITIVE.fields_by_name['fraction_value']) +_PRIMITIVE.fields_by_name['fraction_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value'] +_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append( + _PRIMITIVE.fields_by_name['int_decimal_value']) +_PRIMITIVE.fields_by_name['int_decimal_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value'] +_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append( + _PRIMITIVE.fields_by_name['int_hexadecimal_value']) +_PRIMITIVE.fields_by_name['int_hexadecimal_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value'] +_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append( + _PRIMITIVE.fields_by_name['boolean_value']) +_PRIMITIVE.fields_by_name['boolean_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value'] +_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append( + _PRIMITIVE.fields_by_name['color_argb8_value']) +_PRIMITIVE.fields_by_name['color_argb8_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value'] +_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append( + _PRIMITIVE.fields_by_name['color_rgb8_value']) +_PRIMITIVE.fields_by_name['color_rgb8_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value'] +_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append( + _PRIMITIVE.fields_by_name['color_argb4_value']) +_PRIMITIVE.fields_by_name['color_argb4_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value'] +_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append( + _PRIMITIVE.fields_by_name['color_rgb4_value']) +_PRIMITIVE.fields_by_name['color_rgb4_value'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value'] +_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append( + _PRIMITIVE.fields_by_name['dimension_value_deprecated']) +_PRIMITIVE.fields_by_name['dimension_value_deprecated'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value'] +_PRIMITIVE.oneofs_by_name['oneof_value'].fields.append( + _PRIMITIVE.fields_by_name['fraction_value_deprecated']) +_PRIMITIVE.fields_by_name['fraction_value_deprecated'].containing_oneof = _PRIMITIVE.oneofs_by_name['oneof_value'] +_ATTRIBUTE_SYMBOL.fields_by_name['source'].message_type = _SOURCE +_ATTRIBUTE_SYMBOL.fields_by_name['name'].message_type = _REFERENCE +_ATTRIBUTE_SYMBOL.containing_type = _ATTRIBUTE +_ATTRIBUTE.fields_by_name['symbol'].message_type = _ATTRIBUTE_SYMBOL +_ATTRIBUTE_FORMATFLAGS.containing_type = _ATTRIBUTE +_STYLE_ENTRY.fields_by_name['source'].message_type = _SOURCE +_STYLE_ENTRY.fields_by_name['key'].message_type = _REFERENCE +_STYLE_ENTRY.fields_by_name['item'].message_type = _ITEM +_STYLE_ENTRY.containing_type = _STYLE +_STYLE.fields_by_name['parent'].message_type = _REFERENCE +_STYLE.fields_by_name['parent_source'].message_type = _SOURCE +_STYLE.fields_by_name['entry'].message_type = _STYLE_ENTRY +_STYLEABLE_ENTRY.fields_by_name['source'].message_type = _SOURCE +_STYLEABLE_ENTRY.fields_by_name['attr'].message_type = _REFERENCE +_STYLEABLE_ENTRY.containing_type = _STYLEABLE +_STYLEABLE.fields_by_name['entry'].message_type = _STYLEABLE_ENTRY +_ARRAY_ELEMENT.fields_by_name['source'].message_type = _SOURCE +_ARRAY_ELEMENT.fields_by_name['item'].message_type = _ITEM +_ARRAY_ELEMENT.containing_type = _ARRAY +_ARRAY.fields_by_name['element'].message_type = _ARRAY_ELEMENT +_PLURAL_ENTRY.fields_by_name['source'].message_type = _SOURCE +_PLURAL_ENTRY.fields_by_name['arity'].enum_type = _PLURAL_ARITY +_PLURAL_ENTRY.fields_by_name['item'].message_type = _ITEM +_PLURAL_ENTRY.containing_type = _PLURAL 
+_PLURAL.fields_by_name['entry'].message_type = _PLURAL_ENTRY +_PLURAL_ARITY.containing_type = _PLURAL +_XMLNODE.fields_by_name['element'].message_type = _XMLELEMENT +_XMLNODE.fields_by_name['source'].message_type = _SOURCEPOSITION +_XMLNODE.oneofs_by_name['node'].fields.append( + _XMLNODE.fields_by_name['element']) +_XMLNODE.fields_by_name['element'].containing_oneof = _XMLNODE.oneofs_by_name['node'] +_XMLNODE.oneofs_by_name['node'].fields.append( + _XMLNODE.fields_by_name['text']) +_XMLNODE.fields_by_name['text'].containing_oneof = _XMLNODE.oneofs_by_name['node'] +_XMLELEMENT.fields_by_name['namespace_declaration'].message_type = _XMLNAMESPACE +_XMLELEMENT.fields_by_name['attribute'].message_type = _XMLATTRIBUTE +_XMLELEMENT.fields_by_name['child'].message_type = _XMLNODE +_XMLNAMESPACE.fields_by_name['source'].message_type = _SOURCEPOSITION +_XMLATTRIBUTE.fields_by_name['source'].message_type = _SOURCEPOSITION +_XMLATTRIBUTE.fields_by_name['compiled_item'].message_type = _ITEM +DESCRIPTOR.message_types_by_name['StringPool'] = _STRINGPOOL +DESCRIPTOR.message_types_by_name['SourcePosition'] = _SOURCEPOSITION +DESCRIPTOR.message_types_by_name['Source'] = _SOURCE +DESCRIPTOR.message_types_by_name['ToolFingerprint'] = _TOOLFINGERPRINT +DESCRIPTOR.message_types_by_name['ResourceTable'] = _RESOURCETABLE +DESCRIPTOR.message_types_by_name['PackageId'] = _PACKAGEID +DESCRIPTOR.message_types_by_name['Package'] = _PACKAGE +DESCRIPTOR.message_types_by_name['TypeId'] = _TYPEID +DESCRIPTOR.message_types_by_name['Type'] = _TYPE +DESCRIPTOR.message_types_by_name['Visibility'] = _VISIBILITY +DESCRIPTOR.message_types_by_name['AllowNew'] = _ALLOWNEW +DESCRIPTOR.message_types_by_name['Overlayable'] = _OVERLAYABLE +DESCRIPTOR.message_types_by_name['OverlayableItem'] = _OVERLAYABLEITEM +DESCRIPTOR.message_types_by_name['EntryId'] = _ENTRYID +DESCRIPTOR.message_types_by_name['Entry'] = _ENTRY +DESCRIPTOR.message_types_by_name['ConfigValue'] = _CONFIGVALUE +DESCRIPTOR.message_types_by_name['Value'] = _VALUE +DESCRIPTOR.message_types_by_name['Item'] = _ITEM +DESCRIPTOR.message_types_by_name['CompoundValue'] = _COMPOUNDVALUE +DESCRIPTOR.message_types_by_name['Boolean'] = _BOOLEAN +DESCRIPTOR.message_types_by_name['Reference'] = _REFERENCE +DESCRIPTOR.message_types_by_name['Id'] = _ID +DESCRIPTOR.message_types_by_name['String'] = _STRING +DESCRIPTOR.message_types_by_name['RawString'] = _RAWSTRING +DESCRIPTOR.message_types_by_name['StyledString'] = _STYLEDSTRING +DESCRIPTOR.message_types_by_name['FileReference'] = _FILEREFERENCE +DESCRIPTOR.message_types_by_name['Primitive'] = _PRIMITIVE +DESCRIPTOR.message_types_by_name['Attribute'] = _ATTRIBUTE +DESCRIPTOR.message_types_by_name['Style'] = _STYLE +DESCRIPTOR.message_types_by_name['Styleable'] = _STYLEABLE +DESCRIPTOR.message_types_by_name['Array'] = _ARRAY +DESCRIPTOR.message_types_by_name['Plural'] = _PLURAL +DESCRIPTOR.message_types_by_name['XmlNode'] = _XMLNODE +DESCRIPTOR.message_types_by_name['XmlElement'] = _XMLELEMENT +DESCRIPTOR.message_types_by_name['XmlNamespace'] = _XMLNAMESPACE +DESCRIPTOR.message_types_by_name['XmlAttribute'] = _XMLATTRIBUTE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +StringPool = _reflection.GeneratedProtocolMessageType('StringPool', (_message.Message,), { + 'DESCRIPTOR' : _STRINGPOOL, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.StringPool) + }) +_sym_db.RegisterMessage(StringPool) + +SourcePosition = _reflection.GeneratedProtocolMessageType('SourcePosition', 
(_message.Message,), { + 'DESCRIPTOR' : _SOURCEPOSITION, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.SourcePosition) + }) +_sym_db.RegisterMessage(SourcePosition) + +Source = _reflection.GeneratedProtocolMessageType('Source', (_message.Message,), { + 'DESCRIPTOR' : _SOURCE, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Source) + }) +_sym_db.RegisterMessage(Source) + +ToolFingerprint = _reflection.GeneratedProtocolMessageType('ToolFingerprint', (_message.Message,), { + 'DESCRIPTOR' : _TOOLFINGERPRINT, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.ToolFingerprint) + }) +_sym_db.RegisterMessage(ToolFingerprint) + +ResourceTable = _reflection.GeneratedProtocolMessageType('ResourceTable', (_message.Message,), { + 'DESCRIPTOR' : _RESOURCETABLE, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.ResourceTable) + }) +_sym_db.RegisterMessage(ResourceTable) + +PackageId = _reflection.GeneratedProtocolMessageType('PackageId', (_message.Message,), { + 'DESCRIPTOR' : _PACKAGEID, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.PackageId) + }) +_sym_db.RegisterMessage(PackageId) + +Package = _reflection.GeneratedProtocolMessageType('Package', (_message.Message,), { + 'DESCRIPTOR' : _PACKAGE, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Package) + }) +_sym_db.RegisterMessage(Package) + +TypeId = _reflection.GeneratedProtocolMessageType('TypeId', (_message.Message,), { + 'DESCRIPTOR' : _TYPEID, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.TypeId) + }) +_sym_db.RegisterMessage(TypeId) + +Type = _reflection.GeneratedProtocolMessageType('Type', (_message.Message,), { + 'DESCRIPTOR' : _TYPE, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Type) + }) +_sym_db.RegisterMessage(Type) + +Visibility = _reflection.GeneratedProtocolMessageType('Visibility', (_message.Message,), { + 'DESCRIPTOR' : _VISIBILITY, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Visibility) + }) +_sym_db.RegisterMessage(Visibility) + +AllowNew = _reflection.GeneratedProtocolMessageType('AllowNew', (_message.Message,), { + 'DESCRIPTOR' : _ALLOWNEW, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.AllowNew) + }) +_sym_db.RegisterMessage(AllowNew) + +Overlayable = _reflection.GeneratedProtocolMessageType('Overlayable', (_message.Message,), { + 'DESCRIPTOR' : _OVERLAYABLE, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Overlayable) + }) +_sym_db.RegisterMessage(Overlayable) + +OverlayableItem = _reflection.GeneratedProtocolMessageType('OverlayableItem', (_message.Message,), { + 'DESCRIPTOR' : _OVERLAYABLEITEM, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.OverlayableItem) + }) +_sym_db.RegisterMessage(OverlayableItem) + +EntryId = _reflection.GeneratedProtocolMessageType('EntryId', (_message.Message,), { + 'DESCRIPTOR' : _ENTRYID, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # 
@@protoc_insertion_point(class_scope:aapt.pb.EntryId) + }) +_sym_db.RegisterMessage(EntryId) + +Entry = _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), { + 'DESCRIPTOR' : _ENTRY, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Entry) + }) +_sym_db.RegisterMessage(Entry) + +ConfigValue = _reflection.GeneratedProtocolMessageType('ConfigValue', (_message.Message,), { + 'DESCRIPTOR' : _CONFIGVALUE, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.ConfigValue) + }) +_sym_db.RegisterMessage(ConfigValue) + +Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), { + 'DESCRIPTOR' : _VALUE, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Value) + }) +_sym_db.RegisterMessage(Value) + +Item = _reflection.GeneratedProtocolMessageType('Item', (_message.Message,), { + 'DESCRIPTOR' : _ITEM, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Item) + }) +_sym_db.RegisterMessage(Item) + +CompoundValue = _reflection.GeneratedProtocolMessageType('CompoundValue', (_message.Message,), { + 'DESCRIPTOR' : _COMPOUNDVALUE, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.CompoundValue) + }) +_sym_db.RegisterMessage(CompoundValue) + +Boolean = _reflection.GeneratedProtocolMessageType('Boolean', (_message.Message,), { + 'DESCRIPTOR' : _BOOLEAN, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Boolean) + }) +_sym_db.RegisterMessage(Boolean) + +Reference = _reflection.GeneratedProtocolMessageType('Reference', (_message.Message,), { + 'DESCRIPTOR' : _REFERENCE, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Reference) + }) +_sym_db.RegisterMessage(Reference) + +Id = _reflection.GeneratedProtocolMessageType('Id', (_message.Message,), { + 'DESCRIPTOR' : _ID, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Id) + }) +_sym_db.RegisterMessage(Id) + +String = _reflection.GeneratedProtocolMessageType('String', (_message.Message,), { + 'DESCRIPTOR' : _STRING, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.String) + }) +_sym_db.RegisterMessage(String) + +RawString = _reflection.GeneratedProtocolMessageType('RawString', (_message.Message,), { + 'DESCRIPTOR' : _RAWSTRING, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.RawString) + }) +_sym_db.RegisterMessage(RawString) + +StyledString = _reflection.GeneratedProtocolMessageType('StyledString', (_message.Message,), { + + 'Span' : _reflection.GeneratedProtocolMessageType('Span', (_message.Message,), { + 'DESCRIPTOR' : _STYLEDSTRING_SPAN, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.StyledString.Span) + }) + , + 'DESCRIPTOR' : _STYLEDSTRING, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.StyledString) + }) +_sym_db.RegisterMessage(StyledString) +_sym_db.RegisterMessage(StyledString.Span) + +FileReference = _reflection.GeneratedProtocolMessageType('FileReference', (_message.Message,), { + 'DESCRIPTOR' : 
_FILEREFERENCE, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.FileReference) + }) +_sym_db.RegisterMessage(FileReference) + +Primitive = _reflection.GeneratedProtocolMessageType('Primitive', (_message.Message,), { + + 'NullType' : _reflection.GeneratedProtocolMessageType('NullType', (_message.Message,), { + 'DESCRIPTOR' : _PRIMITIVE_NULLTYPE, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Primitive.NullType) + }) + , + + 'EmptyType' : _reflection.GeneratedProtocolMessageType('EmptyType', (_message.Message,), { + 'DESCRIPTOR' : _PRIMITIVE_EMPTYTYPE, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Primitive.EmptyType) + }) + , + 'DESCRIPTOR' : _PRIMITIVE, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Primitive) + }) +_sym_db.RegisterMessage(Primitive) +_sym_db.RegisterMessage(Primitive.NullType) +_sym_db.RegisterMessage(Primitive.EmptyType) + +Attribute = _reflection.GeneratedProtocolMessageType('Attribute', (_message.Message,), { + + 'Symbol' : _reflection.GeneratedProtocolMessageType('Symbol', (_message.Message,), { + 'DESCRIPTOR' : _ATTRIBUTE_SYMBOL, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Attribute.Symbol) + }) + , + 'DESCRIPTOR' : _ATTRIBUTE, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Attribute) + }) +_sym_db.RegisterMessage(Attribute) +_sym_db.RegisterMessage(Attribute.Symbol) + +Style = _reflection.GeneratedProtocolMessageType('Style', (_message.Message,), { + + 'Entry' : _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), { + 'DESCRIPTOR' : _STYLE_ENTRY, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Style.Entry) + }) + , + 'DESCRIPTOR' : _STYLE, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Style) + }) +_sym_db.RegisterMessage(Style) +_sym_db.RegisterMessage(Style.Entry) + +Styleable = _reflection.GeneratedProtocolMessageType('Styleable', (_message.Message,), { + + 'Entry' : _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), { + 'DESCRIPTOR' : _STYLEABLE_ENTRY, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Styleable.Entry) + }) + , + 'DESCRIPTOR' : _STYLEABLE, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Styleable) + }) +_sym_db.RegisterMessage(Styleable) +_sym_db.RegisterMessage(Styleable.Entry) + +Array = _reflection.GeneratedProtocolMessageType('Array', (_message.Message,), { + + 'Element' : _reflection.GeneratedProtocolMessageType('Element', (_message.Message,), { + 'DESCRIPTOR' : _ARRAY_ELEMENT, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Array.Element) + }) + , + 'DESCRIPTOR' : _ARRAY, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Array) + }) +_sym_db.RegisterMessage(Array) +_sym_db.RegisterMessage(Array.Element) + +Plural = _reflection.GeneratedProtocolMessageType('Plural', (_message.Message,), { + + 'Entry' : _reflection.GeneratedProtocolMessageType('Entry', 
(_message.Message,), { + 'DESCRIPTOR' : _PLURAL_ENTRY, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Plural.Entry) + }) + , + 'DESCRIPTOR' : _PLURAL, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.Plural) + }) +_sym_db.RegisterMessage(Plural) +_sym_db.RegisterMessage(Plural.Entry) + +XmlNode = _reflection.GeneratedProtocolMessageType('XmlNode', (_message.Message,), { + 'DESCRIPTOR' : _XMLNODE, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.XmlNode) + }) +_sym_db.RegisterMessage(XmlNode) + +XmlElement = _reflection.GeneratedProtocolMessageType('XmlElement', (_message.Message,), { + 'DESCRIPTOR' : _XMLELEMENT, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.XmlElement) + }) +_sym_db.RegisterMessage(XmlElement) + +XmlNamespace = _reflection.GeneratedProtocolMessageType('XmlNamespace', (_message.Message,), { + 'DESCRIPTOR' : _XMLNAMESPACE, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.XmlNamespace) + }) +_sym_db.RegisterMessage(XmlNamespace) + +XmlAttribute = _reflection.GeneratedProtocolMessageType('XmlAttribute', (_message.Message,), { + 'DESCRIPTOR' : _XMLATTRIBUTE, + '__module__' : 'frameworks.base.tools.aapt2.Resources_pb2' + # @@protoc_insertion_point(class_scope:aapt.pb.XmlAttribute) + }) +_sym_db.RegisterMessage(XmlAttribute) + + +DESCRIPTOR._options = None +_PRIMITIVE.fields_by_name['dimension_value_deprecated']._options = None +_PRIMITIVE.fields_by_name['fraction_value_deprecated']._options = None +# @@protoc_insertion_point(module_scope) diff --git a/android/gyp/proto/__init__.py b/android/gyp/proto/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/android/gyp/system_image_apks.py b/android/gyp/system_image_apks.py new file mode 100755 index 000000000000..0b6804b9af11 --- /dev/null +++ b/android/gyp/system_image_apks.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python3 + +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Generates APKs for use on system images.""" + +import argparse +import os +import pathlib +import tempfile +import shutil +import sys +import zipfile + +_DIR_SOURCE_ROOT = str(pathlib.Path(__file__).parents[2]) +sys.path.append(os.path.join(_DIR_SOURCE_ROOT, 'build', 'android', 'gyp')) +from util import build_utils + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--input', required=True, help='Input path') + parser.add_argument('--output', required=True, help='Output path') + parser.add_argument('--bundle-wrapper', help='APK operations script path') + parser.add_argument('--fuse-apk', + help='Create single .apk rather than using apk splits', + action='store_true') + args = parser.parse_args() + + if not args.bundle_wrapper: + shutil.copyfile(args.input, args.output) + return + + with tempfile.NamedTemporaryFile(suffix='.apks') as tmp_file: + cmd = [ + args.bundle_wrapper, 'build-bundle-apks', '--output-apks', tmp_file.name + ] + cmd += ['--build-mode', 'system' if args.fuse_apk else 'system_apks'] + + # Creates a .apks zip file that contains the system image APK(s). 
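+ # For illustration only (the wrapper path and temp name are hypothetical), + # the assembled command resembles: + # out/Release/bin/foo_bundle build-bundle-apks \ + # --output-apks /tmp/tmpXXXX.apks --build-mode system_apks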
+ build_utils.CheckOutput(cmd) + + if args.fuse_apk: + with zipfile.ZipFile(tmp_file.name) as z: + pathlib.Path(args.output).write_bytes(z.read('system/system.apk')) + return + + # Rename .apk files and remove toc.pb to make it clear that system apks + # should not be installed via bundletool. + with zipfile.ZipFile(tmp_file.name) as z_input, \ + zipfile.ZipFile(args.output, 'w') as z_output: + for info in z_input.infolist(): + if info.filename.endswith('.apk'): + data = z_input.read(info) + info.filename = (info.filename.replace('splits/', + '').replace('-master', '')) + z_output.writestr(info, data) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/android/gyp/system_image_apks.pydeps b/android/gyp/system_image_apks.pydeps new file mode 100644 index 000000000000..35f1dc9fe6e4 --- /dev/null +++ b/android/gyp/system_image_apks.pydeps @@ -0,0 +1,6 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/system_image_apks.pydeps build/android/gyp/system_image_apks.py +../../gn_helpers.py +system_image_apks.py +util/__init__.py +util/build_utils.py diff --git a/android/gyp/test/BUILD.gn b/android/gyp/test/BUILD.gn new file mode 100644 index 000000000000..301a220d032a --- /dev/null +++ b/android/gyp/test/BUILD.gn @@ -0,0 +1,11 @@ +import("//build/config/android/rules.gni") + +java_library("hello_world_java") { + sources = [ "java/org/chromium/helloworld/HelloWorldPrinter.java" ] +} + +java_binary("hello_world") { + deps = [ ":hello_world_java" ] + sources = [ "java/org/chromium/helloworld/HelloWorldMain.java" ] + main_class = "org.chromium.helloworld.HelloWorldMain" +} diff --git a/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java b/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java new file mode 100644 index 000000000000..2c4d9a274712 --- /dev/null +++ b/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java @@ -0,0 +1,15 @@ +// Copyright 2014 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.helloworld; + +public class HelloWorldMain { + public static void main(String[] args) { + if (args.length > 0) { + System.exit(Integer.parseInt(args[0])); + } + HelloWorldPrinter.print(); + } +} + diff --git a/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java b/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java new file mode 100644 index 000000000000..2762b4f9e036 --- /dev/null +++ b/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java @@ -0,0 +1,12 @@ +// Copyright 2014 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.helloworld; + +public class HelloWorldPrinter { + public static void print() { + System.out.println("Hello, world!"); + } +} + diff --git a/android/gyp/trace_event_bytecode_rewriter.py b/android/gyp/trace_event_bytecode_rewriter.py new file mode 100755 index 000000000000..3e0e696f511b --- /dev/null +++ b/android/gyp/trace_event_bytecode_rewriter.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python3 +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Wrapper script around TraceEventAdder script.""" + +import argparse +import sys +import os + +from util import build_utils +import action_helpers # build_utils adds //build to sys.path. 
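+# For illustration only: with hypothetical args --classpath c.jar, +# --input-jars a.jar and --output-jars out/a.jar, the command built in main() +# resembles: <script> --classpath a.jar:c.jar a.jar out/a.jar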
+ + +def main(argv): + argv = build_utils.ExpandFileArgs(argv[1:]) + parser = argparse.ArgumentParser() + action_helpers.add_depfile_arg(parser) + parser.add_argument('--script', + required=True, + help='Path to the java binary wrapper script.') + parser.add_argument('--stamp', help='Path to stamp to mark when finished.') + parser.add_argument('--classpath', action='append', nargs='+') + parser.add_argument('--input-jars', action='append', nargs='+') + parser.add_argument('--output-jars', action='append', nargs='+') + args = parser.parse_args(argv) + + args.classpath = action_helpers.parse_gn_list(args.classpath) + args.input_jars = action_helpers.parse_gn_list(args.input_jars) + args.output_jars = action_helpers.parse_gn_list(args.output_jars) + + for output_jar in args.output_jars: + jar_dir = os.path.dirname(output_jar) + if not os.path.exists(jar_dir): + os.makedirs(jar_dir) + + all_input_jars = set(args.classpath + args.input_jars) + cmd = [ + args.script, '--classpath', ':'.join(sorted(all_input_jars)), + ':'.join(args.input_jars), ':'.join(args.output_jars) + ] + build_utils.CheckOutput(cmd, print_stdout=True) + + build_utils.Touch(args.stamp) + + action_helpers.write_depfile(args.depfile, args.stamp, inputs=all_input_jars) + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/android/gyp/trace_event_bytecode_rewriter.pydeps b/android/gyp/trace_event_bytecode_rewriter.pydeps new file mode 100644 index 000000000000..e03fc0c233a8 --- /dev/null +++ b/android/gyp/trace_event_bytecode_rewriter.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/trace_event_bytecode_rewriter.pydeps build/android/gyp/trace_event_bytecode_rewriter.py +../../action_helpers.py +../../gn_helpers.py +trace_event_bytecode_rewriter.py +util/__init__.py +util/build_utils.py diff --git a/android/gyp/turbine.py b/android/gyp/turbine.py new file mode 100755 index 000000000000..2de92f4704ae --- /dev/null +++ b/android/gyp/turbine.py @@ -0,0 +1,168 @@ +#!/usr/bin/env python3 +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Wraps the turbine jar and expands @FileArgs.""" + +import argparse +import functools +import logging +import sys +import time +import zipfile + +import compile_java +import javac_output_processor +from util import build_utils +import action_helpers # build_utils adds //build to sys.path. 
+import zip_helpers + + +def ProcessJavacOutput(output, target_name): + output_processor = javac_output_processor.JavacOutputProcessor(target_name) + lines = output_processor.Process(output.split('\n')) + return '\n'.join(lines) + + +def main(argv): + build_utils.InitLogging('TURBINE_DEBUG') + argv = build_utils.ExpandFileArgs(argv[1:]) + parser = argparse.ArgumentParser() + action_helpers.add_depfile_arg(parser) + parser.add_argument('--target-name', help='Fully qualified GN target name.') + parser.add_argument( + '--turbine-jar-path', required=True, help='Path to the turbine jar file.') + parser.add_argument( + '--java-srcjars', + action='append', + default=[], + help='List of srcjars to include in compilation.') + parser.add_argument('--classpath', action='append', help='Classpath to use.') + parser.add_argument( + '--processors', + action='append', + help='GN list of annotation processor main classes.') + parser.add_argument( + '--processorpath', + action='append', + help='GN list of jars that comprise the classpath used for Annotation ' + 'Processors.') + parser.add_argument( + '--processor-args', + action='append', + help='key=value arguments for the annotation processors.') + parser.add_argument('--jar-path', help='Jar output path.', required=True) + parser.add_argument( + '--generated-jar-path', + required=True, + help='Output path for generated source files.') + parser.add_argument('--warnings-as-errors', + action='store_true', + help='Treat all warnings as errors.') + parser.add_argument('--kotlin-jar-path', + help='Kotlin jar to be merged into the output jar.') + options, unknown_args = parser.parse_known_args(argv) + + options.classpath = action_helpers.parse_gn_list(options.classpath) + options.processorpath = action_helpers.parse_gn_list(options.processorpath) + options.processors = action_helpers.parse_gn_list(options.processors) + options.java_srcjars = action_helpers.parse_gn_list(options.java_srcjars) + + files = [] + for arg in unknown_args: + # Interpret a path prefixed with @ as a file containing a list of sources. + if arg.startswith('@'): + files.extend(build_utils.ReadSourcesList(arg[1:])) + + # The target's .sources file contains both Java and Kotlin files. We use + # compile_kt.py to compile the Kotlin files to .class and header jars. + # Turbine is run only on .java files. + java_files = [f for f in files if f.endswith('.java')] + + cmd = build_utils.JavaCmd() + [ + '-classpath', options.turbine_jar_path, 'com.google.turbine.main.Main' + ] + javac_cmd = [ + # We currently target JDK 11 everywhere. + '--release', + '11', + ] + + # Turbine reads lists from command line args by consuming args until one + # starts with double dash (--). Thus the args for each list must be grouped + # together and passed as one contiguous block. + if options.processors: + cmd += ['--processors'] + cmd += options.processors + + if options.processorpath: + cmd += ['--processorpath'] + cmd += options.processorpath + + if options.processor_args: + for arg in options.processor_args: + javac_cmd.extend(['-A%s' % arg]) + + if options.classpath: + cmd += ['--classpath'] + cmd += options.classpath + + if options.java_srcjars: + cmd += ['--source_jars'] + cmd += options.java_srcjars + + if java_files: + # Use jar_path to ensure paths are relative (needed for goma). + files_rsp_path = options.jar_path + '.java_files_list.txt' + with open(files_rsp_path, 'w') as f: + f.write(' '.join(java_files)) + # Pass source paths as a response file to avoid extremely long command + # lines that are tedious to debug.
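+ # For illustration, the response file written above is a single line of + # space-separated paths, e.g. (hypothetical sources): + # org/chromium/Foo.java org/chromium/Bar.java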
+ cmd += ['--sources'] + cmd += ['@' + files_rsp_path] + + cmd += ['--javacopts'] + cmd += javac_cmd + cmd += ['--'] # Terminate javacopts + + # Use AtomicOutput so that output timestamps are not updated when outputs + # are not changed. + with action_helpers.atomic_output(options.jar_path) as output_jar, \ + action_helpers.atomic_output(options.generated_jar_path) as gensrc_jar: + cmd += ['--output', output_jar.name, '--gensrc_output', gensrc_jar.name] + process_javac_output_partial = functools.partial( + ProcessJavacOutput, target_name=options.target_name) + + logging.debug('Command: %s', cmd) + start = time.time() + try: + build_utils.CheckOutput(cmd, + print_stdout=True, + stdout_filter=process_javac_output_partial, + stderr_filter=process_javac_output_partial, + fail_on_output=options.warnings_as_errors) + except build_utils.CalledProcessError as e: + # Do not output stacktrace as it takes up space on gerrit UI, forcing + # you to click through to find the actual compilation error. It's never + # interesting to see the Python stacktrace for a Java compilation error. + sys.stderr.write(e.output) + sys.exit(1) + end = time.time() - start + logging.info('Header compilation took %ss', end) + if options.kotlin_jar_path: + with zipfile.ZipFile(output_jar.name, 'a') as out_zip: + path_transform = lambda p: p if p.endswith('.class') else None + zip_helpers.merge_zips(out_zip, [options.kotlin_jar_path], + path_transform=path_transform) + + if options.depfile: + # GN already knows of the java files, so avoid listing individual java files + # in the depfile. + depfile_deps = (options.classpath + options.processorpath + + options.java_srcjars) + action_helpers.write_depfile(options.depfile, options.jar_path, + depfile_deps) + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/android/gyp/turbine.pydeps b/android/gyp/turbine.pydeps new file mode 100644 index 000000000000..3d20f2ef4cec --- /dev/null +++ b/android/gyp/turbine.pydeps @@ -0,0 +1,33 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/turbine.pydeps build/android/gyp/turbine.py +../../../third_party/catapult/devil/devil/__init__.py +../../../third_party/catapult/devil/devil/android/__init__.py +../../../third_party/catapult/devil/devil/android/constants/__init__.py +../../../third_party/catapult/devil/devil/android/constants/chrome.py +../../../third_party/catapult/devil/devil/android/sdk/__init__.py +../../../third_party/catapult/devil/devil/android/sdk/keyevent.py +../../../third_party/catapult/devil/devil/android/sdk/version_codes.py +../../../third_party/catapult/devil/devil/constants/__init__.py +../../../third_party/catapult/devil/devil/constants/exit_codes.py +../../../third_party/colorama/src/colorama/__init__.py +../../../third_party/colorama/src/colorama/ansi.py +../../../third_party/colorama/src/colorama/ansitowin32.py +../../../third_party/colorama/src/colorama/initialise.py +../../../third_party/colorama/src/colorama/win32.py +../../../third_party/colorama/src/colorama/winterm.py +../../../tools/android/modularization/convenience/lookup_dep.py +../../action_helpers.py +../../gn_helpers.py +../../print_python_deps.py +../../zip_helpers.py +../list_java_targets.py +../pylib/__init__.py +../pylib/constants/__init__.py +compile_java.py +javac_output_processor.py +turbine.py +util/__init__.py +util/build_utils.py +util/jar_info_utils.py +util/md5_check.py +util/server_utils.py diff --git a/android/gyp/unused_resources.py b/android/gyp/unused_resources.py
new file mode 100755 index 000000000000..d7578ce709f2 --- /dev/null +++ b/android/gyp/unused_resources.py @@ -0,0 +1,115 @@ +#!/usr/bin/env python3 +# encoding: utf-8 +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import os +import sys + +from util import build_utils +from util import resource_utils +import action_helpers # build_utils adds //build to sys.path. + + +def _FilterUnusedResources(r_text_in, r_text_out, unused_resources_config): + removed_resources = set() + with open(unused_resources_config, encoding='utf-8') as output_config: + for line in output_config: + # example line: attr/line_height#remove + resource = line.split('#')[0] + resource_type, resource_name = resource.split('/') + removed_resources.add((resource_type, resource_name)) + kept_lines = [] + with open(r_text_in, encoding='utf-8') as infile: + for line in infile: + # example line: int attr line_height 0x7f0014ee + resource_type, resource_name = line.split(' ')[1:3] + if (resource_type, resource_name) not in removed_resources: + kept_lines.append(line) + + with open(r_text_out, 'w', encoding='utf-8') as out_file: + out_file.writelines(kept_lines) + + +def main(args): + parser = argparse.ArgumentParser() + + action_helpers.add_depfile_arg(parser) + parser.add_argument('--script', + required=True, + help='Path to the unused resources detector script.') + parser.add_argument( + '--dependencies-res-zips', + required=True, + action='append', + help='Resources zip archives to investigate for unused resources.') + parser.add_argument('--dexes', + action='append', + required=True, + help='Path to dex file, or zip with dex files.') + parser.add_argument( + '--proguard-mapping', + help='Path to proguard mapping file for the optimized dex.') + parser.add_argument('--r-text-in', required=True, help='Path to input R.txt') + parser.add_argument( + '--r-text-out', + help='Path to output R.txt with unused resources removed.') + parser.add_argument('--android-manifests', + action='append', + required=True, + help='Path to AndroidManifest') + parser.add_argument('--output-config', + required=True, + help='Path to output the aapt2 config to.') + args = build_utils.ExpandFileArgs(args) + options = parser.parse_args(args) + options.dependencies_res_zips = (action_helpers.parse_gn_list( + options.dependencies_res_zips)) + + # in case of no resources, short circuit early. 
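+ # (An empty, touched config flags no resources for removal; this is assumed + # to be a valid 'nothing to strip' result for the downstream consumer.)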
+ if not options.dependencies_res_zips: + build_utils.Touch(options.output_config) + return + + with build_utils.TempDir() as temp_dir: + dep_subdirs = [] + for dependency_res_zip in options.dependencies_res_zips: + dep_subdirs += resource_utils.ExtractDeps([dependency_res_zip], temp_dir) + + cmd = [ + options.script, + '--rtxts', + options.r_text_in, + '--manifests', + ':'.join(options.android_manifests), + '--resourceDirs', + ':'.join(dep_subdirs), + '--dexes', + ':'.join(options.dexes), + '--outputConfig', + options.output_config, + ] + if options.proguard_mapping: + cmd += [ + '--mapping', + options.proguard_mapping, + ] + build_utils.CheckOutput(cmd) + + if options.r_text_out: + _FilterUnusedResources(options.r_text_in, options.r_text_out, + options.output_config) + + if options.depfile: + depfile_deps = (options.dependencies_res_zips + options.android_manifests + + options.dexes) + [options.r_text_in] + if options.proguard_mapping: + depfile_deps.append(options.proguard_mapping) + action_helpers.write_depfile(options.depfile, options.output_config, + depfile_deps) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/android/gyp/unused_resources.pydeps b/android/gyp/unused_resources.pydeps new file mode 100644 index 000000000000..b4da89a95eb0 --- /dev/null +++ b/android/gyp/unused_resources.pydeps @@ -0,0 +1,30 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/unused_resources.pydeps build/android/gyp/unused_resources.py +../../../third_party/jinja2/__init__.py +../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py +../../../third_party/jinja2/bccache.py +../../../third_party/jinja2/compiler.py +../../../third_party/jinja2/defaults.py +../../../third_party/jinja2/environment.py +../../../third_party/jinja2/exceptions.py +../../../third_party/jinja2/filters.py +../../../third_party/jinja2/idtracking.py +../../../third_party/jinja2/lexer.py +../../../third_party/jinja2/loaders.py +../../../third_party/jinja2/nodes.py +../../../third_party/jinja2/optimizer.py +../../../third_party/jinja2/parser.py +../../../third_party/jinja2/runtime.py +../../../third_party/jinja2/tests.py +../../../third_party/jinja2/utils.py +../../../third_party/jinja2/visitor.py +../../../third_party/markupsafe/__init__.py +../../../third_party/markupsafe/_compat.py +../../../third_party/markupsafe/_native.py +../../action_helpers.py +../../gn_helpers.py +unused_resources.py +util/__init__.py +util/build_utils.py +util/resource_utils.py diff --git a/android/gyp/util/__init__.py b/android/gyp/util/__init__.py new file mode 100644 index 000000000000..5ffa28413724 --- /dev/null +++ b/android/gyp/util/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. diff --git a/android/gyp/util/build_utils.py b/android/gyp/util/build_utils.py new file mode 100644 index 000000000000..f88518210b5b --- /dev/null +++ b/android/gyp/util/build_utils.py @@ -0,0 +1,488 @@ +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""Contains common helpers for GN action()s.""" + +import atexit +import collections +import contextlib +import filecmp +import fnmatch +import json +import logging +import os +import pipes +import re +import shlex +import shutil +import stat +import subprocess +import sys +import tempfile +import textwrap +import time +import zipfile + +sys.path.append(os.path.join(os.path.dirname(__file__), + os.pardir, os.pardir, os.pardir)) +import gn_helpers + +# Use relative paths to improved hermetic property of build scripts. +DIR_SOURCE_ROOT = os.path.relpath( + os.environ.get( + 'CHECKOUT_SOURCE_ROOT', + os.path.join( + os.path.dirname(__file__), os.pardir, os.pardir, os.pardir, + os.pardir))) +JAVA_HOME = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current') +JAVAC_PATH = os.path.join(JAVA_HOME, 'bin', 'javac') +JAVAP_PATH = os.path.join(JAVA_HOME, 'bin', 'javap') +KOTLIN_HOME = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'kotlinc', 'current') +KOTLINC_PATH = os.path.join(KOTLIN_HOME, 'bin', 'kotlinc') +# Please avoid using this. Our JAVA_HOME is using a newer and actively patched +# JDK. +JAVA_11_HOME_DEPRECATED = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk11', + 'current') + +def JavaCmd(xmx='1G'): + ret = [os.path.join(JAVA_HOME, 'bin', 'java')] + # Limit heap to avoid Java not GC'ing when it should, and causing + # bots to OOM when many java commands are runnig at the same time + # https://crbug.com/1098333 + ret += ['-Xmx' + xmx] + return ret + + +@contextlib.contextmanager +def TempDir(**kwargs): + dirname = tempfile.mkdtemp(**kwargs) + try: + yield dirname + finally: + shutil.rmtree(dirname) + + +def MakeDirectory(dir_path): + try: + os.makedirs(dir_path) + except OSError: + pass + + +def DeleteDirectory(dir_path): + if os.path.exists(dir_path): + shutil.rmtree(dir_path) + + +def Touch(path, fail_if_missing=False): + if fail_if_missing and not os.path.exists(path): + raise Exception(path + ' doesn\'t exist.') + + MakeDirectory(os.path.dirname(path)) + with open(path, 'a'): + os.utime(path, None) + + +def FindInDirectory(directory, filename_filter='*'): + files = [] + for root, _dirnames, filenames in os.walk(directory): + matched_files = fnmatch.filter(filenames, filename_filter) + files.extend((os.path.join(root, f) for f in matched_files)) + return files + + +def CheckOptions(options, parser, required=None): + if not required: + return + for option_name in required: + if getattr(options, option_name) is None: + parser.error('--%s is required' % option_name.replace('_', '-')) + + +def WriteJson(obj, path, only_if_changed=False): + old_dump = None + if os.path.exists(path): + with open(path, 'r') as oldfile: + old_dump = oldfile.read() + + new_dump = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': ')) + + if not only_if_changed or old_dump != new_dump: + with open(path, 'w') as outfile: + outfile.write(new_dump) + + +@contextlib.contextmanager +def _AtomicOutput(path, only_if_changed=True, mode='w+b'): + # Create in same directory to ensure same filesystem when moving. + dirname = os.path.dirname(path) + if not os.path.exists(dirname): + MakeDirectory(dirname) + with tempfile.NamedTemporaryFile( + mode, suffix=os.path.basename(path), dir=dirname, delete=False) as f: + try: + yield f + + # file should be closed before comparison/move. 
+ f.close() + if not (only_if_changed and os.path.exists(path) and + filecmp.cmp(f.name, path)): + shutil.move(f.name, path) + finally: + if os.path.exists(f.name): + os.unlink(f.name) + + +class CalledProcessError(Exception): + """This exception is raised when the process run by CheckOutput + exits with a non-zero exit code.""" + + def __init__(self, cwd, args, output): + super().__init__() + self.cwd = cwd + self.args = args + self.output = output + + def __str__(self): + # A user should be able to simply copy and paste the command that failed + # into their shell (unless it is more than 200 chars). + # User can set PRINT_FULL_COMMAND=1 to always print the full command. + print_full = os.environ.get('PRINT_FULL_COMMAND', '0') != '0' + full_cmd = shlex.join(self.args) + short_cmd = textwrap.shorten(full_cmd, width=200) + printed_cmd = full_cmd if print_full else short_cmd + copyable_command = '( cd {}; {} )'.format(os.path.abspath(self.cwd), + printed_cmd) + return 'Command failed: {}\n{}'.format(copyable_command, self.output) + + +def FilterLines(output, filter_string): + """Output filter from build_utils.CheckOutput. + + Args: + output: Executable output as from build_utils.CheckOutput. + filter_string: An RE string that will filter (remove) matching + lines from |output|. + + Returns: + The filtered output, as a single string. + """ + re_filter = re.compile(filter_string) + return '\n'.join( + line for line in output.split('\n') if not re_filter.search(line)) + + +def FilterReflectiveAccessJavaWarnings(output): + """Filters out warnings about illegal reflective access operation. + + These warnings were introduced in Java 9, and generally mean that dependencies + need to be updated. + """ + # WARNING: An illegal reflective access operation has occurred + # WARNING: Illegal reflective access by ... + # WARNING: Please consider reporting this to the maintainers of ... + # WARNING: Use --illegal-access=warn to enable warnings of further ... + # WARNING: All illegal access operations will be denied in a future release + return FilterLines( + output, r'WARNING: (' + 'An illegal reflective|' + 'Illegal reflective access|' + 'Please consider reporting this to|' + 'Use --illegal-access=warn|' + 'All illegal access operations)') + + +# This can be used in most cases like subprocess.check_output(). The output, +# particularly when the command fails, better highlights the command's failure. +# If the command fails, raises a build_utils.CalledProcessError. 
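+# Illustrative usage (hypothetical command; raises CalledProcessError when the +# command exits non-zero): +# stdout = CheckOutput(['ls', 'out/Debug'])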
+def CheckOutput(args, + cwd=None, + env=None, + print_stdout=False, + print_stderr=True, + stdout_filter=None, + stderr_filter=None, + fail_on_output=True, + fail_func=lambda returncode, stderr: returncode != 0): + if not cwd: + cwd = os.getcwd() + + logging.info('CheckOutput: %s', ' '.join(args)) + child = subprocess.Popen(args, + stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env) + stdout, stderr = child.communicate() + + # For Python3 only: + if isinstance(stdout, bytes) and sys.version_info >= (3, ): + stdout = stdout.decode('utf-8') + stderr = stderr.decode('utf-8') + + if stdout_filter is not None: + stdout = stdout_filter(stdout) + + if stderr_filter is not None: + stderr = stderr_filter(stderr) + + if fail_func and fail_func(child.returncode, stderr): + raise CalledProcessError(cwd, args, stdout + stderr) + + if print_stdout: + sys.stdout.write(stdout) + if print_stderr: + sys.stderr.write(stderr) + + has_stdout = print_stdout and stdout + has_stderr = print_stderr and stderr + if has_stdout or has_stderr: + if has_stdout and has_stderr: + stream_name = 'stdout and stderr' + elif has_stdout: + stream_name = 'stdout' + else: + stream_name = 'stderr' + + if fail_on_output: + MSG = """ +Command failed because it wrote to {}. +You can often set treat_warnings_as_errors=false to not treat output as \ +failure (useful when developing locally). +""" + raise CalledProcessError(cwd, args, MSG.format(stream_name)) + + short_cmd = textwrap.shorten(shlex.join(args), width=200) + sys.stderr.write( + f'\nThe above {stream_name} output was from: {short_cmd}\n') + + return stdout + + +def GetModifiedTime(path): + # For a symlink, the modified time should be the greater of the link's + # modified time and the modified time of the target. + return max(os.lstat(path).st_mtime, os.stat(path).st_mtime) + + +def IsTimeStale(output, inputs): + if not os.path.exists(output): + return True + + output_time = GetModifiedTime(output) + for i in inputs: + if GetModifiedTime(i) > output_time: + return True + return False + + +def _CheckZipPath(name): + if os.path.normpath(name) != name: + raise Exception('Non-canonical zip path: %s' % name) + if os.path.isabs(name): + raise Exception('Absolute zip path: %s' % name) + + +def _IsSymlink(zip_file, name): + zi = zip_file.getinfo(name) + + # The two high-order bytes of ZipInfo.external_attr represent + # UNIX permissions and file type bits. 
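+ # e.g. a symlink stored with mode 0o120777 yields external_attr >> 16 == + # 0xA1FF, and stat.S_ISLNK() checks its S_IFLNK (0o120000) file-type bits.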
+ return stat.S_ISLNK(zi.external_attr >> 16) + + +def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None, + predicate=None): + if path is None: + path = os.getcwd() + elif not os.path.exists(path): + MakeDirectory(path) + + if not zipfile.is_zipfile(zip_path): + raise Exception('Invalid zip file: %s' % zip_path) + + extracted = [] + with zipfile.ZipFile(zip_path) as z: + for name in z.namelist(): + if name.endswith('/'): + MakeDirectory(os.path.join(path, name)) + continue + if pattern is not None: + if not fnmatch.fnmatch(name, pattern): + continue + if predicate and not predicate(name): + continue + _CheckZipPath(name) + if no_clobber: + output_path = os.path.join(path, name) + if os.path.exists(output_path): + raise Exception( + 'Path already exists from zip: %s %s %s' + % (zip_path, name, output_path)) + if _IsSymlink(z, name): + dest = os.path.join(path, name) + MakeDirectory(os.path.dirname(dest)) + os.symlink(z.read(name), dest) + extracted.append(dest) + else: + z.extract(name, path) + extracted.append(os.path.join(path, name)) + + return extracted + + +def MatchesGlob(path, filters): + """Returns whether the given path matches any of the given glob patterns.""" + return filters and any(fnmatch.fnmatch(path, f) for f in filters) + + +def MergeZips(output, input_zips, path_transform=None, compress=None): + """Combines all files from |input_zips| into |output|. + + Args: + output: Path, fileobj, or ZipFile instance to add files to. + input_zips: Iterable of paths to zip files to merge. + path_transform: Called for each entry path. Returns a new path, or None to + skip the file. + compress: Overrides compression setting from origin zip entries. + """ + path_transform = path_transform or (lambda p: p) + + out_zip = output + if not isinstance(output, zipfile.ZipFile): + out_zip = zipfile.ZipFile(output, 'w') + + # Include paths in the existing zip here to avoid adding duplicate files. + added_names = set(out_zip.namelist()) + + try: + for in_file in input_zips: + with zipfile.ZipFile(in_file, 'r') as in_zip: + for info in in_zip.infolist(): + # Ignore directories. + if info.filename[-1] == '/': + continue + dst_name = path_transform(info.filename) + if not dst_name: + continue + already_added = dst_name in added_names + if not already_added: + if compress is not None: + compress_entry = compress + else: + compress_entry = info.compress_type != zipfile.ZIP_STORED + AddToZipHermetic( + out_zip, + dst_name, + data=in_zip.read(info), + compress=compress_entry) + added_names.add(dst_name) + finally: + if output is not out_zip: + out_zip.close() + + +def GetSortedTransitiveDependencies(top, deps_func): + """Gets the list of all transitive dependencies in sorted order. + + There should be no cycles in the dependency graph (crashes if cycles exist). + + Args: + top: A list of the top level nodes + deps_func: A function that takes a node and returns a list of its direct + dependencies. + Returns: + A list of all transitive dependencies of nodes in top, in order (a node will + appear in the list at a higher index than all of its dependencies). + """ + # Find all deps depth-first, maintaining original order in the case of ties. 
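+  # Illustrative example: with deps_func = {'a': [], 'b': ['a'],
+  # 'c': ['b']}.get, GetSortedTransitiveDependencies(['c'], deps_func)
+  # returns ['a', 'b', 'c'].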
+  deps_map = collections.OrderedDict()
+  def discover(nodes):
+    for node in nodes:
+      if node in deps_map:
+        continue
+      deps = deps_func(node)
+      discover(deps)
+      deps_map[node] = deps
+
+  discover(top)
+  return list(deps_map)
+
+
+def InitLogging(enabling_env):
+  logging.basicConfig(
+      level=logging.DEBUG if os.environ.get(enabling_env) else logging.WARNING,
+      format='%(levelname).1s %(process)d %(relativeCreated)6d %(message)s')
+  script_name = os.path.basename(sys.argv[0])
+  logging.info('Started (%s)', script_name)
+
+  my_pid = os.getpid()
+
+  def log_exit():
+    # Do not log for fork'ed processes.
+    if os.getpid() == my_pid:
+      logging.info("Job's done (%s)", script_name)
+
+  atexit.register(log_exit)
+
+
+def ExpandFileArgs(args):
+  """Replaces file-arg placeholders in args.
+
+  These placeholders have the form:
+    @FileArg(filename:key1:key2:...:keyn)
+
+  The value of such a placeholder is calculated by reading 'filename' as json
+  and then extracting the value at [key1][key2]...[keyn]. If a key has a '[]'
+  suffix, the (intermediate) value will be interpreted as a single-item list,
+  and that single item will be returned or used for further traversal.
+
+  Note: This intentionally does not return the list of files that appear in
+  such placeholders. An action that uses file-args *must* know the paths of
+  those files prior to the parsing of the arguments (typically by explicitly
+  listing them in the action's inputs in build files).
+  """
+  new_args = list(args)
+  file_jsons = dict()
+  r = re.compile(r'@FileArg\((.*?)\)')
+  for i, arg in enumerate(args):
+    match = r.search(arg)
+    if not match:
+      continue
+
+    def get_key(key):
+      if key.endswith('[]'):
+        return key[:-2], True
+      return key, False
+
+    lookup_path = match.group(1).split(':')
+    file_path, _ = get_key(lookup_path[0])
+    if file_path not in file_jsons:
+      with open(file_path) as f:
+        file_jsons[file_path] = json.load(f)
+
+    expansion = file_jsons
+    for k in lookup_path:
+      k, flatten = get_key(k)
+      expansion = expansion[k]
+      if flatten:
+        if not isinstance(expansion, list) or len(expansion) != 1:
+          raise Exception('Expected single item list but got %s' % expansion)
+        expansion = expansion[0]
+
+    # This should match parse_gn_list. The output is either a GN-formatted
+    # list or a literal (with no quotes).
+    if isinstance(expansion, list):
+      new_args[i] = (arg[:match.start()] + gn_helpers.ToGNString(expansion) +
+                     arg[match.end():])
+    else:
+      new_args[i] = arg[:match.start()] + str(expansion) + arg[match.end():]
+
+  return new_args
+
+
+def ReadSourcesList(sources_list_file_name):
+  """Reads a GN-written file containing a list of file names and returns it.
+
+  Note that this function should not be used to parse response files.
+  """
+  with open(sources_list_file_name) as f:
+    return [file_name.strip() for file_name in f]
diff --git a/android/gyp/util/build_utils_test.py b/android/gyp/util/build_utils_test.py
new file mode 100755
index 000000000000..44528c9215de
--- /dev/null
+++ b/android/gyp/util/build_utils_test.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+ +import collections +import os +import sys +import unittest + +sys.path.insert( + 0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))) +from util import build_utils + +_DEPS = collections.OrderedDict() +_DEPS['a'] = [] +_DEPS['b'] = [] +_DEPS['c'] = ['a'] +_DEPS['d'] = ['a'] +_DEPS['e'] = ['f'] +_DEPS['f'] = ['a', 'd'] +_DEPS['g'] = [] +_DEPS['h'] = ['d', 'b', 'f'] +_DEPS['i'] = ['f'] + + +class BuildUtilsTest(unittest.TestCase): + def testGetSortedTransitiveDependencies_all(self): + TOP = _DEPS.keys() + EXPECTED = ['a', 'b', 'c', 'd', 'f', 'e', 'g', 'h', 'i'] + actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get) + self.assertEqual(EXPECTED, actual) + + def testGetSortedTransitiveDependencies_leaves(self): + TOP = ['c', 'e', 'g', 'h', 'i'] + EXPECTED = ['a', 'c', 'd', 'f', 'e', 'g', 'b', 'h', 'i'] + actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get) + self.assertEqual(EXPECTED, actual) + + def testGetSortedTransitiveDependencies_leavesReverse(self): + TOP = ['i', 'h', 'g', 'e', 'c'] + EXPECTED = ['a', 'd', 'f', 'i', 'b', 'h', 'g', 'e', 'c'] + actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get) + self.assertEqual(EXPECTED, actual) + + +if __name__ == '__main__': + unittest.main() diff --git a/android/gyp/util/diff_utils.py b/android/gyp/util/diff_utils.py new file mode 100644 index 000000000000..445bbe3d21be --- /dev/null +++ b/android/gyp/util/diff_utils.py @@ -0,0 +1,136 @@ +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import difflib +import os +import sys + +from util import build_utils +import action_helpers # build_utils adds //build to sys.path. + + +def _SkipOmitted(line): + """ + Skip lines that are to be intentionally omitted from the expectations file. + + This is required when the file to be compared against expectations contains + a line that changes from build to build because - for instance - it contains + version information. + """ + if line.rstrip().endswith('# OMIT FROM EXPECTATIONS'): + return '# THIS LINE WAS OMITTED\n' + return line + + +def _GenerateDiffWithOnlyAdditons(expected_path, actual_data): + """Generate a diff that only contains additions""" + # Ignore blank lines when creating the diff to cut down on whitespace-only + # lines in the diff. Also remove trailing whitespaces and add the new lines + # manually (ndiff expects new lines but we don't care about trailing + # whitespace). + with open(expected_path) as expected: + expected_lines = [l for l in expected.readlines() if l.strip()] + actual_lines = [ + '{}\n'.format(l.rstrip()) for l in actual_data.splitlines() if l.strip() + ] + + # This helps the diff to not over-anchor on comments or closing braces in + # proguard configs. + def is_junk_line(l): + l = l.strip() + if l.startswith('# File:'): + return False + return l == '' or l == '}' or l.startswith('#') + + diff = difflib.ndiff(expected_lines, actual_lines, linejunk=is_junk_line) + filtered_diff = (l for l in diff if l.startswith('+')) + return ''.join(filtered_diff) + + +def _DiffFileContents(expected_path, actual_data): + """Check file contents for equality and return the diff or None.""" + # Remove all trailing whitespace and add it explicitly in the end. 
+ with open(expected_path) as f_expected: + expected_lines = [l.rstrip() for l in f_expected.readlines()] + actual_lines = [ + _SkipOmitted(line).rstrip() for line in actual_data.splitlines() + ] + + if expected_lines == actual_lines: + return None + + expected_path = os.path.relpath(expected_path, build_utils.DIR_SOURCE_ROOT) + + diff = difflib.unified_diff( + expected_lines, + actual_lines, + fromfile=os.path.join('before', expected_path), + tofile=os.path.join('after', expected_path), + n=0, + lineterm='', + ) + + return '\n'.join(diff) + + +def AddCommandLineFlags(parser): + group = parser.add_argument_group('Expectations') + group.add_argument( + '--expected-file', + help='Expected contents for the check. If --expected-file-base is set, ' + 'this is a diff of --actual-file and --expected-file-base.') + group.add_argument( + '--expected-file-base', + help='File to diff against before comparing to --expected-file.') + group.add_argument('--actual-file', + help='Path to write actual file (for reference).') + group.add_argument('--failure-file', + help='Write to this file if expectations fail.') + group.add_argument('--fail-on-expectations', + action="store_true", + help='Fail on expectation mismatches.') + group.add_argument('--only-verify-expectations', + action='store_true', + help='Verify the expectation and exit.') + + +def CheckExpectations(actual_data, options, custom_msg=''): + if options.actual_file: + with action_helpers.atomic_output(options.actual_file) as f: + f.write(actual_data.encode('utf8')) + if options.expected_file_base: + actual_data = _GenerateDiffWithOnlyAdditons(options.expected_file_base, + actual_data) + diff_text = _DiffFileContents(options.expected_file, actual_data) + + if not diff_text: + fail_msg = '' + else: + fail_msg = """ +Expectations need updating: +https://chromium.googlesource.com/chromium/src/+/HEAD/chrome/android/expectations/README.md + +LogDog tip: Use "Raw log" or "Switch to lite mode" before copying: +https://bugs.chromium.org/p/chromium/issues/detail?id=984616 + +{} + +To update expectations, run: +########### START ########### + patch -p1 <<'END_DIFF' +{} +END_DIFF +############ END ############ +""".format(custom_msg, diff_text) + + sys.stderr.write(fail_msg) + + if fail_msg and options.fail_on_expectations: + # Don't write failure file when failing on expectations or else the target + # will not be re-run on subsequent ninja invocations. + sys.exit(1) + + if options.failure_file: + with open(options.failure_file, 'w') as f: + f.write(fail_msg) diff --git a/android/gyp/util/jar_info_utils.py b/android/gyp/util/jar_info_utils.py new file mode 100644 index 000000000000..3a895c2a81a3 --- /dev/null +++ b/android/gyp/util/jar_info_utils.py @@ -0,0 +1,59 @@ +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import os + +# Utilities to read and write .jar.info files. +# +# A .jar.info file contains a simple mapping from fully-qualified Java class +# names to the source file that actually defines it. +# +# For APKs, the .jar.info maps the class names to the .jar file that which +# contains its .class definition instead. + + +def ReadAarSourceInfo(info_path): + """Returns the source= path from an .aar's source.info file.""" + # The .info looks like: "source=path/to/.aar\n". + with open(info_path) as f: + return f.read().rstrip().split('=', 1)[1] + + +def ParseJarInfoFile(info_path): + """Parse a given .jar.info file as a dictionary. 
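+
+  Each line maps a fully-qualified class name to a path, e.g. (illustrative
+  values):
+    org.chromium.base.Callback,base/android/java/src/org/chromium/base/Callback.java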
+ + Args: + info_path: input .jar.info file path. + Returns: + A new dictionary mapping fully-qualified Java class names to file paths. + """ + info_data = dict() + if os.path.exists(info_path): + with open(info_path, 'r') as info_file: + for line in info_file: + line = line.strip() + if line: + fully_qualified_name, path = line.split(',', 1) + info_data[fully_qualified_name] = path + return info_data + + +def WriteJarInfoFile(output_obj, info_data, source_file_map=None): + """Generate a .jar.info file from a given dictionary. + + Args: + output_obj: output file object. + info_data: a mapping of fully qualified Java class names to filepaths. + source_file_map: an optional mapping from java source file paths to the + corresponding source .srcjar. This is because info_data may contain the + path of Java source files that where extracted from an .srcjar into a + temporary location. + """ + for fully_qualified_name, path in sorted(info_data.items()): + if source_file_map and path in source_file_map: + path = source_file_map[path] + assert not path.startswith('/tmp'), ( + 'Java file path should not be in temp dir: {}'.format(path)) + output_obj.write(('{},{}\n'.format(fully_qualified_name, + path)).encode('utf8')) diff --git a/android/gyp/util/java_cpp_utils.py b/android/gyp/util/java_cpp_utils.py new file mode 100644 index 000000000000..46f05f66241b --- /dev/null +++ b/android/gyp/util/java_cpp_utils.py @@ -0,0 +1,192 @@ +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import os +import re +import sys + + +def GetScriptName(): + return os.path.basename(os.path.abspath(sys.argv[0])) + + +def GetJavaFilePath(java_package, class_name): + package_path = java_package.replace('.', os.path.sep) + file_name = class_name + '.java' + return os.path.join(package_path, file_name) + + +def KCamelToShouty(s): + """Convert |s| from kCamelCase or CamelCase to SHOUTY_CASE. + + kFooBar -> FOO_BAR + FooBar -> FOO_BAR + FooBAR9 -> FOO_BAR9 + FooBARBaz -> FOO_BAR_BAZ + """ + if not re.match(r'^k?([A-Z][^A-Z]+|[A-Z0-9]+)+$', s): + return s + # Strip the leading k. + s = re.sub(r'^k', '', s) + # Treat "WebView" like one word. + s = re.sub(r'WebView', r'Webview', s) + # Add _ between title words and anything else. + s = re.sub(r'([^_])([A-Z][^A-Z_0-9]+)', r'\1_\2', s) + # Add _ between lower -> upper transitions. + s = re.sub(r'([^A-Z_0-9])([A-Z])', r'\1_\2', s) + return s.upper() + + +class JavaString: + def __init__(self, name, value, comments): + self.name = KCamelToShouty(name) + self.value = value + self.comments = '\n'.join(' ' + x for x in comments) + + def Format(self): + return '%s\n public static final String %s = %s;' % ( + self.comments, self.name, self.value) + + +def ParseTemplateFile(lines): + package_re = re.compile(r'^package (.*);') + class_re = re.compile(r'.*class (.*) {') + package = '' + class_name = '' + for line in lines: + package_line = package_re.match(line) + if package_line: + package = package_line.groups()[0] + class_line = class_re.match(line) + if class_line: + class_name = class_line.groups()[0] + break + return package, class_name + + +# TODO(crbug.com/937282): Work will be needed if we want to annotate specific +# constants in the file to be parsed. +class CppConstantParser: + """Parses C++ constants, retaining their comments. 
+
+  The Delegate subclass is responsible for matching and extracting the
+  constant's variable name and value, as well as generating an object to
+  represent the Java representation of this value.
+  """
+  SINGLE_LINE_COMMENT_RE = re.compile(r'\s*(// [^\n]*)')
+
+  class Delegate:
+    def ExtractConstantName(self, line):
+      """Extracts a constant's name from line or None if not a match."""
+      raise NotImplementedError()
+
+    def ExtractValue(self, line):
+      """Extracts a constant's value from line or None if not a match."""
+      raise NotImplementedError()
+
+    def CreateJavaConstant(self, name, value, comments):
+      """Creates an object representing the Java analog of a C++ constant.
+
+      CppConstantParser will not interact with the object created by this
+      method. Instead, it will store this value in a list and return a list of
+      all objects from the Parse() method. In this way, the caller may define
+      whatever class suits their need.
+
+      Args:
+        name: the constant's variable name, as extracted by
+          ExtractConstantName()
+        value: the constant's value, as extracted by ExtractValue()
+        comments: the code comments describing this constant
+      """
+      raise NotImplementedError()
+
+  def __init__(self, delegate, lines):
+    self._delegate = delegate
+    self._lines = lines
+    self._in_variable = False
+    self._in_comment = False
+    self._package = ''
+    self._current_comments = []
+    self._current_name = ''
+    self._current_value = ''
+    self._constants = []
+
+  def _Reset(self):
+    self._current_comments = []
+    self._current_name = ''
+    self._current_value = ''
+    self._in_variable = False
+    self._in_comment = False
+
+  def _AppendConstant(self):
+    self._constants.append(
+        self._delegate.CreateJavaConstant(self._current_name,
+                                          self._current_value,
+                                          self._current_comments))
+    self._Reset()
+
+  def _ParseValue(self, line):
+    current_value = self._delegate.ExtractValue(line)
+    if current_value is not None:
+      self._current_value = current_value
+      self._AppendConstant()
+    else:
+      self._Reset()
+
+  def _ParseComment(self, line):
+    comment_line = CppConstantParser.SINGLE_LINE_COMMENT_RE.match(line)
+    if comment_line:
+      self._current_comments.append(comment_line.groups()[0])
+      self._in_comment = True
+      self._in_variable = True
+      return True
+    self._in_comment = False
+    return False
+
+  def _ParseVariable(self, line):
+    current_name = self._delegate.ExtractConstantName(line)
+    if current_name is not None:
+      self._current_name = current_name
+      current_value = self._delegate.ExtractValue(line)
+      if current_value is not None:
+        self._current_value = current_value
+        self._AppendConstant()
+      else:
+        self._in_variable = True
+      return True
+    self._in_variable = False
+    return False
+
+  def _ParseLine(self, line):
+    if not self._in_variable:
+      if not self._ParseVariable(line):
+        self._ParseComment(line)
+      return
+
+    if self._in_comment:
+      if self._ParseComment(line):
+        return
+      if not self._ParseVariable(line):
+        self._Reset()
+      return
+
+    if self._in_variable:
+      self._ParseValue(line)
+
+  def Parse(self):
+    """Returns a list of objects representing C++ constants.
+
+    Each object in the list was created by Delegate.CreateJavaConstant().
+ """ + for line in self._lines: + self._ParseLine(line) + return self._constants diff --git a/android/gyp/util/manifest_utils.py b/android/gyp/util/manifest_utils.py new file mode 100644 index 000000000000..3202058b616b --- /dev/null +++ b/android/gyp/util/manifest_utils.py @@ -0,0 +1,322 @@ +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Contains common helpers for working with Android manifests.""" + +import hashlib +import os +import re +import shlex +import sys +import xml.dom.minidom as minidom +from xml.etree import ElementTree + +from util import build_utils +import action_helpers # build_utils adds //build to sys.path. + +ANDROID_NAMESPACE = 'http://schemas.android.com/apk/res/android' +TOOLS_NAMESPACE = 'http://schemas.android.com/tools' +DIST_NAMESPACE = 'http://schemas.android.com/apk/distribution' +EMPTY_ANDROID_MANIFEST_PATH = os.path.abspath( + os.path.join(os.path.dirname(__file__), '..', '..', 'AndroidManifest.xml')) +# When normalizing for expectation matching, wrap these tags when they are long +# or else they become very hard to read. +_WRAP_CANDIDATES = ( + ' node. + app_node: the node. + """ + _RegisterElementTreeNamespaces() + doc = ElementTree.parse(path) + # ElementTree.find does not work if the required tag is the root. + if doc.getroot().tag == 'manifest': + manifest_node = doc.getroot() + else: + manifest_node = doc.find('manifest') + assert manifest_node is not None, 'Manifest is none for path ' + path + + app_node = doc.find('application') + if app_node is None: + app_node = ElementTree.SubElement(manifest_node, 'application') + + return doc, manifest_node, app_node + + +def SaveManifest(doc, path): + with action_helpers.atomic_output(path) as f: + f.write(ElementTree.tostring(doc.getroot(), encoding='UTF-8')) + + +def GetPackage(manifest_node): + return manifest_node.get('package') + + +def SetUsesSdk(manifest_node, + target_sdk_version, + min_sdk_version, + max_sdk_version=None): + uses_sdk_node = manifest_node.find('./uses-sdk') + if uses_sdk_node is None: + uses_sdk_node = ElementTree.SubElement(manifest_node, 'uses-sdk') + NamespacedSet(uses_sdk_node, 'targetSdkVersion', target_sdk_version) + NamespacedSet(uses_sdk_node, 'minSdkVersion', min_sdk_version) + if max_sdk_version: + NamespacedSet(uses_sdk_node, 'maxSdkVersion', max_sdk_version) + + +def SetTargetApiIfUnset(manifest_node, target_sdk_version): + uses_sdk_node = manifest_node.find('./uses-sdk') + if uses_sdk_node is None: + uses_sdk_node = ElementTree.SubElement(manifest_node, 'uses-sdk') + curr_target_sdk_version = NamespacedGet(uses_sdk_node, 'targetSdkVersion') + if curr_target_sdk_version is None: + NamespacedSet(uses_sdk_node, 'targetSdkVersion', target_sdk_version) + return curr_target_sdk_version is None + + +def _SortAndStripElementTree(root): + # Sort alphabetically with two exceptions: + # 1) Put node last (since it's giant). + # 2) Put android:name before other attributes. + def element_sort_key(node): + if node.tag == 'application': + return 'z' + ret = ElementTree.tostring(node) + # ElementTree.tostring inserts namespace attributes for any that are needed + # for the node or any of its descendants. Remove them so as to prevent a + # change to a child that adds/removes a namespace usage from changing sort + # order. 
+    return re.sub(r' xmlns:.*?".*?"', '', ret.decode('utf8'))
+
+  name_attr = '{%s}name' % ANDROID_NAMESPACE
+
+  def attribute_sort_key(tup):
+    return ('', '') if tup[0] == name_attr else tup
+
+  def helper(node):
+    for child in node:
+      if child.text and child.text.isspace():
+        child.text = None
+      helper(child)
+
+    # Sort attributes (requires Python 3.8+).
+    node.attrib = dict(sorted(node.attrib.items(), key=attribute_sort_key))
+
+    # Sort nodes
+    node[:] = sorted(node, key=element_sort_key)
+
+  helper(root)
+
+
+def _SplitElement(line):
+  """Parses a one-line xml node into ('<tag', ['attr1="val1"', ...], '>')."""
+
+  # Shlex splits nicely, but removes quotes. Need to put them back.
+  def restore_quotes(value):
+    return value.replace('=', '="', 1) + '"'
+
+  # Simplify restore_quotes by separating />.
+  assert line.endswith('>'), line
+  end_tag = '>'
+  if line.endswith('/>'):
+    end_tag = '/>'
+  line = line[:-len(end_tag)]
+
+  # Use shlex to avoid having to re-encode &quot;, etc.
+  parts = shlex.split(line)
+  start_tag = parts[0]
+  attrs = parts[1:]
+
+  return start_tag, [restore_quotes(x) for x in attrs], end_tag
+
+
+def _CreateNodeHash(lines):
+  """Computes a hash (md5) for the first XML node found in |lines|.
+
+  Args:
+    lines: List of strings containing pretty-printed XML.
+
+  Returns:
+    The first 8 hex characters of the node's md5 (including children).
+  """
+  target_indent = lines[0].find('<')
+  tag_closed = False
+  for i, l in enumerate(lines[1:]):
+    cur_indent = l.find('<')
+    if cur_indent != -1 and cur_indent <= target_indent:
+      tag_lines = lines[:i + 1]
+      break
+    if not tag_closed and 'android:name="' in l:
+      # To reduce noise from node tags changing, use android:name as the basis
+      # of the hash, since it is usually unique.
+      tag_lines = [l]
+      break
+    tag_closed = tag_closed or '>' in l
+  else:
+    assert False, 'Did not find end of node:\n' + '\n'.join(lines)
+
+  # Insecure and truncated hash, as it only needs to be unique vs. its
+  # neighbors.
+  return hashlib.md5(('\n'.join(tag_lines)).encode('utf8')).hexdigest()[:8]
+
+
+def _IsSelfClosing(lines):
+  """Given pretty-printed xml, returns whether first node is self-closing."""
+  for l in lines:
+    idx = l.find('>')
+    if idx != -1:
+      return l[idx - 1] == '/'
+  raise RuntimeError('Did not find end of tag:\n%s' % '\n'.join(lines))
+
+
+def _AddDiffTags(lines):
+  # When multiple identical tags appear sequentially, XML diffs can look like:
+  # +  </tag>
+  # +  <tag>
+  # rather than:
+  # +  <tag>
+  # +  </tag>
+  # To reduce confusion, add hashes to tags.
+  # This also ensures changed tags show up with outer elements rather than
+  # showing only changed attributes.
+  hash_stack = []
+  for i, l in enumerate(lines):
+    stripped = l.lstrip()
+    # Ignore non-indented tags and lines that are not the start/end of a node.
+    if l[0] != ' ' or stripped[0] != '<':
+      continue
+    # Ignore self-closing nodes that fit on one line.
+    if l[-2:] == '/>':
+      continue
+    # Ignore <manifest> since diff tag changes with basically any change.
+    if stripped.lstrip('</').startswith('manifest'):
+      continue
+
+    # Check for the closing tag (</foo>).
+    if stripped[1] != '/':
+      cur_hash = _CreateNodeHash(lines[i:])
+      if not _IsSelfClosing(lines[i:]):
+        hash_stack.append(cur_hash)
+    else:
+      cur_hash = hash_stack.pop()
+    lines[i] += '  # DIFF-ANCHOR: {}'.format(cur_hash)
+  assert not hash_stack, 'hash_stack was not empty:\n' + '\n'.join(hash_stack)
+
+
+def NormalizeManifest(manifest_contents, version_code_offset,
+                      library_version_offset):
+  _RegisterElementTreeNamespaces()
+  # This also strips comments and sorts node attributes alphabetically.
+ root = ElementTree.fromstring(manifest_contents) + package = GetPackage(root) + + app_node = root.find('application') + if app_node is not None: + # android:debuggable is added when !is_official_build. Strip it out to avoid + # expectation diffs caused by not adding is_official_build. Play store + # blocks uploading apps with it set, so there's no risk of it slipping in. + debuggable_name = '{%s}debuggable' % ANDROID_NAMESPACE + if debuggable_name in app_node.attrib: + del app_node.attrib[debuggable_name] + + version_code = NamespacedGet(root, 'versionCode') + if version_code and version_code_offset: + version_code = int(version_code) - int(version_code_offset) + NamespacedSet(root, 'versionCode', f'OFFSET={version_code}') + version_name = NamespacedGet(root, 'versionName') + if version_name: + version_name = re.sub(r'\d+', '#', version_name) + NamespacedSet(root, 'versionName', version_name) + + # Trichrome's static library version number is updated daily. To avoid + # frequent manifest check failures, we remove the exact version number + # during normalization. + for node in app_node: + if node.tag in ['uses-static-library', 'static-library']: + version = NamespacedGet(node, 'version') + if version and library_version_offset: + version = int(version) - int(library_version_offset) + NamespacedSet(node, 'version', f'OFFSET={version}') + + # We also remove the exact package name (except the one at the root level) + # to avoid noise during manifest comparison. + def blur_package_name(node): + for key in node.keys(): + node.set(key, node.get(key).replace(package, '$PACKAGE')) + + for child in node: + blur_package_name(child) + + # We only blur the package names of non-root nodes because they generate a lot + # of diffs when doing manifest checks for upstream targets. We still want to + # have 1 piece of package name not blurred just in case the package name is + # mistakenly changed. + for child in root: + blur_package_name(child) + + _SortAndStripElementTree(root) + + # Fix up whitespace/indentation. + dom = minidom.parseString(ElementTree.tostring(root)) + out_lines = [] + for l in dom.toprettyxml(indent=' ').splitlines(): + if not l or l.isspace(): + continue + if len(l) > _WRAP_LINE_LENGTH and any(x in l for x in _WRAP_CANDIDATES): + indent = ' ' * l.find('<') + start_tag, attrs, end_tag = _SplitElement(l) + out_lines.append('{}{}'.format(indent, start_tag)) + for attribute in attrs: + out_lines.append('{} {}'.format(indent, attribute)) + out_lines[-1] += '>' + # Heuristic: Do not allow multi-line tags to be self-closing since these + # can generally be allowed to have nested elements. When diffing, it adds + # noise if the base file is self-closing and the non-base file is not + # self-closing. 
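+      # Illustrative: a long '<service android:name="X"/>' line becomes
+      # '<service', '    android:name="X">', '</service>'.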
+      if end_tag == '/>':
+        out_lines.append('{}{}>'.format(indent, start_tag.replace('<', '</')))
+    else:
+      out_lines.append(l)
+
+  _AddDiffTags(out_lines)
+
+  return '\n'.join(out_lines) + '\n'
diff --git a/android/gyp/util/manifest_utils_test.py b/android/gyp/util/manifest_utils_test.py
new file mode 100755
--- /dev/null
+++ b/android/gyp/util/manifest_utils_test.py
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+import unittest
+
+sys.path.insert(
+    0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import manifest_utils
+
+# The XML bodies of the two constants below did not survive extraction (their
+# angle-bracketed tags were stripped); only the format substitution points and
+# DIFF-ANCHOR values below are recoverable.
+_TEST_MANIFEST = """\
+      {extra_activity_attr}
+        {extra_intent_filter_elem}
+"""
+
+_TEST_MANIFEST_NORMALIZED = """\
+  # DIFF-ANCHOR: {intent_filter_diff_anchor}
+        {extra_intent_filter_elem}\
+  # DIFF-ANCHOR: {intent_filter_diff_anchor}
+  # DIFF-ANCHOR: {activity_diff_anchor}
+  # DIFF-ANCHOR: ddab3320
+"""
+
+_ACTIVITY_DIFF_ANCHOR = '32b3a641'
+_INTENT_FILTER_DIFF_ANCHOR = '4ee601b7'
+
+
+def _CreateTestData(intent_filter_diff_anchor=_INTENT_FILTER_DIFF_ANCHOR,
+                    extra_activity_attr='',
+                    extra_intent_filter_elem=''):
+  if extra_activity_attr:
+    extra_activity_attr += '\n    '
+  if extra_intent_filter_elem:
+    extra_intent_filter_elem += '\n    '
+  test_manifest = _TEST_MANIFEST.format(
+      extra_activity_attr=extra_activity_attr,
+      extra_intent_filter_elem=extra_intent_filter_elem)
+  expected = _TEST_MANIFEST_NORMALIZED.format(
+      activity_diff_anchor=_ACTIVITY_DIFF_ANCHOR,
+      intent_filter_diff_anchor=intent_filter_diff_anchor,
+      extra_activity_attr=extra_activity_attr,
+      extra_intent_filter_elem=extra_intent_filter_elem)
+  return test_manifest, expected
+
+
+class ManifestUtilsTest(unittest.TestCase):
+  # Enable diff output.
+  maxDiff = None
+
+  def testNormalizeManifest_golden(self):
+    test_manifest, expected = _CreateTestData()
+    actual = manifest_utils.NormalizeManifest(test_manifest, 1230, None)
+    self.assertMultiLineEqual(expected, actual)
+
+  def testNormalizeManifest_nameUsedForActivity(self):
+    test_manifest, expected = _CreateTestData(extra_activity_attr='a="b"')
+    actual = manifest_utils.NormalizeManifest(test_manifest, 1230, None)
+    # Checks that the DIFF-ANCHOR does not change with the added attribute.
+    self.assertMultiLineEqual(expected, actual)
+
+  def testNormalizeManifest_nameNotUsedForIntentFilter(self):
+    test_manifest, expected = _CreateTestData(
+        extra_intent_filter_elem='', intent_filter_diff_anchor='5f5c8a70')
+    actual = manifest_utils.NormalizeManifest(test_manifest, 1230, None)
+    # Checks that the DIFF-ANCHOR does change with the added element despite
+    # having a nested element with an android:name set.
+    self.assertMultiLineEqual(expected, actual)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/android/gyp/util/md5_check.py b/android/gyp/util/md5_check.py
new file mode 100644
index 000000000000..269ae284076c
--- /dev/null
+++ b/android/gyp/util/md5_check.py
@@ -0,0 +1,469 @@
+# Copyright 2013 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import difflib
+import hashlib
+import itertools
+import json
+import os
+import sys
+import zipfile
+
+from util import build_utils
+import action_helpers  # build_utils adds //build to sys.path.
+import print_python_deps
+
+# When set and a difference is detected, a diff of what changed is printed.
+PRINT_EXPLANATIONS = int(os.environ.get('PRINT_BUILD_EXPLANATIONS', 0))
+
+# An escape hatch that causes all targets to be rebuilt.
+_FORCE_REBUILD = int(os.environ.get('FORCE_REBUILD', 0))
+
+
+def CallAndWriteDepfileIfStale(on_stale_md5,
+                               options,
+                               record_path=None,
+                               input_paths=None,
+                               input_strings=None,
+                               output_paths=None,
+                               force=False,
+                               pass_changes=False,
+                               track_subpaths_allowlist=None,
+                               depfile_deps=None):
+  """Wraps CallAndRecordIfStale() and writes a depfile if applicable.
+
+  Depfiles are automatically added to output_paths when present in the
+  |options| argument. They are then created after |on_stale_md5| is called.
+
+  By default, only python dependencies are added to the depfile. If there are
+  other input paths that are not captured by GN deps, then they should be
+  listed in depfile_deps. It's important not to write paths to the depfile
+  that are already captured by GN deps, since GN args can cause GN deps to
+  change, and such changes are not immediately reflected in depfiles
+  (http://crbug.com/589311).
+  """
+  if not output_paths:
+    raise Exception('At least one output_path must be specified.')
+  input_paths = list(input_paths or [])
+  input_strings = list(input_strings or [])
+  output_paths = list(output_paths or [])
+
+  input_paths += print_python_deps.ComputePythonDependencies()
+
+  CallAndRecordIfStale(
+      on_stale_md5,
+      record_path=record_path,
+      input_paths=input_paths,
+      input_strings=input_strings,
+      output_paths=output_paths,
+      force=force,
+      pass_changes=pass_changes,
+      track_subpaths_allowlist=track_subpaths_allowlist)
+
+  # Write depfile even when inputs have not changed to ensure build correctness
+  # on bots that build with & without patch, and the patch changes the depfile
+  # location.
+  if hasattr(options, 'depfile') and options.depfile:
+    action_helpers.write_depfile(options.depfile, output_paths[0],
+                                 depfile_deps)
+
+
+def CallAndRecordIfStale(function,
+                         record_path=None,
+                         input_paths=None,
+                         input_strings=None,
+                         output_paths=None,
+                         force=False,
+                         pass_changes=False,
+                         track_subpaths_allowlist=None):
+  """Calls function if outputs are stale.
+
+  Outputs are considered stale if:
+  - any output_paths are missing, or
+  - the contents of any file within input_paths has changed, or
+  - the contents of input_strings has changed.
+
+  To debug which files are out-of-date, set the environment variable:
+    PRINT_BUILD_EXPLANATIONS=1
+
+  Args:
+    function: The function to call.
+    record_path: Path to record metadata.
+      Defaults to output_paths[0] + '.md5.stamp'
+    input_paths: List of paths to calculate an md5 sum on.
+    input_strings: List of strings to record verbatim.
+    output_paths: List of output paths.
+    force: Whether to treat outputs as missing regardless of whether they
+      actually are.
+    pass_changes: Whether to pass a Changes instance to |function|.
+    track_subpaths_allowlist: Relevant only when pass_changes=True. List of
+      .zip files from |input_paths| to make subpath information available for.
+  """
+  assert record_path or output_paths
+  input_paths = input_paths or []
+  input_strings = input_strings or []
+  output_paths = output_paths or []
+  record_path = record_path or output_paths[0] + '.md5.stamp'
+
+  assert record_path.endswith('.stamp'), (
+      'record paths must end in \'.stamp\' so that they are easy to find '
+      'and delete')
+
+  new_metadata = _Metadata(track_entries=pass_changes or PRINT_EXPLANATIONS)
+  new_metadata.AddStrings(input_strings)
+
+  zip_allowlist = set(track_subpaths_allowlist or [])
+  for path in input_paths:
+    # It's faster to md5 an entire zip file than it is to just locate & hash
+    # its central directory (which is what this used to do).
+    if path in zip_allowlist:
+      entries = _ExtractZipEntries(path)
+      new_metadata.AddZipFile(path, entries)
+    else:
+      new_metadata.AddFile(path, _ComputeTagForPath(path))
+
+  old_metadata = None
+  force = force or _FORCE_REBUILD
+  missing_outputs = [x for x in output_paths if force or not os.path.exists(x)]
+  too_new = []
+  # When outputs are missing, don't bother gathering change information.
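+  # At this point |new_metadata| is fully populated; |old_metadata| is loaded
+  # below only when all outputs exist and none postdate the stamp file.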
+  if not missing_outputs and os.path.exists(record_path):
+    record_mtime = os.path.getmtime(record_path)
+    # Outputs newer than the change information must have been modified
+    # outside of the build, and should be considered stale.
+    too_new = [x for x in output_paths if os.path.getmtime(x) > record_mtime]
+    if not too_new:
+      with open(record_path, 'r') as jsonfile:
+        try:
+          old_metadata = _Metadata.FromFile(jsonfile)
+        except:  # pylint: disable=bare-except
+          pass  # Not yet using new file format.
+
+  changes = Changes(old_metadata, new_metadata, force, missing_outputs,
+                    too_new)
+  if not changes.HasChanges():
+    return
+
+  if PRINT_EXPLANATIONS:
+    print('=' * 80)
+    print('Target is stale: %s' % record_path)
+    print(changes.DescribeDifference())
+    print('=' * 80)
+
+  args = (changes, ) if pass_changes else ()
+  function(*args)
+
+  with open(record_path, 'w') as f:
+    new_metadata.ToFile(f)
+
+
+class Changes:
+  """Provides an API for querying what changed between runs."""
+
+  def __init__(self, old_metadata, new_metadata, force, missing_outputs,
+               too_new):
+    self.old_metadata = old_metadata
+    self.new_metadata = new_metadata
+    self.force = force
+    self.missing_outputs = missing_outputs
+    self.too_new = too_new
+
+  def _GetOldTag(self, path, subpath=None):
+    return self.old_metadata and self.old_metadata.GetTag(path, subpath)
+
+  def HasChanges(self):
+    """Returns whether any changes exist."""
+    return (self.HasStringChanges()
+            or self.old_metadata.FilesMd5() != self.new_metadata.FilesMd5())
+
+  def HasStringChanges(self):
+    """Returns whether string metadata changed."""
+    return (self.force or not self.old_metadata
+            or self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5())
+
+  def AddedOrModifiedOnly(self):
+    """Returns whether the only changes were from added or modified (sub)files.
+
+    No missing outputs, no removed paths/subpaths.
+ """ + if self.HasStringChanges(): + return False + if any(self.IterRemovedPaths()): + return False + for path in self.IterModifiedPaths(): + if any(self.IterRemovedSubpaths(path)): + return False + return True + + def IterAllPaths(self): + """Generator for paths.""" + return self.new_metadata.IterPaths(); + + def IterAllSubpaths(self, path): + """Generator for subpaths.""" + return self.new_metadata.IterSubpaths(path); + + def IterAddedPaths(self): + """Generator for paths that were added.""" + for path in self.new_metadata.IterPaths(): + if self._GetOldTag(path) is None: + yield path + + def IterAddedSubpaths(self, path): + """Generator for paths that were added within the given zip file.""" + for subpath in self.new_metadata.IterSubpaths(path): + if self._GetOldTag(path, subpath) is None: + yield subpath + + def IterRemovedPaths(self): + """Generator for paths that were removed.""" + if self.old_metadata: + for path in self.old_metadata.IterPaths(): + if self.new_metadata.GetTag(path) is None: + yield path + + def IterRemovedSubpaths(self, path): + """Generator for paths that were removed within the given zip file.""" + if self.old_metadata: + for subpath in self.old_metadata.IterSubpaths(path): + if self.new_metadata.GetTag(path, subpath) is None: + yield subpath + + def IterModifiedPaths(self): + """Generator for paths whose contents have changed.""" + for path in self.new_metadata.IterPaths(): + old_tag = self._GetOldTag(path) + new_tag = self.new_metadata.GetTag(path) + if old_tag is not None and old_tag != new_tag: + yield path + + def IterModifiedSubpaths(self, path): + """Generator for paths within a zip file whose contents have changed.""" + for subpath in self.new_metadata.IterSubpaths(path): + old_tag = self._GetOldTag(path, subpath) + new_tag = self.new_metadata.GetTag(path, subpath) + if old_tag is not None and old_tag != new_tag: + yield subpath + + def IterChangedPaths(self): + """Generator for all changed paths (added/removed/modified).""" + return itertools.chain(self.IterRemovedPaths(), + self.IterModifiedPaths(), + self.IterAddedPaths()) + + def IterChangedSubpaths(self, path): + """Generator for paths within a zip that were added/removed/modified.""" + return itertools.chain(self.IterRemovedSubpaths(path), + self.IterModifiedSubpaths(path), + self.IterAddedSubpaths(path)) + + def DescribeDifference(self): + """Returns a human-readable description of what changed.""" + if self.force: + return 'force=True' + if self.missing_outputs: + return 'Outputs do not exist:\n ' + '\n '.join(self.missing_outputs) + if self.too_new: + return 'Outputs newer than stamp file:\n ' + '\n '.join(self.too_new) + if self.old_metadata is None: + return 'Previous stamp file not found.' + + if self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5(): + ndiff = difflib.ndiff(self.old_metadata.GetStrings(), + self.new_metadata.GetStrings()) + changed = [s for s in ndiff if not s.startswith(' ')] + return 'Input strings changed:\n ' + '\n '.join(changed) + + if self.old_metadata.FilesMd5() == self.new_metadata.FilesMd5(): + return "There's no difference." 
+ + lines = [] + lines.extend('Added: ' + p for p in self.IterAddedPaths()) + lines.extend('Removed: ' + p for p in self.IterRemovedPaths()) + for path in self.IterModifiedPaths(): + lines.append('Modified: ' + path) + lines.extend(' -> Subpath added: ' + p + for p in self.IterAddedSubpaths(path)) + lines.extend(' -> Subpath removed: ' + p + for p in self.IterRemovedSubpaths(path)) + lines.extend(' -> Subpath modified: ' + p + for p in self.IterModifiedSubpaths(path)) + if lines: + return 'Input files changed:\n ' + '\n '.join(lines) + return 'I have no idea what changed (there is a bug).' + + +class _Metadata: + """Data model for tracking change metadata. + + Args: + track_entries: Enables per-file change tracking. Slower, but required for + Changes functionality. + """ + # Schema: + # { + # "files-md5": "VALUE", + # "strings-md5": "VALUE", + # "input-files": [ + # { + # "path": "path.jar", + # "tag": "{MD5 of entries}", + # "entries": [ + # { "path": "org/chromium/base/Foo.class", "tag": "{CRC32}" }, ... + # ] + # }, { + # "path": "path.txt", + # "tag": "{MD5}", + # } + # ], + # "input-strings": ["a", "b", ...], + # } + def __init__(self, track_entries=False): + self._track_entries = track_entries + self._files_md5 = None + self._strings_md5 = None + self._files = [] + self._strings = [] + # Map of (path, subpath) -> entry. Created upon first call to _GetEntry(). + self._file_map = None + + @classmethod + def FromFile(cls, fileobj): + """Returns a _Metadata initialized from a file object.""" + ret = cls() + obj = json.load(fileobj) + ret._files_md5 = obj['files-md5'] + ret._strings_md5 = obj['strings-md5'] + ret._files = obj.get('input-files', []) + ret._strings = obj.get('input-strings', []) + return ret + + def ToFile(self, fileobj): + """Serializes metadata to the given file object.""" + obj = { + 'files-md5': self.FilesMd5(), + 'strings-md5': self.StringsMd5(), + } + if self._track_entries: + obj['input-files'] = sorted(self._files, key=lambda e: e['path']) + obj['input-strings'] = self._strings + + json.dump(obj, fileobj, indent=2) + + def _AssertNotQueried(self): + assert self._files_md5 is None + assert self._strings_md5 is None + assert self._file_map is None + + def AddStrings(self, values): + self._AssertNotQueried() + self._strings.extend(str(v) for v in values) + + def AddFile(self, path, tag): + """Adds metadata for a non-zip file. + + Args: + path: Path to the file. + tag: A short string representative of the file contents. + """ + self._AssertNotQueried() + self._files.append({ + 'path': path, + 'tag': tag, + }) + + def AddZipFile(self, path, entries): + """Adds metadata for a zip file. + + Args: + path: Path to the file. + entries: List of (subpath, tag) tuples for entries within the zip. + """ + self._AssertNotQueried() + tag = _ComputeInlineMd5(itertools.chain((e[0] for e in entries), + (e[1] for e in entries))) + self._files.append({ + 'path': path, + 'tag': tag, + 'entries': [{"path": e[0], "tag": e[1]} for e in entries], + }) + + def GetStrings(self): + """Returns the list of input strings.""" + return self._strings + + def FilesMd5(self): + """Lazily computes and returns the aggregate md5 of input files.""" + if self._files_md5 is None: + # Omit paths from md5 since temporary files have random names. 
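+      # i.e. files-md5 hashes the concatenation of per-file tags (not the
+      # paths themselves), taken in path-sorted order.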
+ self._files_md5 = _ComputeInlineMd5( + self.GetTag(p) for p in sorted(self.IterPaths())) + return self._files_md5 + + def StringsMd5(self): + """Lazily computes and returns the aggregate md5 of input strings.""" + if self._strings_md5 is None: + self._strings_md5 = _ComputeInlineMd5(self._strings) + return self._strings_md5 + + def _GetEntry(self, path, subpath=None): + """Returns the JSON entry for the given path / subpath.""" + if self._file_map is None: + self._file_map = {} + for entry in self._files: + self._file_map[(entry['path'], None)] = entry + for subentry in entry.get('entries', ()): + self._file_map[(entry['path'], subentry['path'])] = subentry + return self._file_map.get((path, subpath)) + + def GetTag(self, path, subpath=None): + """Returns the tag for the given path / subpath.""" + ret = self._GetEntry(path, subpath) + return ret and ret['tag'] + + def IterPaths(self): + """Returns a generator for all top-level paths.""" + return (e['path'] for e in self._files) + + def IterSubpaths(self, path): + """Returns a generator for all subpaths in the given zip. + + If the given path is not a zip file or doesn't exist, returns an empty + iterable. + """ + outer_entry = self._GetEntry(path) + if not outer_entry: + return () + subentries = outer_entry.get('entries', []) + return (entry['path'] for entry in subentries) + + +def _ComputeTagForPath(path): + stat = os.stat(path) + if stat.st_size > 1 * 1024 * 1024: + # Fallback to mtime for large files so that md5_check does not take too long + # to run. + return stat.st_mtime + md5 = hashlib.md5() + with open(path, 'rb') as f: + md5.update(f.read()) + return md5.hexdigest() + + +def _ComputeInlineMd5(iterable): + """Computes the md5 of the concatenated parameters.""" + md5 = hashlib.md5() + for item in iterable: + md5.update(str(item).encode('ascii')) + return md5.hexdigest() + + +def _ExtractZipEntries(path): + """Returns a list of (path, CRC32) of all files within |path|.""" + entries = [] + with zipfile.ZipFile(path) as zip_file: + for zip_info in zip_file.infolist(): + # Skip directories and empty files. + if zip_info.CRC: + entries.append( + (zip_info.filename, zip_info.CRC + zip_info.compress_type)) + return entries diff --git a/android/gyp/util/md5_check_test.py b/android/gyp/util/md5_check_test.py new file mode 100755 index 000000000000..e1e940b4da31 --- /dev/null +++ b/android/gyp/util/md5_check_test.py @@ -0,0 +1,178 @@ +#!/usr/bin/env python3 +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import fnmatch +import os +import sys +import tempfile +import unittest +import zipfile + +sys.path.insert( + 0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))) +from util import md5_check + + +def _WriteZipFile(path, entries): + with zipfile.ZipFile(path, 'w') as zip_file: + for subpath, data in entries: + zip_file.writestr(subpath, data) + + +class TestMd5Check(unittest.TestCase): + def setUp(self): + self.called = False + self.changes = None + + def testCallAndRecordIfStale(self): + input_strings = ['string1', 'string2'] + input_file1 = tempfile.NamedTemporaryFile(suffix='.txt') + input_file2 = tempfile.NamedTemporaryFile(suffix='.zip') + file1_contents = b'input file 1' + input_file1.write(file1_contents) + input_file1.flush() + # Test out empty zip file to start. 
+ _WriteZipFile(input_file2.name, []) + input_files = [input_file1.name, input_file2.name] + zip_paths = [input_file2.name] + + record_path = tempfile.NamedTemporaryFile(suffix='.stamp') + + def CheckCallAndRecord(should_call, + message, + force=False, + outputs_specified=False, + outputs_missing=False, + expected_changes=None, + added_or_modified_only=None, + track_subentries=False, + output_newer_than_record=False): + output_paths = None + if outputs_specified: + output_file1 = tempfile.NamedTemporaryFile() + if outputs_missing: + output_file1.close() # Gets deleted on close(). + output_paths = [output_file1.name] + if output_newer_than_record: + output_mtime = os.path.getmtime(output_file1.name) + os.utime(record_path.name, (output_mtime - 1, output_mtime - 1)) + else: + # touch the record file so it doesn't look like it's older that + # the output we've just created + os.utime(record_path.name, None) + + self.called = False + self.changes = None + if expected_changes or added_or_modified_only is not None: + def MarkCalled(changes): + self.called = True + self.changes = changes + else: + def MarkCalled(): + self.called = True + + md5_check.CallAndRecordIfStale( + MarkCalled, + record_path=record_path.name, + input_paths=input_files, + input_strings=input_strings, + output_paths=output_paths, + force=force, + pass_changes=(expected_changes or added_or_modified_only) is not None, + track_subpaths_allowlist=zip_paths if track_subentries else None) + self.assertEqual(should_call, self.called, message) + if expected_changes: + description = self.changes.DescribeDifference() + self.assertTrue(fnmatch.fnmatch(description, expected_changes), + 'Expected %s to match %s' % ( + repr(description), repr(expected_changes))) + if should_call and added_or_modified_only is not None: + self.assertEqual(added_or_modified_only, + self.changes.AddedOrModifiedOnly()) + + CheckCallAndRecord(True, 'should call when record doesn\'t exist', + expected_changes='Previous stamp file not found.', + added_or_modified_only=False) + CheckCallAndRecord(False, 'should not call when nothing changed') + input_files = input_files[::-1] + CheckCallAndRecord(False, 'reordering of inputs shouldn\'t trigger call') + + CheckCallAndRecord(False, 'should not call when nothing changed #2', + outputs_specified=True, outputs_missing=False) + CheckCallAndRecord(True, 'should call when output missing', + outputs_specified=True, outputs_missing=True, + expected_changes='Outputs do not exist:*', + added_or_modified_only=False) + CheckCallAndRecord(True, + 'should call when output is newer than record', + expected_changes='Outputs newer than stamp file:*', + outputs_specified=True, + outputs_missing=False, + added_or_modified_only=False, + output_newer_than_record=True) + CheckCallAndRecord(True, force=True, message='should call when forced', + expected_changes='force=True', + added_or_modified_only=False) + + input_file1.write(b'some more input') + input_file1.flush() + CheckCallAndRecord(True, 'changed input file should trigger call', + expected_changes='*Modified: %s' % input_file1.name, + added_or_modified_only=True) + + input_files = input_files[:1] + CheckCallAndRecord(True, 'removing file should trigger call', + expected_changes='*Removed: %s' % input_file1.name, + added_or_modified_only=False) + + input_files.append(input_file1.name) + CheckCallAndRecord(True, 'added input file should trigger call', + expected_changes='*Added: %s' % input_file1.name, + added_or_modified_only=True) + + input_strings[0] = input_strings[0] + ' a bit 
longer' + CheckCallAndRecord(True, 'changed input string should trigger call', + expected_changes='*Input strings changed*', + added_or_modified_only=False) + + input_strings = input_strings[::-1] + CheckCallAndRecord(True, 'reordering of string inputs should trigger call', + expected_changes='*Input strings changed*') + + input_strings = input_strings[:1] + CheckCallAndRecord(True, 'removing a string should trigger call') + + input_strings.append('a brand new string') + CheckCallAndRecord( + True, + 'added input string should trigger call', + added_or_modified_only=False) + + _WriteZipFile(input_file2.name, [('path/1.txt', '1')]) + CheckCallAndRecord( + True, + 'added subpath should trigger call', + expected_changes='*Modified: %s*Subpath added: %s' % (input_file2.name, + 'path/1.txt'), + added_or_modified_only=True, + track_subentries=True) + _WriteZipFile(input_file2.name, [('path/1.txt', '2')]) + CheckCallAndRecord( + True, + 'changed subpath should trigger call', + expected_changes='*Modified: %s*Subpath modified: %s' % + (input_file2.name, 'path/1.txt'), + added_or_modified_only=True, + track_subentries=True) + + _WriteZipFile(input_file2.name, []) + CheckCallAndRecord(True, 'removed subpath should trigger call', + expected_changes='*Modified: %s*Subpath removed: %s' % ( + input_file2.name, 'path/1.txt'), + added_or_modified_only=False) + + +if __name__ == '__main__': + unittest.main() diff --git a/android/gyp/util/parallel.py b/android/gyp/util/parallel.py new file mode 100644 index 000000000000..dec94c7a329c --- /dev/null +++ b/android/gyp/util/parallel.py @@ -0,0 +1,217 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Helpers related to multiprocessing. + +Based on: //tools/binary_size/libsupersize/parallel.py +""" + +import atexit +import logging +import multiprocessing +import os +import sys +import threading +import traceback + +DISABLE_ASYNC = os.environ.get('DISABLE_ASYNC') == '1' +if DISABLE_ASYNC: + logging.warning('Running in synchronous mode.') + +_all_pools = None +_is_child_process = False +_silence_exceptions = False + +# Used to pass parameters to forked processes without pickling. +_fork_params = None +_fork_kwargs = None + + +class _ImmediateResult: + def __init__(self, value): + self._value = value + + def get(self): + return self._value + + def wait(self): + pass + + def ready(self): + return True + + def successful(self): + return True + + +class _ExceptionWrapper: + """Used to marshal exception messages back to main process.""" + + def __init__(self, msg, exception_type=None): + self.msg = msg + self.exception_type = exception_type + + def MaybeThrow(self): + if self.exception_type: + raise getattr(__builtins__, + self.exception_type)('Originally caused by: ' + self.msg) + + +class _FuncWrapper: + """Runs on the fork()'ed side to catch exceptions and spread *args.""" + + def __init__(self, func): + global _is_child_process + _is_child_process = True + self._func = func + + def __call__(self, index, _=None): + global _fork_kwargs + try: + if _fork_kwargs is None: # Clarifies _fork_kwargs is map for pylint. + _fork_kwargs = {} + return self._func(*_fork_params[index], **_fork_kwargs) + except Exception as e: + # Only keep the exception type for builtin exception types or else risk + # further marshalling exceptions. 
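+      # e.g. a ValueError raised in the child is re-raised as a ValueError in
+      # the parent, while a non-builtin exception type degrades to a logged
+      # traceback.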
+ exception_type = None + if hasattr(__builtins__, type(e).__name__): + exception_type = type(e).__name__ + # multiprocessing is supposed to catch and return exceptions automatically + # but it doesn't seem to work properly :(. + return _ExceptionWrapper(traceback.format_exc(), exception_type) + except: # pylint: disable=bare-except + return _ExceptionWrapper(traceback.format_exc()) + + +class _WrappedResult: + """Allows for host-side logic to be run after child process has terminated. + + * Unregisters associated pool _all_pools. + * Raises exception caught by _FuncWrapper. + """ + + def __init__(self, result, pool=None): + self._result = result + self._pool = pool + + def get(self): + self.wait() + value = self._result.get() + _CheckForException(value) + return value + + def wait(self): + self._result.wait() + if self._pool: + _all_pools.remove(self._pool) + self._pool = None + + def ready(self): + return self._result.ready() + + def successful(self): + return self._result.successful() + + +def _TerminatePools(): + """Calls .terminate() on all active process pools. + + Not supposed to be necessary according to the docs, but seems to be required + when child process throws an exception or Ctrl-C is hit. + """ + global _silence_exceptions + _silence_exceptions = True + # Child processes cannot have pools, but atexit runs this function because + # it was registered before fork()ing. + if _is_child_process: + return + + def close_pool(pool): + try: + pool.terminate() + except: # pylint: disable=bare-except + pass + + for i, pool in enumerate(_all_pools): + # Without calling terminate() on a separate thread, the call can block + # forever. + thread = threading.Thread(name='Pool-Terminate-{}'.format(i), + target=close_pool, + args=(pool, )) + thread.daemon = True + thread.start() + + +def _CheckForException(value): + if isinstance(value, _ExceptionWrapper): + global _silence_exceptions + if not _silence_exceptions: + value.MaybeThrow() + _silence_exceptions = True + logging.error('Subprocess raised an exception:\n%s', value.msg) + sys.exit(1) + + +def _MakeProcessPool(job_params, **job_kwargs): + global _all_pools + global _fork_params + global _fork_kwargs + assert _fork_params is None + assert _fork_kwargs is None + pool_size = min(len(job_params), multiprocessing.cpu_count()) + _fork_params = job_params + _fork_kwargs = job_kwargs + ret = multiprocessing.Pool(pool_size) + _fork_params = None + _fork_kwargs = None + if _all_pools is None: + _all_pools = [] + atexit.register(_TerminatePools) + _all_pools.append(ret) + return ret + + +def ForkAndCall(func, args): + """Runs |func| in a fork'ed process. + + Returns: + A Result object (call .get() to get the return value) + """ + if DISABLE_ASYNC: + pool = None + result = _ImmediateResult(func(*args)) + else: + pool = _MakeProcessPool([args]) # Omit |kwargs|. + result = pool.apply_async(_FuncWrapper(func), (0, )) + pool.close() + return _WrappedResult(result, pool=pool) + + +def BulkForkAndCall(func, arg_tuples, **kwargs): + """Calls |func| in a fork'ed process for each set of args within |arg_tuples|. + + Args: + kwargs: Common keyword arguments to be passed to |func|. + + Yields the return values in order. 
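+
+  Illustrative usage (hash_file is a hypothetical picklable function):
+    for digest in BulkForkAndCall(hash_file, [(p,) for p in paths]):
+      print(digest)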
+ """ + arg_tuples = list(arg_tuples) + if not arg_tuples: + return + + if DISABLE_ASYNC: + for args in arg_tuples: + yield func(*args, **kwargs) + return + + pool = _MakeProcessPool(arg_tuples, **kwargs) + wrapped_func = _FuncWrapper(func) + try: + for result in pool.imap(wrapped_func, range(len(arg_tuples))): + _CheckForException(result) + yield result + finally: + pool.close() + pool.join() + _all_pools.remove(pool) diff --git a/android/gyp/util/protoresources.py b/android/gyp/util/protoresources.py new file mode 100644 index 000000000000..11f877806612 --- /dev/null +++ b/android/gyp/util/protoresources.py @@ -0,0 +1,308 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Functions that modify resources in protobuf format. + +Format reference: +https://cs.android.com/search?q=f:aapt2.*Resources.proto +""" + +import logging +import os +import struct +import sys +import zipfile + +from util import build_utils +from util import resource_utils + +sys.path[1:1] = [ + # `Resources_pb2` module imports `descriptor`, which imports `six`. + os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'six', 'src'), + # Make sure the pb2 files are able to import google.protobuf + os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'protobuf', + 'python'), +] + +from proto import Resources_pb2 + +# First bytes in an .flat.arsc file. +# uint32: Magic ("ARSC"), version (1), num_entries (1), type (0) +_FLAT_ARSC_HEADER = b'AAPT\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00' + +# The package ID hardcoded for shared libraries. See +# _HardcodeSharedLibraryDynamicAttributes() for more details. If this value +# changes make sure to change REQUIRED_PACKAGE_IDENTIFIER in WebLayerImpl.java. +SHARED_LIBRARY_HARDCODED_ID = 36 + + +def _ProcessZip(zip_path, process_func): + """Filters a .zip file via: new_bytes = process_func(filename, data).""" + has_changes = False + zip_entries = [] + with zipfile.ZipFile(zip_path) as src_zip: + for info in src_zip.infolist(): + data = src_zip.read(info) + new_data = process_func(info.filename, data) + if new_data is not data: + has_changes = True + data = new_data + zip_entries.append((info, data)) + + # Overwrite the original zip file. + if has_changes: + with zipfile.ZipFile(zip_path, 'w') as f: + for info, data in zip_entries: + f.writestr(info, data) + + +def _ProcessProtoItem(item): + if not item.HasField('ref'): + return + + # If this is a dynamic attribute (type ATTRIBUTE, package ID 0), hardcode + # the package to SHARED_LIBRARY_HARDCODED_ID. 
+ if item.ref.type == Resources_pb2.Reference.ATTRIBUTE and not (item.ref.id + & 0xff000000): + item.ref.id |= (0x01000000 * SHARED_LIBRARY_HARDCODED_ID) + item.ref.ClearField('is_dynamic') + + +def _ProcessProtoValue(value): + if value.HasField('item'): + _ProcessProtoItem(value.item) + return + + compound_value = value.compound_value + if compound_value.HasField('style'): + for entry in compound_value.style.entry: + _ProcessProtoItem(entry.item) + elif compound_value.HasField('array'): + for element in compound_value.array.element: + _ProcessProtoItem(element.item) + elif compound_value.HasField('plural'): + for entry in compound_value.plural.entry: + _ProcessProtoItem(entry.item) + + +def _ProcessProtoXmlNode(xml_node): + if not xml_node.HasField('element'): + return + + for attribute in xml_node.element.attribute: + _ProcessProtoItem(attribute.compiled_item) + + for child in xml_node.element.child: + _ProcessProtoXmlNode(child) + + +def _SplitLocaleResourceType(_type, allowed_resource_names): + """Splits locale specific resources out of |_type| and returns them. + + Any locale specific resources will be removed from |_type|, and a new + Resources_pb2.Type value will be returned which contains those resources. + + Args: + _type: A Resources_pb2.Type value + allowed_resource_names: Names of locale resources that should be kept in the + main type. + """ + locale_entries = [] + for entry in _type.entry: + if entry.name in allowed_resource_names: + continue + + # First collect all resources values with a locale set. + config_values_with_locale = [] + for config_value in entry.config_value: + if config_value.config.locale: + config_values_with_locale.append(config_value) + + if config_values_with_locale: + # Remove the locale resources from the original entry + for value in config_values_with_locale: + entry.config_value.remove(value) + + # Add locale resources to a new Entry, and save for later. + locale_entry = Resources_pb2.Entry() + locale_entry.CopyFrom(entry) + del locale_entry.config_value[:] + locale_entry.config_value.extend(config_values_with_locale) + locale_entries.append(locale_entry) + + if not locale_entries: + return None + + # Copy the original type and replace the entries with |locale_entries|. + locale_type = Resources_pb2.Type() + locale_type.CopyFrom(_type) + del locale_type.entry[:] + locale_type.entry.extend(locale_entries) + return locale_type + + +def _HardcodeInTable(table, is_bundle_module, shared_resources_allowlist): + translations_package = None + if is_bundle_module: + # A separate top level package will be added to the resources, which + # contains only locale specific resources. The package ID of the locale + # resources is hardcoded to SHARED_LIBRARY_HARDCODED_ID. This causes + # resources in locale splits to all get assigned + # SHARED_LIBRARY_HARDCODED_ID as their package ID, which prevents a bug + # in shared library bundles where each split APK gets a separate dynamic + # ID, and cannot be accessed by the main APK. + translations_package = Resources_pb2.Package() + translations_package.package_id.id = SHARED_LIBRARY_HARDCODED_ID + translations_package.package_name = (table.package[0].package_name + + '_translations') + + # These resources are allowed in the base resources, since they are needed + # by WebView. 
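+  # (The allowlist is given as an R.txt file; only its string resources are
+  # honored, via resource_utils.GetRTxtStringResourceNames() below.)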
+ allowed_resource_names = set() + if shared_resources_allowlist: + allowed_resource_names = set( + resource_utils.GetRTxtStringResourceNames(shared_resources_allowlist)) + + for package in table.package: + for _type in package.type: + for entry in _type.entry: + for config_value in entry.config_value: + _ProcessProtoValue(config_value.value) + + if translations_package is not None: + locale_type = _SplitLocaleResourceType(_type, allowed_resource_names) + if locale_type: + translations_package.type.add().CopyFrom(locale_type) + + if translations_package is not None: + table.package.add().CopyFrom(translations_package) + + +def HardcodeSharedLibraryDynamicAttributes(zip_path, + is_bundle_module, + shared_resources_allowlist=None): + """Hardcodes the package IDs of dynamic attributes and locale resources. + + Hardcoding dynamic attribute package IDs is a workaround for b/147674078, + which affects Android versions pre-N. Hardcoding locale resource package IDs + is a workaround for b/155437035, which affects resources built with + --shared-lib on all Android versions + + Args: + zip_path: Path to proto APK file. + is_bundle_module: True for bundle modules. + shared_resources_allowlist: Set of resource names to not extract out of the + main package. + """ + + def process_func(filename, data): + if filename == 'resources.pb': + table = Resources_pb2.ResourceTable() + table.ParseFromString(data) + _HardcodeInTable(table, is_bundle_module, shared_resources_allowlist) + data = table.SerializeToString() + elif filename.endswith('.xml') and not filename.startswith('res/raw'): + xml_node = Resources_pb2.XmlNode() + xml_node.ParseFromString(data) + _ProcessProtoXmlNode(xml_node) + data = xml_node.SerializeToString() + return data + + _ProcessZip(zip_path, process_func) + + +class _ResourceStripper: + def __init__(self, partial_path, keep_predicate): + self.partial_path = partial_path + self.keep_predicate = keep_predicate + self._has_changes = False + + @staticmethod + def _IterStyles(entry): + for config_value in entry.config_value: + value = config_value.value + if value.HasField('compound_value'): + compound_value = value.compound_value + if compound_value.HasField('style'): + yield compound_value.style + + def _StripStyles(self, entry, type_and_name): + # Strip style entries that refer to attributes that have been stripped. 
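+    # A stripped attribute reached via a style shows up here as e.g.
+    # 'style/MyButton/textColor' (illustrative names): the parent entry's
+    # type/name plus the style item's attribute key.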
+    for style in self._IterStyles(entry):
+      entries = style.entry
+      new_entries = []
+      for e in entries:
+        full_name = '{}/{}'.format(type_and_name, e.key.name)
+        if not self.keep_predicate(full_name):
+          logging.debug('Stripped %s/%s', self.partial_path, full_name)
+        else:
+          new_entries.append(e)
+
+      if len(new_entries) != len(entries):
+        self._has_changes = True
+        del entries[:]
+        entries.extend(new_entries)
+
+  def _StripEntries(self, entries, type_name):
+    new_entries = []
+    for entry in entries:
+      type_and_name = '{}/{}'.format(type_name, entry.name)
+      if not self.keep_predicate(type_and_name):
+        logging.debug('Stripped %s/%s', self.partial_path, type_and_name)
+      else:
+        new_entries.append(entry)
+        self._StripStyles(entry, type_and_name)
+
+    if len(new_entries) != len(entries):
+      self._has_changes = True
+      del entries[:]
+      entries.extend(new_entries)
+
+  def StripTable(self, table):
+    self._has_changes = False
+    for package in table.package:
+      for _type in package.type:
+        self._StripEntries(_type.entry, _type.name)
+    return self._has_changes
+
+
+def _TableFromFlatBytes(data):
+  # https://cs.android.com/search?q=f:aapt2.*Container.cpp
+  size_idx = len(_FLAT_ARSC_HEADER)
+  proto_idx = size_idx + 8
+  if data[:size_idx] != _FLAT_ARSC_HEADER:
+    raise Exception('Expected AAPT container header in .flat.arsc data')
+  # Size is stored as uint64.
+  size = struct.unpack('<Q', data[size_idx:proto_idx])[0]
+  table = Resources_pb2.ResourceTable()
+  table.ParseFromString(data[proto_idx:proto_idx + size])
+  return table
diff --git a/android/gyp/util/resource_utils.py b/android/gyp/util/resource_utils.py
new file mode 100644
--- /dev/null
+++ b/android/gyp/util/resource_utils.py
+def ToAndroidLocaleName(chromium_locale):
+  """Convert a Chromium locale name into a corresponding Android one."""
+  # Handle the special cases first (e.g. 'fil' -> 'tl'); these mappings are
+  # needed for Android releases *before* 5.0/Lollipop.
+  android_locale = _CHROME_TO_ANDROID_LOCALE_MAP.get(chromium_locale)
+  if android_locale:
+    return android_locale
+
+  # Format of a Chromium locale name is '<lang>' or '<lang>-<region>'
+  # where <lang> is a 2 or 3 letter language code (ISO 639-1 or 639-2)
+  # and region is a capitalized locale region name.
+  lang, _, region = chromium_locale.partition('-')
+  if not region:
+    return lang
+
+  # Translate newer language tags into obsolete ones. Only necessary if
+  # region is not None (e.g. 'he-IL' -> 'iw-rIL')
+  lang = _CHROME_TO_ANDROID_LOCALE_MAP.get(lang, lang)
+
+  # Using '<lang>-r<region>' is now acceptable as a locale name for all
+  # versions of Android.
+  return '%s-r%s' % (lang, region)
+
+
+# ISO 639 language code + optional ("-r" + capitalized region code).
+# Note that before Android 5.0/Lollipop, only 2-letter ISO 639-1 codes
+# are supported.
+_RE_ANDROID_LOCALE_QUALIFIER_1 = re.compile(r'^([a-z]{2,3})(\-r([A-Z]+))?$')
+
+# Starting with Android 7.0/Nougat, BCP 47 codes are supported but must
+# be prefixed with 'b+', and may include optional tags.
+# e.g. 'b+en+US', 'b+ja+Latn', 'b+ja+Latn+JP'
+_RE_ANDROID_LOCALE_QUALIFIER_2 = re.compile(r'^b\+([a-z]{2,3})(\+.+)?$')
+
+
+def ToChromiumLocaleName(android_locale):
+  """Convert an Android locale name into a Chromium one."""
+  lang = None
+  region = None
+  script = None
+  m = _RE_ANDROID_LOCALE_QUALIFIER_1.match(android_locale)
+  if m:
+    lang = m.group(1)
+    if m.group(2):
+      region = m.group(3)
+  elif _RE_ANDROID_LOCALE_QUALIFIER_2.match(android_locale):
+    # Split an Android BCP-47 locale (e.g. b+sr+Latn+RS)
+    tags = android_locale.split('+')
+
+    # The lang tag is always the first tag.
+    lang = tags[1]
+
+    # The optional region tag is a 2ALPHA or 3DIGIT tag in pos 1 or 2.
+    # The optional script tag is 4ALPHA and always in pos 1.
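+    # e.g. for 'b+sr+Latn+RS' above, tags == ['b', 'sr', 'Latn', 'RS']:
+    # 'Latn' is 4ALPHA so it is the script, and 'RS' is the region.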
+    optional_tags = iter(tags[2:])
+
+    next_tag = next(optional_tags, None)
+    if next_tag and len(next_tag) == 4:
+      script = next_tag
+      next_tag = next(optional_tags, None)
+    if next_tag and len(next_tag) < 4:
+      region = next_tag
+
+  if not lang:
+    return None
+
+  # Special case for es-rUS -> es-419
+  if lang == 'es' and region == 'US':
+    return 'es-419'
+
+  lang = _ANDROID_TO_CHROMIUM_LANGUAGE_MAP.get(lang, lang)
+
+  if script:
+    lang = '%s-%s' % (lang, script)
+
+  if not region:
+    return lang
+
+  return '%s-%s' % (lang, region)
+
+
+def IsAndroidLocaleQualifier(string):
+  """Returns true if |string| is a valid Android resource locale qualifier."""
+  return (_RE_ANDROID_LOCALE_QUALIFIER_1.match(string)
+          or _RE_ANDROID_LOCALE_QUALIFIER_2.match(string))
+
+
+def FindLocaleInStringResourceFilePath(file_path):
+  """Return the Android locale name of a string resource file path.
+
+  Args:
+    file_path: A file path.
+  Returns:
+    If |file_path| is of the format '.../values-<locale>/<name>.xml', return
+    the value of <locale> (an Android locale qualifier). Otherwise return
+    None.
+  """
+  if not file_path.endswith('.xml'):
+    return None
+  prefix = 'values-'
+  dir_name = os.path.basename(os.path.dirname(file_path))
+  if not dir_name.startswith(prefix):
+    return None
+  qualifier = dir_name[len(prefix):]
+  return qualifier if IsAndroidLocaleQualifier(qualifier) else None
+
+
+def ToAndroidLocaleList(locale_list):
+  """Convert a list of Chromium locales into the corresponding Android list."""
+  return sorted(ToAndroidLocaleName(locale) for locale in locale_list)
+
+# Represents a line from an R.txt file.
+_TextSymbolEntry = collections.namedtuple('RTextEntry',
+    ('java_type', 'resource_type', 'name', 'value'))
+
+
+def _GenerateGlobs(pattern):
+  # This function processes the aapt ignore assets pattern into a list of globs
+  # to be used to exclude files using build_utils.MatchesGlob. It removes the
+  # '!', which is used by aapt to mean 'not chatty' so it does not output if
+  # the file is ignored (we don't output anyway, so it is not required). This
+  # function does not handle the <dir> and <file> prefixes used by aapt, which
+  # are assumed not to be included in the pattern string.
+  return pattern.replace('!', '').split(':')
+
+
+def DeduceResourceDirsFromFileList(resource_files):
+  """Return a list of resource directories from a list of resource files."""
+  # Directory list order is important, cannot use set or other data structures
+  # that change order. This is because resource files of the same name in
+  # multiple res/ directories elide one another (the last one passed is used).
+  # Thus the order must be maintained to prevent non-deterministic and possibly
+  # flaky builds.
+  resource_dirs = []
+  for resource_path in resource_files:
+    # Resources are always 1 directory deep under res/.
+    res_dir = os.path.dirname(os.path.dirname(resource_path))
+    if res_dir not in resource_dirs:
+      resource_dirs.append(res_dir)
+
+  # Check if any resource_dirs are children of other ones. This indicates that
+  # a file was listed that is not exactly 1 directory deep under res/.
+  # E.g.:
+  # sources = ["java/res/values/foo.xml", "java/res/README.md"]
+  # ^^ This will cause "java" to be detected as a resource directory.
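+  # With the sources above: a='java/res', b='java' yields relpath 'res' (no
+  # leading '..'), so "java/res/README.md" (whose deduced dir is 'java') gets
+  # reported below.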
+ for a, b in itertools.permutations(resource_dirs, 2): + if not os.path.relpath(a, b).startswith('..'): + bad_sources = (s for s in resource_files + if os.path.dirname(os.path.dirname(s)) == b) + msg = """\ +Resource(s) found that are not in a proper directory structure: + {} +All resource files must follow a structure of "$ROOT/$SUBDIR/$FILE".""" + raise Exception(msg.format('\n '.join(bad_sources))) + + return resource_dirs + + +def IterResourceFilesInDirectories(directories, + ignore_pattern=AAPT_IGNORE_PATTERN): + globs = _GenerateGlobs(ignore_pattern) + for d in directories: + for root, _, files in os.walk(d): + for f in files: + archive_path = f + parent_dir = os.path.relpath(root, d) + if parent_dir != '.': + archive_path = os.path.join(parent_dir, f) + path = os.path.join(root, f) + if build_utils.MatchesGlob(archive_path, globs): + continue + yield path, archive_path + + +class ResourceInfoFile: + """Helper for building up .res.info files.""" + + def __init__(self): + # Dict of archive_path -> source_path for the current target. + self._entries = {} + # List of (old_archive_path, new_archive_path) tuples. + self._renames = [] + # We don't currently support using both AddMapping and MergeInfoFile. + self._add_mapping_was_called = False + + def AddMapping(self, archive_path, source_path): + """Adds a single |archive_path| -> |source_path| entry.""" + self._add_mapping_was_called = True + # "values/" files do not end up in the apk except through resources.arsc. + if archive_path.startswith('values'): + return + source_path = os.path.normpath(source_path) + new_value = self._entries.setdefault(archive_path, source_path) + if new_value != source_path: + raise Exception('Duplicate AddMapping for "{}". old={} new={}'.format( + archive_path, new_value, source_path)) + + def RegisterRename(self, old_archive_path, new_archive_path): + """Records an archive_path rename. + + |old_archive_path| does not need to currently exist in the mappings. Renames + are buffered and replayed only when Write() is called. + """ + if not old_archive_path.startswith('values'): + self._renames.append((old_archive_path, new_archive_path)) + + def MergeInfoFile(self, info_file_path): + """Merges the mappings from |info_file_path| into this object. + + Any existing entries are overridden. + """ + assert not self._add_mapping_was_called + # Allows clobbering, which is used when overriding resources. + with open(info_file_path) as f: + self._entries.update(l.rstrip().split('\t') for l in f) + + def _ApplyRenames(self): + applied_renames = set() + ret = self._entries + for rename_tup in self._renames: + # Duplicate entries happen for resource overrides. + # Use a "seen" set to ensure we still error out if multiple renames + # happen for the same old_archive_path with different new_archive_paths. + if rename_tup in applied_renames: + continue + applied_renames.add(rename_tup) + old_archive_path, new_archive_path = rename_tup + ret[new_archive_path] = ret[old_archive_path] + del ret[old_archive_path] + + self._entries = None + self._renames = None + return ret + + def Write(self, info_file_path): + """Applies renames and writes out the file. + + No other methods may be called after this. 
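+
+    Each output line is '<archive_path>\t<source_path>' (tab-separated), and
+    lines are written in sorted order, e.g. (hypothetical paths):
+    'drawable/icon.png\tui/java/res/drawable/icon.png'.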
+ """ + entries = self._ApplyRenames() + lines = [] + for archive_path, source_path in entries.items(): + lines.append('{}\t{}\n'.format(archive_path, source_path)) + with open(info_file_path, 'w') as info_file: + info_file.writelines(sorted(lines)) + + +def _ParseTextSymbolsFile(path, fix_package_ids=False): + """Given an R.txt file, returns a list of _TextSymbolEntry. + + Args: + path: Input file path. + fix_package_ids: if True, 0x00 and 0x02 package IDs read from the file + will be fixed to 0x7f. + Returns: + A list of _TextSymbolEntry instances. + Raises: + Exception: An unexpected line was detected in the input. + """ + ret = [] + with open(path) as f: + for line in f: + m = re.match(r'(int(?:\[\])?) (\w+) (\w+) (.+)$', line) + if not m: + raise Exception('Unexpected line in R.txt: %s' % line) + java_type, resource_type, name, value = m.groups() + if fix_package_ids: + value = _FixPackageIds(value) + ret.append(_TextSymbolEntry(java_type, resource_type, name, value)) + return ret + + +def _FixPackageIds(resource_value): + # Resource IDs for resources belonging to regular APKs have their first byte + # as 0x7f (package id). However with webview, since it is not a regular apk + # but used as a shared library, aapt is passed the --shared-resources flag + # which changes some of the package ids to 0x00. This function normalises + # these (0x00) package ids to 0x7f, which the generated code in R.java changes + # to the correct package id at runtime. resource_value is a string with + # either, a single value '0x12345678', or an array of values like '{ + # 0xfedcba98, 0x01234567, 0x56789abc }' + return resource_value.replace('0x00', '0x7f') + + +def ResolveStyleableReferences(r_txt_path): + # Convert lines like: + # int[] styleable ViewBack { 0x010100d4, com.android.webview.R.attr.backTint } + # to: + # int[] styleable ViewBack { 0x010100d4, 0xREALVALUE } + entries = _ParseTextSymbolsFile(r_txt_path) + lookup_table = {(e.resource_type, e.name): e.value for e in entries} + + sb = [] + with open(r_txt_path, encoding='utf8') as f: + for l in f: + if l.startswith('int[] styleable'): + brace_start = l.index('{') + 2 + brace_end = l.index('}') - 1 + values = [x for x in l[brace_start:brace_end].split(', ') if x] + new_values = [] + for v in values: + try: + if not v.startswith('0x'): + resource_type, name = v.split('.')[-2:] + new_values.append(lookup_table[(resource_type, name)]) + else: + new_values.append(v) + except: + logging.warning('Failed line: %r %r', l, v) + raise + l = l[:brace_start] + ', '.join(new_values) + l[brace_end:] + sb.append(l) + + with open(r_txt_path, 'w', encoding='utf8') as f: + f.writelines(sb) + + +def _GetRTxtResourceNames(r_txt_path): + """Parse an R.txt file and extract the set of resource names from it.""" + return {entry.name for entry in _ParseTextSymbolsFile(r_txt_path)} + + +def GetRTxtStringResourceNames(r_txt_path): + """Parse an R.txt file and the list of its string resource names.""" + return sorted({ + entry.name + for entry in _ParseTextSymbolsFile(r_txt_path) + if entry.resource_type == 'string' + }) + + +def GenerateStringResourcesAllowList(module_r_txt_path, allowlist_r_txt_path): + """Generate a allowlist of string resource IDs. + + Args: + module_r_txt_path: Input base module R.txt path. + allowlist_r_txt_path: Input allowlist R.txt path. + Returns: + A dictionary mapping numerical resource IDs to the corresponding + string resource names. 
The ID values are taken from string resources in + |module_r_txt_path| that are also listed by name in |allowlist_r_txt_path|. + """ + allowlisted_names = { + entry.name + for entry in _ParseTextSymbolsFile(allowlist_r_txt_path) + if entry.resource_type == 'string' + } + return { + int(entry.value, 0): entry.name + for entry in _ParseTextSymbolsFile(module_r_txt_path) + if entry.resource_type == 'string' and entry.name in allowlisted_names + } + + +class RJavaBuildOptions: + """A class used to model the various ways to build an R.java file. + + This is used to control which resource ID variables will be final or + non-final, and whether an onResourcesLoaded() method will be generated + to adjust the non-final ones, when the corresponding library is loaded + at runtime. + + Note that by default, all resources are final, and there is no + method generated, which corresponds to calling ExportNoResources(). + """ + def __init__(self): + self.has_constant_ids = True + self.resources_allowlist = None + self.has_on_resources_loaded = False + self.export_const_styleable = False + self.final_package_id = None + self.fake_on_resources_loaded = False + + def ExportNoResources(self): + """Make all resource IDs final, and don't generate a method.""" + self.has_constant_ids = True + self.resources_allowlist = None + self.has_on_resources_loaded = False + self.export_const_styleable = False + + def ExportAllResources(self): + """Make all resource IDs non-final in the R.java file.""" + self.has_constant_ids = False + self.resources_allowlist = None + + def ExportSomeResources(self, r_txt_file_path): + """Only select specific resource IDs to be non-final. + + Args: + r_txt_file_path: The path to an R.txt file. All resources named + int it will be non-final in the generated R.java file, all others + will be final. + """ + self.has_constant_ids = True + self.resources_allowlist = _GetRTxtResourceNames(r_txt_file_path) + + def ExportAllStyleables(self): + """Make all styleable constants non-final, even non-resources ones. + + Resources that are styleable but not of int[] type are not actually + resource IDs but constants. By default they are always final. Call this + method to make them non-final anyway in the final R.java file. + """ + self.export_const_styleable = True + + def GenerateOnResourcesLoaded(self, fake=False): + """Generate an onResourcesLoaded() method. + + This Java method will be called at runtime by the framework when + the corresponding library (which includes the R.java source file) + will be loaded at runtime. This corresponds to the --shared-resources + or --app-as-shared-lib flags of 'aapt package'. + + if |fake|, then the method will be empty bodied to compile faster. This + useful for dummy R.java files that will eventually be replaced by real + ones. + """ + self.has_on_resources_loaded = True + self.fake_on_resources_loaded = fake + + def SetFinalPackageId(self, package_id): + """Sets a package ID to be used for resources marked final.""" + self.final_package_id = package_id + + def _MaybeRewriteRTxtPackageIds(self, r_txt_path): + """Rewrites package IDs in the R.txt file if necessary. + + If SetFinalPackageId() was called, some of the resource IDs may have had + their package ID changed. This function rewrites the R.txt file to match + those changes. 
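+
+    e.g. (hypothetical): with final_package_id == 0x24, a final entry value
+    of 0x7f0c0105 is rewritten to 0x240c0105 (0x00-prefixed values likewise).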
+ """ + if self.final_package_id is None: + return + + entries = _ParseTextSymbolsFile(r_txt_path) + with open(r_txt_path, 'w') as f: + for entry in entries: + value = entry.value + if self._IsResourceFinal(entry): + value = re.sub(r'0x(?:00|7f)', + '0x{:02x}'.format(self.final_package_id), value) + f.write('{} {} {} {}\n'.format(entry.java_type, entry.resource_type, + entry.name, value)) + + def _IsResourceFinal(self, entry): + """Determines whether a resource should be final or not. + + Args: + entry: A _TextSymbolEntry instance. + Returns: + True iff the corresponding entry should be final. + """ + if entry.resource_type == 'styleable' and entry.java_type != 'int[]': + # A styleable constant may be exported as non-final after all. + return not self.export_const_styleable + if not self.has_constant_ids: + # Every resource is non-final + return False + if not self.resources_allowlist: + # No allowlist means all IDs are non-final. + return True + # Otherwise, only those in the + return entry.name not in self.resources_allowlist + + +def CreateRJavaFiles(srcjar_dir, + package, + main_r_txt_file, + extra_res_packages, + rjava_build_options, + srcjar_out, + custom_root_package_name=None, + grandparent_custom_package_name=None, + ignore_mismatched_values=False): + """Create all R.java files for a set of packages and R.txt files. + + Args: + srcjar_dir: The top-level output directory for the generated files. + package: Package name for R java source files which will inherit + from the root R java file. + main_r_txt_file: The main R.txt file containing the valid values + of _all_ resource IDs. + extra_res_packages: A list of extra package names. + rjava_build_options: An RJavaBuildOptions instance that controls how + exactly the R.java file is generated. + srcjar_out: Path of desired output srcjar. + custom_root_package_name: Custom package name for module root R.java file, + (eg. vr for gen.vr package). + grandparent_custom_package_name: Custom root package name for the root + R.java file to inherit from. DFM root R.java files will have "base" + as the grandparent_custom_package_name. The format of this package name + is identical to custom_root_package_name. + (eg. for vr grandparent_custom_package_name would be "base") + ignore_mismatched_values: If True, ignores if a resource appears multiple + times with different entry values (useful when all the values are + dummy anyways). + Raises: + Exception if a package name appears several times in |extra_res_packages| + """ + rjava_build_options._MaybeRewriteRTxtPackageIds(main_r_txt_file) + + packages = list(extra_res_packages) + + if package and package not in packages: + # Sometimes, an apk target and a resources target share the same + # AndroidManifest.xml and thus |package| will already be in |packages|. + packages.append(package) + + # Map of (resource_type, name) -> Entry. + # Contains the correct values for resources. + all_resources = {} + all_resources_by_type = collections.defaultdict(list) + + main_r_text_files = [main_r_txt_file] + for r_txt_file in main_r_text_files: + for entry in _ParseTextSymbolsFile(r_txt_file, fix_package_ids=True): + entry_key = (entry.resource_type, entry.name) + if entry_key in all_resources: + if not ignore_mismatched_values: + assert entry == all_resources[entry_key], ( + 'Input R.txt %s provided a duplicate resource with a different ' + 'entry value. Got %s, expected %s.' 
% + (r_txt_file, entry, all_resources[entry_key])) + else: + all_resources[entry_key] = entry + all_resources_by_type[entry.resource_type].append(entry) + assert entry.resource_type in ALL_RESOURCE_TYPES, ( + 'Unknown resource type: %s, add to ALL_RESOURCE_TYPES!' % + entry.resource_type) + + if custom_root_package_name: + # Custom package name is available, thus use it for root_r_java_package. + root_r_java_package = GetCustomPackagePath(custom_root_package_name) + else: + # Create a unique name using srcjar_out. Underscores are added to ensure + # no reserved keywords are used for directory names. + root_r_java_package = re.sub('[^\w\.]', '', srcjar_out.replace('/', '._')) + + root_r_java_dir = os.path.join(srcjar_dir, *root_r_java_package.split('.')) + build_utils.MakeDirectory(root_r_java_dir) + root_r_java_path = os.path.join(root_r_java_dir, 'R.java') + root_java_file_contents = _RenderRootRJavaSource( + root_r_java_package, all_resources_by_type, rjava_build_options, + grandparent_custom_package_name) + with open(root_r_java_path, 'w') as f: + f.write(root_java_file_contents) + + for p in packages: + _CreateRJavaSourceFile(srcjar_dir, p, root_r_java_package, + rjava_build_options) + + +def _CreateRJavaSourceFile(srcjar_dir, package, root_r_java_package, + rjava_build_options): + """Generates an R.java source file.""" + package_r_java_dir = os.path.join(srcjar_dir, *package.split('.')) + build_utils.MakeDirectory(package_r_java_dir) + package_r_java_path = os.path.join(package_r_java_dir, 'R.java') + java_file_contents = _RenderRJavaSource(package, root_r_java_package, + rjava_build_options) + with open(package_r_java_path, 'w') as f: + f.write(java_file_contents) + + +# Resource IDs inside resource arrays are sorted. Application resource IDs start +# with 0x7f but system resource IDs start with 0x01 thus system resource ids are +# always at the start of the array. This function finds the index of the first +# non system resource id to be used for package ID rewriting (we should not +# rewrite system resource ids). +def _GetNonSystemIndex(entry): + """Get the index of the first application resource ID within a resource + array.""" + res_ids = re.findall(r'0x[0-9a-f]{8}', entry.value) + for i, res_id in enumerate(res_ids): + if res_id.startswith('0x7f'): + return i + return len(res_ids) + + +def _RenderRJavaSource(package, root_r_java_package, rjava_build_options): + """Generates the contents of a R.java file.""" + template = Template( + """/* AUTO-GENERATED FILE. DO NOT MODIFY. */ + +package {{ package }}; + +public final class R { + {% for resource_type in resource_types %} + public static final class {{ resource_type }} extends + {{ root_package }}.R.{{ resource_type }} {} + {% endfor %} + {% if has_on_resources_loaded %} + public static void onResourcesLoaded(int packageId) { + {{ root_package }}.R.onResourcesLoaded(packageId); + } + {% endif %} +} +""", + trim_blocks=True, + lstrip_blocks=True) + + return template.render( + package=package, + resource_types=sorted(ALL_RESOURCE_TYPES), + root_package=root_r_java_package, + has_on_resources_loaded=rjava_build_options.has_on_resources_loaded) + + +def GetCustomPackagePath(package_name): + return 'gen.' + package_name + '_module' + + +def _RenderRootRJavaSource(package, all_resources_by_type, rjava_build_options, + grandparent_custom_package_name): + """Render an R.java source file. 
See _CreateRJavaSourceFile for args info."""
+  final_resources_by_type = collections.defaultdict(list)
+  non_final_resources_by_type = collections.defaultdict(list)
+  for res_type, resources in all_resources_by_type.items():
+    for entry in resources:
+      # Entries in styleable that are not int[] are not actually resource ids
+      # but constants.
+      if rjava_build_options._IsResourceFinal(entry):
+        final_resources_by_type[res_type].append(entry)
+      else:
+        non_final_resources_by_type[res_type].append(entry)
+
+  # Here we diverge from what aapt does. Because we have so many
+  # resources, the onResourcesLoaded method was exceeding the 64KB limit that
+  # Java imposes. For this reason we split onResourcesLoaded into different
+  # methods for each resource type.
+  extends_string = ''
+  dep_path = ''
+  if grandparent_custom_package_name:
+    extends_string = 'extends {{ parent_path }}.R.{{ resource_type }} '
+    dep_path = GetCustomPackagePath(grandparent_custom_package_name)
+
+  # Don't actually mark fields as "final" or else R8 complains when aapt2 uses
+  # --proguard-conditional-keep-rules. E.g.:
+  # Rule precondition matches static final fields javac has inlined.
+  # Such rules are unsound as the shrinker cannot infer the inlining precisely.
+  template = Template("""/* AUTO-GENERATED FILE. DO NOT MODIFY. */
+
+package {{ package }};
+
+public final class R {
+    {% for resource_type in resource_types %}
+    public static class {{ resource_type }} """ + extends_string + """ {
+        {% for e in final_resources[resource_type] %}
+        public static {{ e.java_type }} {{ e.name }} = {{ e.value }};
+        {% endfor %}
+        {% for e in non_final_resources[resource_type] %}
+        {% if e.value != '0' %}
+        public static {{ e.java_type }} {{ e.name }} = {{ e.value }};
+        {% else %}
+        public static {{ e.java_type }} {{ e.name }};
+        {% endif %}
+        {% endfor %}
+    }
+    {% endfor %}
+    {% if has_on_resources_loaded %}
+    {% if fake_on_resources_loaded %}
+    public static void onResourcesLoaded(int packageId) {
+    }
+    {% else %}
+    private static boolean sResourcesDidLoad;
+
+    private static void patchArray(
+            int[] arr, int startIndex, int packageIdTransform) {
+        for (int i = startIndex; i < arr.length; ++i) {
+            arr[i] ^= packageIdTransform;
+        }
+    }
+
+    public static void onResourcesLoaded(int packageId) {
+        if (sResourcesDidLoad) {
+            return;
+        }
+        sResourcesDidLoad = true;
+        int packageIdTransform = (packageId ^ 0x7f) << 24;
+        {# aapt2 makes int[] resources refer to other resources by reference
+           rather than by value. Thus, need to transform the int[] resources
+           first, before the referenced resources are transformed in order to
+           ensure the transform applies exactly once.
+           See https://crbug.com/1237059 for context.
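+           For example (hypothetical packageId 0x24): packageIdTransform is
+           (0x24 ^ 0x7f) << 24 == 0x5b000000, so 0x7f0c0105 ^ 0x5b000000
+           == 0x240c0105.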
+ #} + {% for resource_type in resource_types %} + {% for e in non_final_resources[resource_type] %} + {% if e.java_type == 'int[]' %} + patchArray({{ e.resource_type }}.{{ e.name }}, {{ startIndex(e) }}, \ +packageIdTransform); + {% endif %} + {% endfor %} + {% endfor %} + {% for resource_type in resource_types %} + onResourcesLoaded{{ resource_type|title }}(packageIdTransform); + {% endfor %} + } + {% for res_type in resource_types %} + private static void onResourcesLoaded{{ res_type|title }} ( + int packageIdTransform) { + {% for e in non_final_resources[res_type] %} + {% if res_type != 'styleable' and e.java_type != 'int[]' %} + {{ e.resource_type }}.{{ e.name }} ^= packageIdTransform; + {% endif %} + {% endfor %} + } + {% endfor %} + {% endif %} + {% endif %} +} +""", + trim_blocks=True, + lstrip_blocks=True) + return template.render( + package=package, + resource_types=sorted(ALL_RESOURCE_TYPES), + has_on_resources_loaded=rjava_build_options.has_on_resources_loaded, + fake_on_resources_loaded=rjava_build_options.fake_on_resources_loaded, + final_resources=final_resources_by_type, + non_final_resources=non_final_resources_by_type, + startIndex=_GetNonSystemIndex, + parent_path=dep_path) + + +def ExtractBinaryManifestValues(aapt2_path, apk_path): + """Returns (version_code, version_name, package_name) for the given apk.""" + output = subprocess.check_output([ + aapt2_path, 'dump', 'xmltree', apk_path, '--file', 'AndroidManifest.xml' + ]).decode('utf-8') + version_code = re.search(r'versionCode.*?=(\d*)', output).group(1) + version_name = re.search(r'versionName.*?="(.*?)"', output).group(1) + package_name = re.search(r'package.*?="(.*?)"', output).group(1) + return version_code, version_name, package_name + + +def ExtractArscPackage(aapt2_path, apk_path): + """Returns (package_name, package_id) of resources.arsc from apk_path. + + When the apk does not have any entries in its resources file, in recent aapt2 + versions it will not contain a "Package" line. The package is not even in the + actual resources.arsc/resources.pb file (which itself is mostly empty). Thus + return (None, None) when dump succeeds and there are no errors to indicate + that the package name does not exist in the resources file. + """ + proc = subprocess.Popen([aapt2_path, 'dump', 'resources', apk_path], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + for line in proc.stdout: + line = line.decode('utf-8') + # Package name=org.chromium.webview_shell id=7f + if line.startswith('Package'): + proc.kill() + parts = line.split() + package_name = parts[1].split('=')[1] + package_id = parts[2][3:] + return package_name, int(package_id, 16) + + # aapt2 currently crashes when dumping webview resources, but not until after + # it prints the "Package" line (b/130553900). 
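+  # Reaching this point means no "Package" line was printed: either the
+  # resources file is empty (return None below) or aapt2 failed, which is
+  # distinguished by the presence of stderr output.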
+  stderr_output = proc.stderr.read().decode('utf-8')
+  if stderr_output:
+    sys.stderr.write(stderr_output)
+    raise Exception('Failed to find arsc package name')
+  return None, None
+
+
+def _RenameSubdirsWithPrefix(dir_path, prefix):
+  subdirs = [
+      d for d in os.listdir(dir_path)
+      if os.path.isdir(os.path.join(dir_path, d))
+  ]
+  renamed_subdirs = []
+  for d in subdirs:
+    old_path = os.path.join(dir_path, d)
+    new_path = os.path.join(dir_path, '{}_{}'.format(prefix, d))
+    renamed_subdirs.append(new_path)
+    os.rename(old_path, new_path)
+  return renamed_subdirs
+
+
+def _HasMultipleResDirs(zip_path):
+  """Checks for the magic comment set by prepare_resources.py.
+
+  Returns: True iff the zipfile has the magic comment that means it contains
+  multiple res/ dirs inside instead of just contents of a single res/ dir
+  (without a wrapping res/).
+  """
+  with zipfile.ZipFile(zip_path) as z:
+    return z.comment == MULTIPLE_RES_MAGIC_STRING
+
+
+def ExtractDeps(dep_zips, deps_dir):
+  """Extract a list of resource dependency zip files.
+
+  Args:
+    dep_zips: A list of zip file paths, each one will be extracted to
+      a subdirectory of |deps_dir|, named after the zip file's path (e.g.
+      '/some/path/foo.zip' -> '{deps_dir}/some_path_foo/').
+    deps_dir: Top-level extraction directory.
+  Returns:
+    The list of all sub-directory paths, relative to |deps_dir|.
+  Raises:
+    Exception: If a sub-directory already exists with the same name before
+      extraction.
+  """
+  dep_subdirs = []
+  for z in dep_zips:
+    subdirname = z.replace(os.path.sep, '_')
+    subdir = os.path.join(deps_dir, subdirname)
+    if os.path.exists(subdir):
+      raise Exception('Resource zip name conflict: ' + subdirname)
+    build_utils.ExtractAll(z, path=subdir)
+    if _HasMultipleResDirs(z):
+      # The basename of the directory is used to create a zip during resource
+      # compilation; include the path in the basename to help blame errors on
+      # the correct target. For example, directory 0_res may be renamed
+      # chrome_android_chrome_app_java_resources_0_res, pointing to the name
+      # and path of the android_resources target from whence it came.
+      subdir_subdirs = _RenameSubdirsWithPrefix(subdir, subdirname)
+      dep_subdirs.extend(subdir_subdirs)
+    else:
+      dep_subdirs.append(subdir)
+  return dep_subdirs
+
+
+class _ResourceBuildContext:
+  """A temporary directory for packaging and compiling Android resources.
+
+  Args:
+    temp_dir: Optional root build directory path. If None, a temporary
+      directory will be created, and removed in Close().
+  """
+
+  def __init__(self, temp_dir=None, keep_files=False):
+    """Initializes the context."""
+    # The top-level temporary directory.
+    if temp_dir:
+      self.temp_dir = temp_dir
+      os.makedirs(temp_dir)
+    else:
+      self.temp_dir = tempfile.mkdtemp()
+    self.remove_on_exit = not keep_files
+
+    # A location to store resources extracted from dependency zip files.
+    self.deps_dir = os.path.join(self.temp_dir, 'deps')
+    os.mkdir(self.deps_dir)
+    # A location to place aapt-generated files.
+    self.gen_dir = os.path.join(self.temp_dir, 'gen')
+    os.mkdir(self.gen_dir)
+    # A location to place generated R.java files.
+    self.srcjar_dir = os.path.join(self.temp_dir, 'java')
+    os.mkdir(self.srcjar_dir)
+    # Temporary file locations.
+    self.r_txt_path = os.path.join(self.gen_dir, 'R.txt')
+    self.srcjar_path = os.path.join(self.temp_dir, 'R.srcjar')
+    self.info_path = os.path.join(self.temp_dir, 'size.info')
+    self.stable_ids_path = os.path.join(self.temp_dir, 'in_ids.txt')
+    self.emit_ids_path = os.path.join(self.temp_dir, 'out_ids.txt')
+    self.proguard_path = os.path.join(self.temp_dir, 'keeps.flags')
+    self.proguard_main_dex_path = os.path.join(self.temp_dir, 'maindex.flags')
+    self.arsc_path = os.path.join(self.temp_dir, 'out.ap_')
+    self.proto_path = os.path.join(self.temp_dir, 'out.proto.ap_')
+    self.optimized_arsc_path = os.path.join(self.temp_dir, 'out.opt.ap_')
+    self.optimized_proto_path = os.path.join(self.temp_dir, 'out.opt.proto.ap_')
+
+  def Close(self):
+    """Close the context and destroy all temporary files."""
+    if self.remove_on_exit:
+      shutil.rmtree(self.temp_dir)
+
+
+@contextlib.contextmanager
+def BuildContext(temp_dir=None, keep_files=False):
+  """Generator for a _ResourceBuildContext instance."""
+  context = None
+  try:
+    context = _ResourceBuildContext(temp_dir, keep_files)
+    yield context
+  finally:
+    if context:
+      context.Close()
+
+
+def ParseAndroidResourceStringsFromXml(xml_data):
+  """Parse an Android xml resource file and extract strings from it.
+
+  Args:
+    xml_data: XML file data.
+  Returns:
+    A (dict, namespaces) tuple, where |dict| maps string names to their UTF-8
+    encoded value, and |namespaces| is a dictionary mapping prefixes to URLs
+    corresponding to namespaces declared in the <resources> element.
+  """
+  # NOTE: This uses regular expression matching because parsing with something
+  # like ElementTree makes it tedious to properly parse some of the structured
+  # text found in string resources, e.g.:
+  #   <string name="abc_shareactionprovider_share_with_application">\
+  #     "Condividi tramite <ns1:g id="APPLICATION_NAME">%s</ns1:g>"\
+  #   </string>
+  result = {}
+
+  # Find the <resources> start tag and extract namespaces from it.
+  m = re.search('<resources([^>]*)>', xml_data, re.MULTILINE)
+  if not m:
+    raise Exception('<resources> start tag expected: ' + xml_data)
+  input_data = xml_data[m.end():]
+  resource_attrs = m.group(1)
+  re_namespace = re.compile(r'\s*(xmlns:(\w+)="([^"]+)")')
+  namespaces = {}
+  while resource_attrs:
+    m = re_namespace.match(resource_attrs)
+    if not m:
+      break
+    namespaces[m.group(2)] = m.group(3)
+    resource_attrs = resource_attrs[m.end(1):]
+
+  # Find each string element now.
+  re_string_element_start = re.compile('<string ([^>]* )?name="([^">]+)"[^>]*>')
+  re_string_element_end = re.compile('</string>')
+  while input_data:
+    m = re_string_element_start.search(input_data)
+    if not m:
+      break
+    name = m.group(2)
+    input_data = input_data[m.end():]
+    m2 = re_string_element_end.search(input_data)
+    if not m2:
+      raise Exception('Expected closing string tag: ' + input_data)
+    text = input_data[:m2.start()]
+    input_data = input_data[m2.end():]
+    if len(text) != 0 and text[0] == '"' and text[-1] == '"':
+      text = text[1:-1]
+    result[name] = text
+
+  return result, namespaces
+
+
+def GenerateAndroidResourceStringsXml(names_to_utf8_text, namespaces=None):
+  """Generate an XML text corresponding to an Android resource strings map.
+
+  Args:
+    names_to_utf8_text: A dictionary mapping resource names to localized
+      text (encoded as UTF-8).
+    namespaces: A map of namespace prefix to URL.
+  Returns:
+    New non-Unicode string containing an XML data structure describing the
+    input as an Android resource .xml file.
+  """
+  result = '<?xml version="1.0" encoding="utf-8"?>\n'
+  result += '<resources'
+  if namespaces:
+    for prefix, url in sorted(namespaces.items()):
+      result += ' xmlns:%s="%s"' % (prefix, url)
+  result += '>\n'
+  if not names_to_utf8_text:
+    result += '</resources>\n'
+  else:
+    for name, utf8_text in sorted(names_to_utf8_text.items()):
+      result += '<string name="%s">"%s"</string>\n' % (name, utf8_text)
+    result += '</resources>\n'
+  return result.encode('utf8')
+
+
+def FilterAndroidResourceStringsXml(xml_file_path, string_predicate):
+  """Remove unwanted localized strings from an Android resource .xml file.
+
+  This function takes a |string_predicate| that decides whether each
+  <string> element should be kept in the file.
+
+  Args:
+    xml_file_path: Android resource strings xml file path.
+    string_predicate: A predicate function which will receive the string name
+      and shall return True iff the string should be kept.
+  """
+  with open(xml_file_path) as f:
+    xml_data = f.read()
+  strings_map, namespaces = ParseAndroidResourceStringsFromXml(xml_data)
+
+  string_deletion = False
+  for name in list(strings_map.keys()):
+    if not string_predicate(name):
+      del strings_map[name]
+      string_deletion = True
+
+  if string_deletion:
+    new_xml_data = GenerateAndroidResourceStringsXml(strings_map, namespaces)
+    with open(xml_file_path, 'wb') as f:
+      f.write(new_xml_data)
diff --git a/android/gyp/util/resource_utils_test.py b/android/gyp/util/resource_utils_test.py
new file mode 100755
index 000000000000..4b31e9257866
--- /dev/null
+++ b/android/gyp/util/resource_utils_test.py
@@ -0,0 +1,275 @@
+#!/usr/bin/env python3
+# coding: utf-8
+# Copyright 2018 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import sys
+import unittest
+
+sys.path.insert(
+    0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
+from util import build_utils
+
+# Required because the following import needs build/android/gyp in the
+# Python path to import util.build_utils.
+_BUILD_ANDROID_GYP_ROOT = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), os.pardir))
+sys.path.insert(1, _BUILD_ANDROID_GYP_ROOT)
+
+import resource_utils
+
+# pylint: disable=line-too-long
+
+_TEST_XML_INPUT_1 = '''<?xml version="1.0" encoding="utf-8"?>
+<resources xmlns:android="http://schemas.android.com/apk/res/android">
+<string name="copy_to_clipboard_failure_message">"Lõikelauale kopeerimine ebaõnnestus"</string>
+<string name="low_memory_error">"Eelmist toimingut ei saa vähese mälu tõttu lõpetada"</string>
+<string name="opening_file_error">"Valit. faili avamine ebaõnnestus"</string>
+<string name="structured_text">"This is %s"</string>
+</resources>
+'''
+
+_TEST_XML_OUTPUT_2 = '''<?xml version="1.0" encoding="utf-8"?>
+<resources xmlns:android="http://schemas.android.com/apk/res/android">
+<string name="low_memory_error">"Eelmist toimingut ei saa vähese mälu tõttu lõpetada"</string>
+<string name="structured_text">"This is %s"</string>
+</resources>
+'''
+
+# pylint: enable=line-too-long
+
+_TEST_XML_OUTPUT_EMPTY = '''<?xml version="1.0" encoding="utf-8"?>
+<resources>
+</resources>
+'''
+
+_TEST_RESOURCES_MAP_1 = {
+    'low_memory_error': 'Eelmist toimingut ei saa vähese mälu tõttu lõpetada',
+    'opening_file_error': 'Valit. faili avamine ebaõnnestus',
+    'copy_to_clipboard_failure_message': 'Lõikelauale kopeerimine ebaõnnestus',
+    'structured_text': 'This is %s',
+}
+
+_TEST_NAMESPACES_1 = {'android': 'http://schemas.android.com/apk/res/android'}
+
+_TEST_RESOURCES_ALLOWLIST_1 = ['low_memory_error', 'structured_text']
+
+# Extracted from one generated Chromium R.txt file, with string resource
+# names shuffled randomly.
+_TEST_R_TXT = r'''int anim abc_fade_in 0x7f050000
+int anim abc_fade_out 0x7f050001
+int anim abc_grow_fade_in_from_bottom 0x7f050002
+int array DefaultCookiesSettingEntries 0x7f120002
+int array DefaultCookiesSettingValues 0x7f120003
+int array DefaultGeolocationSettingEntries 0x7f120004
+int attr actionBarDivider 0x7f0100e7
+int attr actionBarStyle 0x7f0100e2
+int string AllowedDomainsForAppsDesc 0x7f0c0105
+int string AlternateErrorPagesEnabledDesc 0x7f0c0107
+int string AuthAndroidNegotiateAccountTypeDesc 0x7f0c0109
+int string AllowedDomainsForAppsTitle 0x7f0c0104
+int string AlternateErrorPagesEnabledTitle 0x7f0c0106
+int[] styleable SnackbarLayout { 0x0101011f, 0x7f010076, 0x7f0100ba }
+int styleable SnackbarLayout_android_maxWidth 0
+int styleable SnackbarLayout_elevation 2
+'''
+
+# Test allowlist R.txt file. Note that AlternateErrorPagesEnabledTitle is
+# listed as an 'anim' and should thus be skipped. Similarly the string
+# 'ThisStringDoesNotAppear' should not be in the final result.
+_TEST_ALLOWLIST_R_TXT = r'''int anim AlternateErrorPagesEnabledTitle 0x7f0eeeee +int string AllowedDomainsForAppsDesc 0x7f0c0105 +int string AlternateErrorPagesEnabledDesc 0x7f0c0107 +int string ThisStringDoesNotAppear 0x7f0fffff +''' + +_TEST_R_TEXT_RESOURCES_IDS = { + 0x7f0c0105: 'AllowedDomainsForAppsDesc', + 0x7f0c0107: 'AlternateErrorPagesEnabledDesc', +} + +# Names of string resources in _TEST_R_TXT, should be sorted! +_TEST_R_TXT_STRING_RESOURCE_NAMES = sorted([ + 'AllowedDomainsForAppsDesc', + 'AllowedDomainsForAppsTitle', + 'AlternateErrorPagesEnabledDesc', + 'AlternateErrorPagesEnabledTitle', + 'AuthAndroidNegotiateAccountTypeDesc', +]) + + +def _CreateTestFile(tmp_dir, file_name, file_data): + file_path = os.path.join(tmp_dir, file_name) + with open(file_path, 'wt') as f: + f.write(file_data) + return file_path + + + +class ResourceUtilsTest(unittest.TestCase): + + def test_GetRTxtStringResourceNames(self): + with build_utils.TempDir() as tmp_dir: + tmp_file = _CreateTestFile(tmp_dir, "test_R.txt", _TEST_R_TXT) + self.assertListEqual( + resource_utils.GetRTxtStringResourceNames(tmp_file), + _TEST_R_TXT_STRING_RESOURCE_NAMES) + + def test_GenerateStringResourcesAllowList(self): + with build_utils.TempDir() as tmp_dir: + tmp_module_rtxt_file = _CreateTestFile(tmp_dir, "test_R.txt", _TEST_R_TXT) + tmp_allowlist_rtxt_file = _CreateTestFile(tmp_dir, "test_allowlist_R.txt", + _TEST_ALLOWLIST_R_TXT) + self.assertDictEqual( + resource_utils.GenerateStringResourcesAllowList( + tmp_module_rtxt_file, tmp_allowlist_rtxt_file), + _TEST_R_TEXT_RESOURCES_IDS) + + def test_IsAndroidLocaleQualifier(self): + good_locales = [ + 'en', + 'en-rUS', + 'fil', + 'fil-rPH', + 'iw', + 'iw-rIL', + 'b+en', + 'b+en+US', + 'b+ja+Latn', + 'b+ja+JP+Latn', + 'b+cmn+Hant-TW', + ] + bad_locales = [ + 'e', 'english', 'en-US', 'en_US', 'en-rus', 'b+e', 'b+english', 'b+ja+' + ] + for locale in good_locales: + self.assertTrue( + resource_utils.IsAndroidLocaleQualifier(locale), + msg="'%s' should be a good locale!" % locale) + + for locale in bad_locales: + self.assertFalse( + resource_utils.IsAndroidLocaleQualifier(locale), + msg="'%s' should be a bad locale!" 
% locale) + + def test_ToAndroidLocaleName(self): + _TEST_CHROMIUM_TO_ANDROID_LOCALE_MAP = { + 'en': 'en', + 'en-US': 'en-rUS', + 'en-FOO': 'en-rFOO', + 'fil': 'tl', + 'tl': 'tl', + 'he': 'iw', + 'he-IL': 'iw-rIL', + 'id': 'in', + 'id-BAR': 'in-rBAR', + 'nb': 'nb', + 'yi': 'ji' + } + for chromium_locale, android_locale in \ + _TEST_CHROMIUM_TO_ANDROID_LOCALE_MAP.items(): + result = resource_utils.ToAndroidLocaleName(chromium_locale) + self.assertEqual(result, android_locale) + + def test_ToChromiumLocaleName(self): + _TEST_ANDROID_TO_CHROMIUM_LOCALE_MAP = { + 'foo': 'foo', + 'foo-rBAR': 'foo-BAR', + 'b+lll': 'lll', + 'b+ll+Extra': 'll', + 'b+ll+RR': 'll-RR', + 'b+lll+RR+Extra': 'lll-RR', + 'b+ll+RRR+Extra': 'll-RRR', + 'b+ll+Ssss': 'll-Ssss', + 'b+ll+Ssss+Extra': 'll-Ssss', + 'b+ll+Ssss+RR': 'll-Ssss-RR', + 'b+ll+Ssss+RRR': 'll-Ssss-RRR', + 'b+ll+Ssss+RRR+Extra': 'll-Ssss-RRR', + 'b+ll+Whatever': 'll', + 'en': 'en', + 'en-rUS': 'en-US', + 'en-US': None, + 'en-FOO': None, + 'en-rFOO': 'en-FOO', + 'es-rES': 'es-ES', + 'es-rUS': 'es-419', + 'tl': 'fil', + 'fil': 'fil', + 'iw': 'he', + 'iw-rIL': 'he-IL', + 'b+iw+IL': 'he-IL', + 'in': 'id', + 'in-rBAR': 'id-BAR', + 'id-rBAR': 'id-BAR', + 'nb': 'nb', + 'no': 'nb', # http://crbug.com/920960 + } + for android_locale, chromium_locale in \ + _TEST_ANDROID_TO_CHROMIUM_LOCALE_MAP.items(): + result = resource_utils.ToChromiumLocaleName(android_locale) + self.assertEqual(result, chromium_locale) + + def test_FindLocaleInStringResourceFilePath(self): + self.assertEqual( + None, + resource_utils.FindLocaleInStringResourceFilePath( + 'res/values/whatever.xml')) + self.assertEqual( + 'foo', + resource_utils.FindLocaleInStringResourceFilePath( + 'res/values-foo/whatever.xml')) + self.assertEqual( + 'foo-rBAR', + resource_utils.FindLocaleInStringResourceFilePath( + 'res/values-foo-rBAR/whatever.xml')) + self.assertEqual( + None, + resource_utils.FindLocaleInStringResourceFilePath( + 'res/values-foo/ignore-subdirs/whatever.xml')) + + def test_ParseAndroidResourceStringsFromXml(self): + ret, namespaces = resource_utils.ParseAndroidResourceStringsFromXml( + _TEST_XML_INPUT_1) + self.assertDictEqual(ret, _TEST_RESOURCES_MAP_1) + self.assertDictEqual(namespaces, _TEST_NAMESPACES_1) + + def test_GenerateAndroidResourceStringsXml(self): + # Fist, an empty strings map, with no namespaces + result = resource_utils.GenerateAndroidResourceStringsXml({}) + self.assertEqual(result.decode('utf8'), _TEST_XML_OUTPUT_EMPTY) + + result = resource_utils.GenerateAndroidResourceStringsXml( + _TEST_RESOURCES_MAP_1, _TEST_NAMESPACES_1) + self.assertEqual(result.decode('utf8'), _TEST_XML_INPUT_1) + + @staticmethod + def _CreateTestResourceFile(output_dir, locale, string_map, namespaces): + values_dir = os.path.join(output_dir, 'values-' + locale) + build_utils.MakeDirectory(values_dir) + file_path = os.path.join(values_dir, 'strings.xml') + with open(file_path, 'wb') as f: + file_data = resource_utils.GenerateAndroidResourceStringsXml( + string_map, namespaces) + f.write(file_data) + return file_path + + def _CheckTestResourceFile(self, file_path, expected_data): + with open(file_path) as f: + file_data = f.read() + self.assertEqual(file_data, expected_data) + + def test_FilterAndroidResourceStringsXml(self): + with build_utils.TempDir() as tmp_path: + test_file = self._CreateTestResourceFile( + tmp_path, 'foo', _TEST_RESOURCES_MAP_1, _TEST_NAMESPACES_1) + resource_utils.FilterAndroidResourceStringsXml( + test_file, lambda x: x in _TEST_RESOURCES_ALLOWLIST_1) + 
self._CheckTestResourceFile(test_file, _TEST_XML_OUTPUT_2)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/android/gyp/util/resources_parser.py b/android/gyp/util/resources_parser.py
new file mode 100644
index 000000000000..86d85407d3b4
--- /dev/null
+++ b/android/gyp/util/resources_parser.py
@@ -0,0 +1,155 @@
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import re
+from xml.etree import ElementTree
+
+from util import build_utils
+from util import resource_utils
+import action_helpers  # build_utils adds //build to sys.path.
+
+_TextSymbolEntry = collections.namedtuple(
+    'RTextEntry', ('java_type', 'resource_type', 'name', 'value'))
+
+_DUMMY_RTXT_ID = '0x7f010001'
+_DUMMY_RTXT_INDEX = '1'
+
+
+def _ResourceNameToJavaSymbol(resource_name):
+  return re.sub(r'[\.:]', '_', resource_name)
+
+
+class RTxtGenerator:
+  def __init__(self,
+               res_dirs,
+               ignore_pattern=resource_utils.AAPT_IGNORE_PATTERN):
+    self.res_dirs = res_dirs
+    self.ignore_pattern = ignore_pattern
+
+  def _ParseDeclareStyleable(self, node):
+    ret = set()
+    stylable_name = _ResourceNameToJavaSymbol(node.attrib['name'])
+    ret.add(
+        _TextSymbolEntry('int[]', 'styleable', stylable_name,
+                         '{{{}}}'.format(_DUMMY_RTXT_ID)))
+    for child in node:
+      if child.tag == 'eat-comment':
+        continue
+      if child.tag != 'attr':
+        # This parser expects everything inside <declare-styleable/> to be
+        # either an attr or an eat-comment. If new resource xml files are
+        # added that do not conform to this, this parser needs updating.
+        raise Exception('Unexpected tag {} inside <declare-styleable>'.format(
+            child.tag))
+      entry_name = '{}_{}'.format(
+          stylable_name, _ResourceNameToJavaSymbol(child.attrib['name']))
+      ret.add(
+          _TextSymbolEntry('int', 'styleable', entry_name, _DUMMY_RTXT_INDEX))
+      if not child.attrib['name'].startswith('android:'):
+        resource_name = _ResourceNameToJavaSymbol(child.attrib['name'])
+        ret.add(_TextSymbolEntry('int', 'attr', resource_name, _DUMMY_RTXT_ID))
+      for entry in child:
+        if entry.tag not in ('enum', 'flag'):
+          # This parser expects everything inside <attr/> to be either an
+          # <enum> or a <flag>. If new resource xml files are added that do
+          # not conform to this, this parser needs updating.
+          raise Exception('Unexpected tag {} inside <attr>'.format(entry.tag))
+        resource_name = _ResourceNameToJavaSymbol(entry.attrib['name'])
+        ret.add(_TextSymbolEntry('int', 'id', resource_name, _DUMMY_RTXT_ID))
+    return ret
+
+  def _ExtractNewIdsFromNode(self, node):
+    ret = set()
+    # Sometimes there are @+id/ in random attributes (not just in android:id)
+    # and apparently that is valid. See:
+    # https://developer.android.com/reference/android/widget/RelativeLayout.LayoutParams.html
+    for value in node.attrib.values():
+      if value.startswith('@+id/'):
+        resource_name = value[5:]
+        ret.add(_TextSymbolEntry('int', 'id', resource_name, _DUMMY_RTXT_ID))
+    for child in node:
+      ret.update(self._ExtractNewIdsFromNode(child))
+    return ret
+
+  def _ParseXml(self, xml_path):
+    try:
+      return ElementTree.parse(xml_path).getroot()
+    except Exception as e:
+      raise RuntimeError('Failure parsing {}:\n'.format(xml_path)) from e
+
+  def _ExtractNewIdsFromXml(self, xml_path):
+    return self._ExtractNewIdsFromNode(self._ParseXml(xml_path))
+
+  def _ParseValuesXml(self, xml_path):
+    ret = set()
+    root = self._ParseXml(xml_path)
+
+    assert root.tag == 'resources'
+    for child in root:
+      if child.tag == 'eat-comment':
+        # eat-comment is just a dummy documentation element.
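+        # (aapt's <eat-comment> merely swallows the XML comment that precedes
+        # it; it defines no resource, hence no R.txt entry.)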
+ continue + if child.tag == 'skip': + # skip is just a dummy element. + continue + if child.tag == 'declare-styleable': + ret.update(self._ParseDeclareStyleable(child)) + else: + if child.tag in ('item', 'public'): + resource_type = child.attrib['type'] + elif child.tag in ('array', 'integer-array', 'string-array'): + resource_type = 'array' + else: + resource_type = child.tag + parsed_element = ElementTree.tostring(child, encoding='unicode').strip() + assert resource_type in resource_utils.ALL_RESOURCE_TYPES, ( + f'Infered resource type ({resource_type}) from xml entry ' + f'({parsed_element}) (found in {xml_path}) is not listed in ' + 'resource_utils.ALL_RESOURCE_TYPES. Teach resources_parser.py how ' + 'to parse this entry and/or add to the list.') + name = _ResourceNameToJavaSymbol(child.attrib['name']) + ret.add(_TextSymbolEntry('int', resource_type, name, _DUMMY_RTXT_ID)) + return ret + + def _CollectResourcesListFromDirectory(self, res_dir): + ret = set() + globs = resource_utils._GenerateGlobs(self.ignore_pattern) + for root, _, files in os.walk(res_dir): + resource_type = os.path.basename(root) + if '-' in resource_type: + resource_type = resource_type[:resource_type.index('-')] + for f in files: + if build_utils.MatchesGlob(f, globs): + continue + if resource_type == 'values': + ret.update(self._ParseValuesXml(os.path.join(root, f))) + else: + if '.' in f: + resource_name = f[:f.index('.')] + else: + resource_name = f + ret.add( + _TextSymbolEntry('int', resource_type, resource_name, + _DUMMY_RTXT_ID)) + # Other types not just layouts can contain new ids (eg: Menus and + # Drawables). Just in case, look for new ids in all files. + if f.endswith('.xml'): + ret.update(self._ExtractNewIdsFromXml(os.path.join(root, f))) + return ret + + def _CollectResourcesListFromDirectories(self): + ret = set() + for res_dir in self.res_dirs: + ret.update(self._CollectResourcesListFromDirectory(res_dir)) + return sorted(ret) + + def WriteRTxtFile(self, rtxt_path): + resources = self._CollectResourcesListFromDirectories() + with action_helpers.atomic_output(rtxt_path, mode='w') as f: + for resource in resources: + line = '{0.java_type} {0.resource_type} {0.name} {0.value}\n'.format( + resource) + f.write(line) diff --git a/android/gyp/util/server_utils.py b/android/gyp/util/server_utils.py new file mode 100644 index 000000000000..b634cf978ed3 --- /dev/null +++ b/android/gyp/util/server_utils.py @@ -0,0 +1,47 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import contextlib +import json +import os +import socket + +# Use a unix abstract domain socket: +# https://man7.org/linux/man-pages/man7/unix.7.html#:~:text=abstract: +SOCKET_ADDRESS = '\0chromium_build_server_socket' +BUILD_SERVER_ENV_VARIABLE = 'INVOKED_BY_BUILD_SERVER' + + +def MaybeRunCommand(name, argv, stamp_file, force): + """Returns True if the command was successfully sent to the build server.""" + + # When the build server runs a command, it sets this environment variable. + # This prevents infinite recursion where the script sends a request to the + # build server, then the build server runs the script, and then the script + # sends another request to the build server. 
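+  # A hypothetical flow: ninja runs a script -> the script asks the server to
+  # run it -> the server re-runs the script with this variable set -> the
+  # re-run does the real work instead of asking the server again.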
+ if BUILD_SERVER_ENV_VARIABLE in os.environ: + return False + with contextlib.closing(socket.socket(socket.AF_UNIX)) as sock: + try: + sock.connect(SOCKET_ADDRESS) + sock.sendall( + json.dumps({ + 'name': name, + 'cmd': argv, + 'cwd': os.getcwd(), + 'stamp_file': stamp_file, + }).encode('utf8')) + except socket.error as e: + # [Errno 111] Connection refused. Either the server has not been started + # or the server is not currently accepting new connections. + if e.errno == 111: + if force: + raise RuntimeError( + '\n\nBuild server is not running and ' + 'android_static_analysis="build_server" is set.\nPlease run ' + 'this command in a separate terminal:\n\n' + '$ build/android/fast_local_dev_server.py\n\n') from None + return False + raise e + return True diff --git a/android/gyp/validate_inputs.py b/android/gyp/validate_inputs.py new file mode 100755 index 000000000000..e6435d600828 --- /dev/null +++ b/android/gyp/validate_inputs.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python3 +# +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Ensures inputs exist and writes a stamp file.""" + +import argparse +import pathlib +import sys + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--stamp', help='Path to touch on success.') + parser.add_argument('inputs', nargs='+', help='Files to check.') + + args = parser.parse_args() + + for path in args.inputs: + path_obj = pathlib.Path(path) + if not path_obj.is_file(): + if not path_obj.exists(): + sys.stderr.write(f'File not found: {path}\n') + else: + sys.stderr.write(f'Not a file: {path}\n') + sys.exit(1) + + if args.stamp: + pathlib.Path(args.stamp).touch() + + +if __name__ == '__main__': + main() diff --git a/android/gyp/validate_static_library_dex_references.py b/android/gyp/validate_static_library_dex_references.py new file mode 100755 index 000000000000..419776e163be --- /dev/null +++ b/android/gyp/validate_static_library_dex_references.py @@ -0,0 +1,93 @@ +#!/usr/bin/env python3 +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import os +import re +import sys +import zipfile + +sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir)) +from pylib.dex import dex_parser +from util import build_utils +import action_helpers # build_utils adds //build to sys.path. 
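+
+# Overview: the static library APK is proguarded together with the APKs that
+# depend on it, so its dex files must not reference types that are defined
+# only in those dependent APKs. The check below intersects the set of types
+# referenced by the static library's dex files with the set of types defined
+# by each dependent APK's dex files, and reports any overlap as an error.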
+
+_FLAGS_PATH = (
+    '//chrome/android/java/static_library_dex_reference_workarounds.flags')
+
+
+def _FindIllegalStaticLibraryReferences(static_lib_dex_files,
+                                        main_apk_dex_files):
+  main_apk_defined_types = set()
+  for dex_file in main_apk_dex_files:
+    for class_def_item in dex_file.class_def_item_list:
+      main_apk_defined_types.add(
+          dex_file.GetTypeString(class_def_item.class_idx))
+
+  static_lib_referenced_types = set()
+  for dex_file in static_lib_dex_files:
+    for type_item in dex_file.type_item_list:
+      static_lib_referenced_types.add(
+          dex_file.GetString(type_item.descriptor_idx))
+
+  return main_apk_defined_types.intersection(static_lib_referenced_types)
+
+
+def _DexFilesFromPath(path):
+  if zipfile.is_zipfile(path):
+    with zipfile.ZipFile(path) as z:
+      return [
+          dex_parser.DexFile(bytearray(z.read(name))) for name in z.namelist()
+          if re.match(r'.*classes[0-9]*\.dex$', name)
+      ]
+  else:
+    # Read raw .dex files in binary mode and return a list so that both
+    # branches yield the same shape.
+    with open(path, 'rb') as f:
+      return [dex_parser.DexFile(bytearray(f.read()))]
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = argparse.ArgumentParser()
+  action_helpers.add_depfile_arg(parser)
+  parser.add_argument(
+      '--stamp', required=True, help='Path to file to touch upon success.')
+  parser.add_argument(
+      '--static-library-dex',
+      required=True,
+      help='classes.dex or classes.zip for the static library APK that was '
+      'proguarded with other dependent APKs')
+  parser.add_argument(
+      '--static-library-dependent-dex',
+      required=True,
+      action='append',
+      dest='static_library_dependent_dexes',
+      help='classes.dex or classes.zip for the APKs that use the static '
+      'library APK')
+  args = parser.parse_args(args)
+
+  static_library_dexfiles = _DexFilesFromPath(args.static_library_dex)
+  for path in args.static_library_dependent_dexes:
+    dependent_dexfiles = _DexFilesFromPath(path)
+    illegal_references = _FindIllegalStaticLibraryReferences(
+        static_library_dexfiles, dependent_dexfiles)
+
+    if illegal_references:
+      msg = 'Found illegal references from {} to {}\n'.format(
+          args.static_library_dex, path)
+      msg += 'Add a -keep rule to avoid this. '
+      msg += 'See {} for an example and why this is necessary.\n'.format(
+          _FLAGS_PATH)
+      msg += 'The illegal references are:\n'
+      msg += '\n'.join(illegal_references)
+      sys.stderr.write(msg)
+      sys.exit(1)
+
+  input_paths = [args.static_library_dex] + args.static_library_dependent_dexes
+  build_utils.Touch(args.stamp)
+  action_helpers.write_depfile(args.depfile, args.stamp, inputs=input_paths)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/android/gyp/validate_static_library_dex_references.pydeps b/android/gyp/validate_static_library_dex_references.pydeps
new file mode 100644
index 000000000000..7fd91c201e4c
--- /dev/null
+++ b/android/gyp/validate_static_library_dex_references.pydeps
@@ -0,0 +1,10 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android/gyp --output build/android/gyp/validate_static_library_dex_references.pydeps build/android/gyp/validate_static_library_dex_references.py
+../../action_helpers.py
+../../gn_helpers.py
+../pylib/__init__.py
+../pylib/dex/__init__.py
+../pylib/dex/dex_parser.py
+util/__init__.py
+util/build_utils.py
+validate_static_library_dex_references.py
diff --git a/android/gyp/write_build_config.py b/android/gyp/write_build_config.py
new file mode 100755
index 000000000000..7976dd89cc75
--- /dev/null
+++ b/android/gyp/write_build_config.py
@@ -0,0 +1,2215 @@
+#!/usr/bin/env python3
+#
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes a build_config file.
+
+The build_config file for a target is a json file containing information about
+how to build that target based on the target's dependencies. This includes
+things like: the javac classpath, the list of android resources dependencies,
+etc. It also includes the information needed to create the build_config for
+other targets that depend on this one.
+
+Android build scripts should not refer to the build_config directly, and the
+build specification should instead pass information in using the special
+file-arg syntax (see build_utils.py:ExpandFileArgs). That syntax allows passing
+of values in a json dict in a file and looks like this:
+  --python-arg=@FileArg(build_config_path:javac:classpath)
+
+Note: If paths to input files are passed in this way, it is important that:
+  1. inputs/deps of the action ensure that the files are available the first
+  time the action runs.
+  2. Either (a) or (b)
+    a. inputs/deps ensure that the action runs whenever one of the files changes
+    b. the files are added to the action's depfile
+
+NOTE: All paths within .build_config files are relative to $CHROMIUM_OUTPUT_DIR.
+
+This is a technical note describing the format of .build_config files.
+Please keep it updated when changing this script. For extraction and
+visualization instructions, see build/android/docs/build_config.md
+
+------------- BEGIN_MARKDOWN ---------------------------------------------------
+The .build_config file format
+===
+
+# Introduction
+
+This document tries to explain the format of `.build_config` files generated
+during the Android build of Chromium. For a higher-level explanation of these
+files, please read
+[build/android/docs/build_config.md](build/android/docs/build_config.md).
+
+# The `deps_info` top-level dictionary:
+
+All `.build_config` files have a required `'deps_info'` key, whose value is a
+dictionary describing the target and its dependencies.
+The latter has the following required keys:
+
+## Required keys in `deps_info`:
+
+* `deps_info['type']`: The target type as a string.
+
+  The following types are known to the internal GN build rules and to the
+  build scripts:
+
+  * [java_binary](#target_java_binary)
+  * [java_annotation_processor](#target_java_annotation_processor)
+  * [robolectric_binary](#target_robolectric_binary)
+  * [java_library](#target_java_library)
+  * [android_assets](#target_android_assets)
+  * [android_resources](#target_android_resources)
+  * [android_apk](#target_android_apk)
+  * [android_app_bundle_module](#target_android_app_bundle_module)
+  * [android_app_bundle](#target_android_app_bundle)
+  * [dist_jar](#target_dist_jar)
+  * [dist_aar](#target_dist_aar)
+  * [group](#target_group)
+
+  See later sections for more details on some of these.
+
+* `deps_info['path']`: Path to the target's `.build_config` file.
+
+* `deps_info['name']`: Nothing more than the basename of `deps_info['path']`
+at the moment.
+
+* `deps_info['deps_configs']`: List of paths to the `.build_config` files of
+all *direct* dependencies of the current target.
+
+  NOTE: Because the `.build_config` of a given target is always generated
+  after the `.build_config` of its dependencies, the `write_build_config.py`
+  script can use chains of `deps_configs` to compute transitive dependencies
+  for each target when needed.
+
+## Optional keys in `deps_info`:
+
+The following keys will only appear in the `.build_config` files of certain
+target types:
+
+* `deps_info['requires_android']`: True to indicate that the corresponding
+code uses Android-specific APIs, and thus cannot run on the host within a
+regular JVM. May only appear in Java-related targets.
+
+* `deps_info['supports_android']`:
+May appear in Java-related targets, and indicates that
+the corresponding code doesn't use Java APIs that are not available on
+Android. As such it may run either on the host or on an Android device.
+
+* `deps_info['assets']`:
+Only seen for the [`android_assets`](#target_android_assets) type. See below.
+
+* `deps_info['package_name']`: Java package name associated with this target.
+
+  NOTE: For `android_resources` targets,
+  this is the package name for the corresponding R class. For `android_apk`
+  targets, this is the corresponding package name. This does *not* appear for
+  other target types.
+
+* `deps_info['android_manifest']`:
+Path to an AndroidManifest.xml file related to the current target.
+
+* `deps_info['base_module_config']`:
+Only seen for the [`android_app_bundle`](#target_android_app_bundle) type.
+Path to the base module for the bundle.
+
+* `deps_info['module_name']`:
+Only seen for the
+[`android_app_bundle_module`](#target_android_app_bundle_module)
+type. The name of the feature module.
+
+* `deps_info['dependency_zips']`:
+List of `deps_info['resources_zip']` entries for all `android_resources`
+dependencies of the current target.
+
+* `deps_info['extra_package_names']`:
+Always empty for `android_resources` types. Otherwise,
+the list of `deps_info['package_name']` entries for all `android_resources`
+dependencies of the current target. Computed automatically by
+`write_build_config.py`.
+
+* `deps_info['dependency_r_txt_files']`:
+Exists only on `dist_aar`. It is the list of `deps_info['r_text_path']` entries
+from transitive dependencies. Computed automatically.
+
+
+# `.build_config` target types description:
+
+## Target type `group`:
+
+This type corresponds to a simple target that is only used to group
+dependencies.
+It matches the `java_group()` GN template. Its only top-level
+`deps_info` keys are `supports_android` (always True), and `deps_configs`.
+
+
+## Target type `android_resources`:
+
+This type corresponds to targets that are used to group Android resource files.
+For example, all `android_resources` dependencies of an `android_apk` will
+end up packaged into the final APK by the build system.
+
+It uses the following keys:
+
+
+* `deps_info['res_sources_path']`:
+Path to a file containing a list of resource source files used by the
+android_resources target.
+
+* `deps_info['resources_zip']`:
+*Required*. Path to the `.resources.zip` file that contains all raw/uncompiled
+resource files for this target (and also no `R.txt`, `R.java` or `R.class`).
+
+  If `deps_info['res_sources_path']` is missing, this must point to a prebuilt
+  `.aar` archive containing resources. Otherwise, this will point to a zip
+  archive generated at build time, wrapping the sources listed in
+  `deps_info['res_sources_path']` into a single zip file.
+
+* `deps_info['package_name']`:
+Java package name that the R class for this target belongs to.
+
+* `deps_info['android_manifest']`:
+Optional. Path to the top-level Android manifest file associated with these
+resources (if not provided, an empty manifest will be used to generate R.txt).
+
+* `deps_info['resource_overlay']`:
+Optional. Whether the resources in resources_zip should override resources with
+the same name. Does not affect the behaviour of any android_resources()
+dependencies of this target. If a target with resource_overlay=true depends
+on another target with resource_overlay=true, the target with the dependency
+overrides the other.
+
+* `deps_info['r_text_path']`:
+Provides the path to the `R.txt` file that describes the resources wrapped by
+this target. Normally this file is generated from the content of the resource
+directories or zip file, but some targets can provide their own `R.txt` file
+if they want.
+
+* `deps_info['srcjar_path']`:
+Path to the `.srcjar` file that contains the auto-generated `R.java` source
+file corresponding to the content of `deps_info['r_text_path']`. This is
+*always* generated from the content of `deps_info['r_text_path']` by the
+`build/android/gyp/process_resources.py` script.
+
+## Target type `android_assets`:
+
+This type corresponds to targets used to group Android assets, i.e. liberal
+files that will be placed under `//assets/` within the final APK.
+
+These use a `deps_info['assets']` key to hold a dictionary of values related
+to assets covered by this target.
+
+* `assets['sources']`:
+The list of all asset source paths for this target. Each source path can
+use an optional `:<zipPath>` suffix, where `<zipPath>` is the final location
+of the assets (relative to `//assets/`) within the APK.
+
+* `assets['outputs']`:
+Optional. Some of the sources might be renamed before being stored in the
+final //assets/ sub-directory. When this happens, this contains a list of
+all renamed output file paths.
+
+  NOTE: When not empty, the first items of `assets['sources']` must match
+  every item in this list. Extra sources correspond to non-renamed sources.
+
+  NOTE: This comes from the `asset_renaming_destinations` parameter for the
+  `android_assets()` GN template.
+
+* `assets['disable_compression']`:
+Optional. Will be True to indicate that these assets should be stored
+uncompressed in the final APK. For example, this is necessary for locale
+.pak files used by the System WebView feature.
+
+* `assets['treat_as_locale_paks']`:
+Optional.
+Will be True to indicate that these assets are locale `.pak` files
+(containing localized strings for C++). These are later processed to generate
+a special ``.build_config`.java` source file, listing all supported Locales in
+the current build.
+
+
+## Target type `java_library`:
+
+This type is used to describe targets that wrap Java bytecode, either created
+by compiling sources or provided as a prebuilt jar.
+
+* `deps_info['public_deps_configs']`: List of paths to the `.build_config` files
+of *direct* dependencies of the current target which are exposed as part of the
+current target's public API.
+
+* `deps_info['unprocessed_jar_path']`:
+Path to the original .jar file for this target, before any kind of processing
+through Proguard or other tools. For most targets this is generated
+from sources, with a name like `$target_name.javac.jar`. However, when using
+a prebuilt jar, this will point to the source archive directly.
+
+* `deps_info['device_jar_path']`:
+Path to a file that is the result of processing
+`deps_info['unprocessed_jar_path']` with various tools (ready to be dexed).
+
+* `deps_info['host_jar_path']`:
+Path to a file that is the result of processing
+`deps_info['unprocessed_jar_path']` with various tools (used by java_binary).
+
+* `deps_info['interface_jar_path']`:
+Path to the interface jar generated for this library. This corresponds to
+a jar file that only contains declarations. Generated by running `ijar` on
+`deps_info['unprocessed_jar_path']` or the `turbine` tool on source files.
+
+* `deps_info['dex_path']`:
+Path to the `.dex` file generated for this target, from
+`deps_info['device_jar_path']` unless this comes from a prebuilt `.aar` archive.
+
+* `deps_info['is_prebuilt']`:
+True to indicate that this target corresponds to a prebuilt `.jar` file.
+In this case, `deps_info['unprocessed_jar_path']` will point to the source
+`.jar` file. Otherwise, it will point to a build-generated file.
+
+* `deps_info['target_sources_file']`:
+Path to a single `.sources` file listing all the Java and Kotlin sources that
+were used to generate the library (simple text format, one source path per
+line).
+
+* `deps_info['lint_android_manifest']`:
+Path to an AndroidManifest.xml file to use for this lint target.
+
+* `deps_info['lint_sources']`:
+The list of all `deps_info['target_sources_file']` entries for all library
+dependencies that are chromium code. Note: this is a list of files, where each
+file contains a list of Java and Kotlin source files. This is used for lint.
+
+* `deps_info['lint_aars']`:
+List of all aars from transitive java dependencies. This allows lint to collect
+their custom annotations.zip and run checks like @IntDef on their annotations.
+
+* `deps_info['lint_srcjars']`:
+List of all bundled srcjars of all transitive java library targets. Excludes
+non-chromium java libraries.
+
+* `deps_info['lint_resource_sources']`:
+List of all resource source files belonging to all transitive resource
+dependencies of this target. Excludes resources owned by non-chromium code.
+
+* `deps_info['lint_resource_zips']`:
+List of all resource zip files belonging to all transitive resource
+dependencies of this target. Excludes resources owned by non-chromium code.
+
+* `deps_info['javac']`:
+A dictionary containing information about the way the sources in this library
+are compiled. Also appears on other Java-related targets. See the [dedicated
+section about this](#dict_javac) below for details.
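+
+For orientation, here is a heavily trimmed example of what such an entry can
+look like. The snippet is illustrative only: the names and paths below are
+made up rather than taken from a real build:
+
+    {
+      "deps_info": {
+        "type": "java_library",
+        "name": "foo_java.build_config.json",
+        "path": "gen/foo_java.build_config.json",
+        "deps_configs": ["gen/bar_java.build_config.json"],
+        "unprocessed_jar_path": "obj/foo_java.javac.jar",
+        "interface_jar_path": "obj/foo_java.turbine.jar",
+        "supports_android": true,
+        "requires_android": false
+      }
+    }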
+
+* `deps_info['javac_full_classpath']`:
+The classpath used when performing bytecode processing. Essentially the
+collection of all `deps_info['unprocessed_jar_path']` entries for the target
+and all its dependencies.
+
+* `deps_info['javac_full_interface_classpath']`:
+The classpath used when running the errorprone compiler.
+
+* `deps_info['proguard_enabled']`:
+True to indicate that ProGuard processing is enabled for this target.
+
+* `deps_info['proguard_configs']`:
+A list of paths to ProGuard configuration files related to this library.
+
+* `deps_info['extra_classpath_jars']`:
+For some Java related types, a list of extra `.jar` files to use at build time
+but not at runtime.
+
+## Target type `java_binary`:
+
+This type corresponds to a Java binary, which is nothing more than a
+`java_library` target that also provides a main class name. It thus inherits
+all entries from the `java_library` type, and adds:
+
+* `deps_info['main_class']`:
+Name of the main Java class that serves as an entry point for the binary.
+
+* `deps_info['device_classpath']`:
+The classpath used when running a Java or Android binary. Essentially the
+collection of all `deps_info['device_jar_path']` entries for the target and all
+its dependencies.
+
+* `deps_info['all_dex_files']`:
+The list of paths to all `deps_info['dex_path']` entries for all libraries
+that comprise this APK. Valid only for debug builds.
+
+* `deps_info['preferred_dep']`:
+Whether the target should be the preferred dep. This is usually the case when
+we have a java_group that depends on either the public or internal dep
+accordingly, and it is better to depend on the group rather than the underlying
+dep. Another case is android_library_factory targets: the factory target should
+be preferred over the actual implementation.
+
+## Target type `robolectric_binary`:
+
+A target type for JUnit-specific binaries. Identical to
+[`java_binary`](#target_java_binary) in the context of `.build_config` files,
+except the name.
+
+
+## Target type \
+`java_annotation_processor`:
+
+A target type for Java annotation processors. Identical to
+[`java_binary`](#target_java_binary) in the context of `.build_config` files,
+except for the name, and that it requires a `deps_info['main_class']` entry.
+
+
+## Target type `android_apk`:
+
+Corresponds to an Android APK. Inherits from the
+[`java_binary`](#target_java_binary) type and adds:
+
+* `deps_info['apk_path']`:
+Path to the raw, unsigned, APK generated by this target.
+
+* `deps_info['incremental_apk_path']`:
+Path to the raw, unsigned, incremental APK generated by this target.
+
+* `deps_info['incremental_install_json_path']`:
+Path to the JSON file with per-apk details for incremental install.
+See `build/android/gyp/incremental/write_installer_json.py` for more
+details about its content.
+
+* `deps_info['dist_jar']['all_interface_jars']`:
+For `android_apk` and `dist_jar` targets, a list of all interface jar files
+that will be merged into the final `.jar` file for distribution.
+
+* `deps_info['final_dex']['path']`:
+Path to the final classes.dex file (or classes.zip in case of multi-dex)
+for this APK - only used for proguarded builds.
+
+* `native['libraries']`
+List of native libraries for the primary ABI to be embedded in this APK.
+E.g. [ "libchrome.so" ] (i.e. this doesn't include any ABI sub-directory
+prefix).
+
+* `native['java_libraries_list']`
+The same list as `native['libraries']` as a string holding a Java source
+fragment, e.g.
`"{\"chrome\"}"`, without any `lib` prefix, and `.so`
+suffix (as expected by `System.loadLibrary()`).
+
+* `native['second_abi_libraries']`
+List of native libraries for the secondary ABI to be embedded in this APK.
+Empty if only a single ABI is supported.
+
+* `native['loadable_modules']`
+A list of native libraries to store within the APK, in addition to those from
+`native['libraries']`. These correspond to things like the Chromium linker
+or instrumentation libraries.
+
+* `native['secondary_abi_loadable_modules']`
+Secondary ABI version of `loadable_modules`.
+
+* `native['library_always_compress']`
+A list of library files that we always compress.
+
+* `assets`
+A list of assets stored compressed in the APK. Each entry has the format
+`<source>:<dest-path>`, where `<source>` is relative to
+`$CHROMIUM_OUTPUT_DIR`, and `<dest-path>` is relative to `//assets/`
+within the APK.
+
+NOTE: Not to be confused with the `deps_info['assets']` dictionary that
+belongs to `android_assets` targets only.
+
+* `uncompressed_assets`
+A list of uncompressed assets stored in the APK. Each entry has the format
+`<source>:<dest-path>` too.
+
+* `locales_java_list`
+A string holding a Java source fragment that gives the list of locales stored
+uncompressed as android assets.
+
+* `extra_android_manifests`
+A list of `deps_configs['android_manifest']` entries, for all resource
+dependencies for this target. I.e. a list of paths to manifest files for
+all the resources in this APK. These will be merged with the root manifest
+file to generate the final one used to build the APK.
+
+* `java_resources_jars`
+This is a list of `.jar` files whose *Java* resources should be included in
+the final APK. For example, this is used to copy the `.res` files from the
+EMMA Coverage tool. The copy will omit any `.class` file and the top-level
+`//meta-inf/` directory from the input jars. Everything else will be copied
+into the final APK as-is.
+
+NOTE: This has nothing to do with *Android* resources.
+
+* `deps_info['jni_all_source']`
+The list of all `deps_info['target_sources_file']` entries for all library
+dependencies for this APK. Note: this is a list of files, where each file
+contains a list of Java and Kotlin source files. This is used for JNI
+registration.
+
+* `deps_info['proguard_all_configs']`:
+The collection of all `deps_info['proguard_configs']` values from this target
+and all its dependencies.
+
+* `deps_info['proguard_classpath_jars']`:
+The collection of all `deps_info['extra_classpath_jars']` values from all
+dependencies.
+
+* `deps_info['proguard_under_test_mapping']`:
+Applicable to apks with proguard enabled that have an apk_under_test. This is
+the path to the apk_under_test's output proguard .mapping file.
+
+## Target type \
+`android_app_bundle_module`:
+
+Corresponds to an Android app bundle module. Very similar to an APK and
+inherits the same fields, except that this does not generate an installable
+file (see `android_app_bundle`), and for the following omitted fields:
+
+* `deps_info['apk_path']`, `deps_info['incremental_apk_path']` and
+  `deps_info['incremental_install_json_path']` are omitted.
+
+* top-level `dist_jar` is omitted as well.
+
+Compared to `android_apk` targets, these new fields are added:
+
+* `deps_info['proto_resources_path']`:
+The path of a zip archive containing the APK's resources compiled to the
+protocol buffer format (instead of regular binary xml + resources.arsc).
+
+* `deps_info['module_rtxt_path']`:
+The path of the R.txt file generated when compiling the resources for the
+bundle module.
+
+* `deps_info['module_pathmap_path']`:
+The path of the pathmap file generated when compiling the resources for the
+bundle module, if resource path shortening is enabled.
+
+* `deps_info['base_allowlist_rtxt_path']`:
+Optional path to an R.txt file used as an allowlist for base string resources.
+This means that any string resource listed in this file *and* in
+`deps_info['module_rtxt_path']` will end up in the base split APK of any
+`android_app_bundle` target that uses this target as its base module.
+
+This ensures that such localized strings are available to all bundle installs,
+even when language based splits are enabled (e.g. required for WebView strings
+inside the Monochrome bundle).
+
+
+## Target type `android_app_bundle`
+
+This target type corresponds to an Android app bundle, and is built from one
+or more `android_app_bundle_module` targets listed as dependencies.
+
+
+## Target type `dist_aar`:
+
+This type corresponds to a target used to generate an `.aar` archive for
+distribution. The archive's content is determined by the target's dependencies.
+
+This always has the following entries:
+
+  * `deps_info['supports_android']` (always True).
+  * `deps_info['requires_android']` (always True).
+  * `deps_info['proguard_configs']` (optional).
+
+
+## Target type `dist_jar`:
+
+This type is similar to [`dist_aar`](#target_dist_aar) but is not
+Android-specific, and used to create a `.jar` file that can be later
+redistributed.
+
+This always has the following entries:
+
+  * `deps_info['proguard_enabled']` (False by default).
+  * `deps_info['proguard_configs']` (optional).
+  * `deps_info['supports_android']` (True by default).
+  * `deps_info['requires_android']` (False by default).
+
+
+
+## The `deps_info['javac']` dictionary:
+
+This dictionary appears in Java-related targets (e.g. `java_library`,
+`android_apk` and others), and contains information related to the compilation
+of Java sources, class files, and jars.
+
+* `javac['classpath']`
+The classpath used to compile this target when annotation processors are
+present.
+
+* `javac['interface_classpath']`
+The classpath used to compile this target when annotation processors are
+not present. These are also always used to know when a target needs to be
+rebuilt.
+
+* `javac['processor_classpath']`
+The classpath listing the jars used for annotation processors. I.e. sent as
+`-processorpath` when invoking `javac`.
+
+* `javac['processor_classes']`
+The list of annotation processor main classes. I.e. sent as `-processor` when
+invoking `javac`.
+
+## Target type `android_app_bundle`:
+
+This type corresponds to an Android app bundle (`.aab` file).
+
+--------------- END_MARKDOWN ---------------------------------------------------
+"""
+
+import collections
+import itertools
+import json
+import optparse
+import os
+import shutil
+import sys
+import xml.dom.minidom
+
+from util import build_utils
+from util import resource_utils
+import action_helpers  # build_utils adds //build to sys.path.
+
+
+# Types that should never be used as a dependency of another build config.
+_ROOT_TYPES = ('android_apk', 'java_binary', 'java_annotation_processor',
+               'robolectric_binary', 'android_app_bundle')
+# Types that should not allow code deps to pass through.
+_RESOURCE_TYPES = ('android_assets', 'android_resources',
+                   'system_java_library')
+
+# Cache of path -> JSON dict.
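+# Both GetDepConfigRoot() and GetDepConfig() below read through this cache, so
+# each .build_config.json file is parsed at most once per invocation.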
+_dep_config_cache = {}
+
+
+class OrderedSet(collections.OrderedDict):
+  @staticmethod
+  def fromkeys(iterable):
+    out = OrderedSet()
+    out.update(iterable)
+    return out
+
+  def add(self, key):
+    self[key] = True
+
+  def update(self, iterable):
+    for v in iterable:
+      self.add(v)
+
+
+def _ExtractMarkdownDocumentation(input_text):
+  """Extracts Markdown documentation from an input string.
+
+  This generates a list of strings extracted from |input_text|, by looking
+  for '-- BEGIN_MARKDOWN --' and '-- END_MARKDOWN --' line markers."""
+  in_markdown = False
+  result = []
+  for line in input_text.splitlines():
+    if in_markdown:
+      if '-- END_MARKDOWN --' in line:
+        in_markdown = False
+      else:
+        result.append(line)
+    else:
+      if '-- BEGIN_MARKDOWN --' in line:
+        in_markdown = True
+
+  return result
+
+
+class AndroidManifest:
+  def __init__(self, path):
+    self.path = path
+    dom = xml.dom.minidom.parse(path)
+    manifests = dom.getElementsByTagName('manifest')
+    assert len(manifests) == 1
+    self.manifest = manifests[0]
+
+  def GetInstrumentationElements(self):
+    instrumentation_els = self.manifest.getElementsByTagName('instrumentation')
+    if len(instrumentation_els) == 0:
+      return None
+    return instrumentation_els
+
+  def CheckInstrumentationElements(self, expected_package):
+    instrs = self.GetInstrumentationElements()
+    if not instrs:
+      raise Exception('No <instrumentation> elements found in %s' % self.path)
+    for instr in instrs:
+      instrumented_package = instr.getAttributeNS(
+          'http://schemas.android.com/apk/res/android', 'targetPackage')
+      if instrumented_package != expected_package:
+        raise Exception(
+            'Wrong instrumented package. Expected %s, got %s'
+            % (expected_package, instrumented_package))
+
+  def GetPackageName(self):
+    return self.manifest.getAttribute('package')
+
+
+def GetDepConfigRoot(path):
+  if path not in _dep_config_cache:
+    with open(path) as jsonfile:
+      _dep_config_cache[path] = json.load(jsonfile)
+  return _dep_config_cache[path]
+
+
+def GetDepConfig(path):
+  return GetDepConfigRoot(path)['deps_info']
+
+
+def DepsOfType(wanted_type, configs):
+  return [c for c in configs if c['type'] == wanted_type]
+
+
+def DepPathsOfType(wanted_type, config_paths):
+  return [p for p in config_paths if GetDepConfig(p)['type'] == wanted_type]
+
+
+def GetAllDepsConfigsInOrder(deps_config_paths, filter_func=None):
+  def apply_filter(paths):
+    if filter_func:
+      return [p for p in paths if filter_func(GetDepConfig(p))]
+    return paths
+
+  def discover(path):
+    config = GetDepConfig(path)
+    all_deps = config['deps_configs'] + config.get('public_deps_configs', [])
+    return apply_filter(all_deps)
+
+  deps_config_paths = apply_filter(deps_config_paths)
+  deps_config_paths = build_utils.GetSortedTransitiveDependencies(
+      deps_config_paths, discover)
+  return deps_config_paths
+
+
+def GetObjectByPath(obj, key_path):
+  """Given an object, returns its nth child based on a key path.
+  """
+  return GetObjectByPath(obj[key_path[0]], key_path[1:]) if key_path else obj
+
+
+def RemoveObjDups(obj, base, *key_path):
+  """Removes array items from obj[*key_path] that are also
+  contained in base[*key_path] (duplicates).
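+
+  For example, RemoveObjDups(obj, base, 'deps_info', 'deps_configs') drops
+  every entry of obj['deps_info']['deps_configs'] that also appears in
+  base['deps_info']['deps_configs'].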
+ """ + base_target = set(GetObjectByPath(base, key_path)) + target = GetObjectByPath(obj, key_path) + target[:] = [x for x in target if x not in base_target] + + +class Deps: + def __init__(self, direct_deps_config_paths): + self._all_deps_config_paths = GetAllDepsConfigsInOrder( + direct_deps_config_paths) + self._direct_deps_configs = [ + GetDepConfig(p) for p in direct_deps_config_paths + ] + self._all_deps_configs = [ + GetDepConfig(p) for p in self._all_deps_config_paths + ] + self._direct_deps_config_paths = direct_deps_config_paths + + def All(self, wanted_type=None): + if wanted_type is None: + return self._all_deps_configs + return DepsOfType(wanted_type, self._all_deps_configs) + + def Direct(self, wanted_type=None): + if wanted_type is None: + return self._direct_deps_configs + return DepsOfType(wanted_type, self._direct_deps_configs) + + def AllConfigPaths(self): + return self._all_deps_config_paths + + def GradlePrebuiltJarPaths(self): + ret = [] + + def helper(cur): + for config in cur.Direct('java_library'): + if config['is_prebuilt'] or config['gradle_treat_as_prebuilt']: + if config['unprocessed_jar_path'] not in ret: + ret.append(config['unprocessed_jar_path']) + + helper(self) + return ret + + def GradleLibraryProjectDeps(self): + ret = [] + + def helper(cur): + for config in cur.Direct('java_library'): + if config['is_prebuilt']: + pass + elif config['gradle_treat_as_prebuilt']: + all_deps = config['deps_configs'] + config.get( + 'public_deps_configs', []) + helper(Deps(all_deps)) + elif config not in ret: + ret.append(config) + + helper(self) + return ret + + +def _MergeAssets(all_assets): + """Merges all assets from the given deps. + + Returns: + A tuple of: (compressed, uncompressed, locale_paks) + |compressed| and |uncompressed| are lists of "srcPath:zipPath". srcPath is + the path of the asset to add, and zipPath is the location within the zip + (excluding assets/ prefix). + |locale_paks| is a set of all zipPaths that have been marked as + treat_as_locale_paks=true. + """ + compressed = {} + uncompressed = {} + locale_paks = set() + for asset_dep in all_assets: + entry = asset_dep['assets'] + disable_compression = entry.get('disable_compression') + treat_as_locale_paks = entry.get('treat_as_locale_paks') + dest_map = uncompressed if disable_compression else compressed + other_map = compressed if disable_compression else uncompressed + outputs = entry.get('outputs', []) + for src, dest in itertools.zip_longest(entry['sources'], outputs): + if not dest: + dest = os.path.basename(src) + # Merge so that each path shows up in only one of the lists, and that + # deps of the same target override previous ones. + other_map.pop(dest, 0) + dest_map[dest] = src + if treat_as_locale_paks: + locale_paks.add(dest) + + def create_list(asset_map): + # Sort to ensure deterministic ordering. + items = sorted(asset_map.items()) + return [f'{src}:{dest}' for dest, src in items] + + return create_list(compressed), create_list(uncompressed), locale_paks + + +def _ResolveGroupsAndPublicDeps(config_paths): + """Returns a list of configs with all groups inlined.""" + + def helper(config_path): + config = GetDepConfig(config_path) + if config['type'] == 'group': + # Groups combine public_deps with deps_configs, so no need to check + # public_config_paths separately. + return config['deps_configs'] + if config['type'] == 'android_resources': + # android_resources targets do not support public_deps, but instead treat + # all resource deps as public deps. 
+      return DepPathsOfType('android_resources', config['deps_configs'])
+
+    return config.get('public_deps_configs', [])
+
+  return build_utils.GetSortedTransitiveDependencies(config_paths, helper)
+
+
+def _DepsFromPaths(dep_paths,
+                   target_type,
+                   filter_root_targets=True,
+                   recursive_resource_deps=False):
+  """Resolves all groups and trims dependency branches that we never want.
+
+  E.g. when a resource or asset depends on an apk target, the intent is to
+  include the .apk as a resource/asset, not to have the apk's classpath added.
+
+  This method is meant to be called to get the top nodes (i.e. closest to the
+  current target) that we could then use to get a full transitive dependents
+  list (e.g. using Deps#All). So filtering single elements out of this list
+  filters whole branches of dependencies. By resolving groups (i.e. expanding
+  them to their constituents), depending on a group is equivalent to directly
+  depending on each element of that group.
+  """
+  blocklist = []
+  allowlist = []
+
+  # Don't allow root targets to be considered as a dep.
+  if filter_root_targets:
+    blocklist.extend(_ROOT_TYPES)
+
+  # Don't allow java libraries to cross through assets/resources.
+  if target_type in _RESOURCE_TYPES:
+    allowlist.extend(_RESOURCE_TYPES)
+    # Pretend that this target directly depends on all of its transitive
+    # dependencies.
+    if recursive_resource_deps:
+      dep_paths = GetAllDepsConfigsInOrder(dep_paths)
+      # Exclude assets if recursive_resource_deps is set. The
+      # recursive_resource_deps arg is used to pull resources into the base
+      # module to workaround bugs accessing resources in isolated DFMs, but
+      # assets should be kept in the DFMs.
+      blocklist.append('android_assets')
+
+  return _DepsFromPathsWithFilters(dep_paths, blocklist, allowlist)
+
+
+def _FilterConfigPaths(dep_paths, blocklist=None, allowlist=None):
+  if not blocklist and not allowlist:
+    return dep_paths
+  configs = [GetDepConfig(p) for p in dep_paths]
+  if blocklist:
+    configs = [c for c in configs if c['type'] not in blocklist]
+  if allowlist:
+    configs = [c for c in configs if c['type'] in allowlist]
+
+  return [c['path'] for c in configs]
+
+
+def _DepsFromPathsWithFilters(dep_paths, blocklist=None, allowlist=None):
+  """Resolves all groups and trims dependency branches that we never want.
+
+  See _DepsFromPaths.
+
+  |blocklist| if passed, are the types of direct dependencies we do not care
+  about (i.e. tips of branches that we wish to prune).
+
+  |allowlist| if passed, are the only types of direct dependencies we care
+  about (i.e. we wish to prune all other branches that do not start from one of
+  these).
+  """
+  # Filter both before and after so that public_deps of blocked targets are not
+  # added.
+  allowlist_with_groups = None
+  if allowlist:
+    allowlist_with_groups = set(allowlist)
+    allowlist_with_groups.add('group')
+  dep_paths = _FilterConfigPaths(dep_paths, blocklist, allowlist_with_groups)
+  dep_paths = _ResolveGroupsAndPublicDeps(dep_paths)
+  dep_paths = _FilterConfigPaths(dep_paths, blocklist, allowlist)
+
+  return Deps(dep_paths)
+
+
+def _ExtractSharedLibsFromRuntimeDeps(runtime_deps_file):
+  ret = []
+  with open(runtime_deps_file) as f:
+    for line in f:
+      line = line.rstrip()
+      if not line.endswith('.so'):
+        continue
+      # Only unstripped .so files are listed in runtime deps.
+      # Convert to the stripped .so by going up one directory.
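+      # e.g. 'lib.unstripped/libfoo.so' becomes 'libfoo.so'.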
+      ret.append(os.path.normpath(line.replace('lib.unstripped/', '')))
+  ret.reverse()
+  return ret
+
+
+def _CreateJavaLibrariesList(library_paths):
+  """Returns a java literal array with the "base" library names:
+  e.g. libfoo.so -> foo
+  """
+  names = ['"%s"' % os.path.basename(s)[3:-3] for s in library_paths]
+  return ('{%s}' % ','.join(sorted(set(names))))
+
+
+def _CreateJavaLocaleListFromAssets(assets, locale_paks):
+  """Returns a java literal array from a list of locale assets.
+
+  Args:
+    assets: A list of all APK asset paths in the form 'src:dst'
+    locale_paks: A list of asset paths that correspond to the locale pak
+      files of interest. Each |assets| entry will have its 'dst' part matched
+      against it to determine if they are part of the result.
+  Returns:
+    A string that is a Java source literal array listing the locale names
+    of the corresponding asset files, without directory or .pak suffix.
+    E.g. '{"en-GB", "en-US", "es-ES", "fr", ... }'
+  """
+  assets_paths = [a.split(':')[1] for a in assets]
+  locales = [os.path.basename(a)[:-4] for a in assets_paths if a in locale_paks]
+  return '{%s}' % ','.join('"%s"' % l for l in sorted(locales))
+
+
+def _AddJarMapping(jar_to_target, configs):
+  for config in configs:
+    jar = config.get('unprocessed_jar_path')
+    if jar:
+      jar_to_target[jar] = config['gn_target']
+    for jar in config.get('extra_classpath_jars', []):
+      jar_to_target[jar] = config['gn_target']
+
+
+def _CompareClasspathPriority(dep):
+  return 1 if dep.get('low_classpath_priority') else 0
+
+
+def _DedupFeatureModuleSharedCode(uses_split_arg, modules,
+                                  field_names_to_dedup):
+  child_to_ancestors = collections.defaultdict(list)
+  if uses_split_arg:
+    for split_pair in uses_split_arg:
+      child, parent = split_pair.split(':')
+      assert child in modules
+      assert parent in modules
+      child_to_ancestors[child] = [parent]
+
+  # Create a full list of ancestors for each module.
+  for name in modules:
+    if name == 'base':
+      continue
+    curr_name = name
+    while curr_name in child_to_ancestors:
+      parent = child_to_ancestors[curr_name][0]
+      if parent not in child_to_ancestors[name]:
+        child_to_ancestors[name].append(parent)
+      curr_name = parent
+
+    if curr_name != 'base':
+      child_to_ancestors[name].append('base')
+
+  # Strip out duplicates from ancestors.
+  for name, module in modules.items():
+    if name == 'base':
+      continue
+    # Make sure we get all ancestors, not just the direct parent.
+    for ancestor in child_to_ancestors[name]:
+      for f in field_names_to_dedup:
+        if f in module:
+          RemoveObjDups(module, modules[ancestor], f)
+
+  # Strip out duplicates from siblings/cousins.
+  for f in field_names_to_dedup:
+    _PromoteToCommonAncestor(modules, child_to_ancestors, f)
+
+
+def _PromoteToCommonAncestor(modules, child_to_ancestors, field_name):
+  module_to_fields_set = {}
+  for module_name, module in modules.items():
+    if field_name in module:
+      module_to_fields_set[module_name] = set(module[field_name])
+
+  seen = set()
+  dupes = set()
+  for fields in module_to_fields_set.values():
+    new_dupes = seen & fields
+    if new_dupes:
+      dupes |= new_dupes
+    seen |= fields
+
+  for d in dupes:
+    owning_modules = []
+    for module_name, fields in module_to_fields_set.items():
+      if d in fields:
+        owning_modules.append(module_name)
+    assert len(owning_modules) >= 2
+    # Rely on the fact that ancestors are inserted from closest to
+    # farthest, where "base" should always be the last element.
+    # Arbitrarily using the first owning module - any would work.
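+    # The first ancestor that is also an ancestor of every other owning
+    # module is the closest common ancestor; the loop below finds it.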
+ for ancestor in child_to_ancestors[owning_modules[0]]: + ancestor_is_shared_with_all = True + for o in owning_modules[1:]: + if ancestor not in child_to_ancestors[o]: + ancestor_is_shared_with_all = False + break + if ancestor_is_shared_with_all: + common_ancestor = ancestor + break + for o in owning_modules: + module_to_fields_set[o].remove(d) + module_to_fields_set[common_ancestor].add(d) + + for module_name, module in modules.items(): + if field_name in module: + module[field_name] = sorted(list(module_to_fields_set[module_name])) + + +def _CopyBuildConfigsForDebugging(debug_dir): + shutil.rmtree(debug_dir, ignore_errors=True) + os.makedirs(debug_dir) + for src_path in _dep_config_cache: + dst_path = os.path.join(debug_dir, src_path) + assert dst_path.startswith(debug_dir), dst_path + os.makedirs(os.path.dirname(dst_path), exist_ok=True) + shutil.copy(src_path, dst_path) + print(f'Copied {len(_dep_config_cache)} .build_config.json into {debug_dir}') + + +def main(argv): + parser = optparse.OptionParser() + action_helpers.add_depfile_arg(parser) + parser.add_option('--build-config', help='Path to build_config output.') + parser.add_option('--store-deps-for-debugging-to', + help='Path to copy all transitive build config files to.') + parser.add_option( + '--type', + help='Type of this target (e.g. android_library).') + parser.add_option('--gn-target', help='GN label for this target') + parser.add_option( + '--deps-configs', + help='GN-list of dependent build_config files.') + parser.add_option( + '--annotation-processor-configs', + help='GN-list of build_config files for annotation processors.') + + # android_resources options + parser.add_option('--srcjar', help='Path to target\'s resources srcjar.') + parser.add_option('--resources-zip', help='Path to target\'s resources zip.') + parser.add_option('--package-name', + help='Java package name for these resources.') + parser.add_option('--android-manifest', + help='Path to the root android manifest.') + parser.add_option('--merged-android-manifest', + help='Path to the merged android manifest.') + parser.add_option('--resource-dirs', action='append', default=[], + help='GYP-list of resource dirs') + parser.add_option( + '--res-sources-path', + help='Path to file containing a list of paths to resources.') + parser.add_option( + '--resource-overlay', + action='store_true', + help='Whether resources passed in via --resources-zip should override ' + 'resources with the same name') + parser.add_option( + '--recursive-resource-deps', + action='store_true', + help='Whether deps should be walked recursively to find resource deps.') + + # android_assets options + parser.add_option('--asset-sources', help='List of asset sources.') + parser.add_option('--asset-renaming-sources', + help='List of asset sources with custom destinations.') + parser.add_option('--asset-renaming-destinations', + help='List of asset custom destinations.') + parser.add_option('--disable-asset-compression', action='store_true', + help='Whether to disable asset compression.') + parser.add_option('--treat-as-locale-paks', action='store_true', + help='Consider the assets as locale paks in BuildConfig.java') + + # java library and group options + parser.add_option('--preferred-dep', + action='store_true', + help='Whether the target should be preferred as a dep.') + + # java library options + parser.add_option('--public-deps-configs', + help='GN list of config files of deps which are exposed as ' + 'part of the target\'s public API.') + parser.add_option('--aar-path', help='Path 
to containing .aar file.') + parser.add_option('--device-jar-path', help='Path to .jar for dexing.') + parser.add_option('--host-jar-path', help='Path to .jar for java_binary.') + parser.add_option('--unprocessed-jar-path', + help='Path to the .jar to use for javac classpath purposes.') + parser.add_option( + '--interface-jar-path', + help='Path to the interface .jar to use for javac classpath purposes.') + parser.add_option('--is-prebuilt', action='store_true', + help='Whether the jar was compiled or pre-compiled.') + parser.add_option('--target-sources-file', help='Path to .sources file') + parser.add_option('--bundled-srcjars', + help='GYP-list of .srcjars that have been included in this java_library.') + parser.add_option('--supports-android', action='store_true', + help='Whether this library supports running on the Android platform.') + parser.add_option('--requires-android', action='store_true', + help='Whether this library requires running on the Android platform.') + parser.add_option('--bypass-platform-checks', action='store_true', + help='Bypass checks for support/require Android platform.') + parser.add_option('--extra-classpath-jars', + help='GYP-list of .jar files to include on the classpath when compiling, ' + 'but not to include in the final binary.') + parser.add_option( + '--low-classpath-priority', + action='store_true', + help='Indicates that the library should be placed at the end of the ' + 'classpath.') + parser.add_option( + '--mergeable-android-manifests', + help='GN-list of AndroidManifest.xml to include in manifest merging.') + parser.add_option('--gradle-treat-as-prebuilt', action='store_true', + help='Whether this library should be treated as a prebuilt library by ' + 'generate_gradle.py.') + parser.add_option('--main-class', + help='Main class for java_binary or java_annotation_processor targets.') + parser.add_option('--java-resources-jar-path', + help='Path to JAR that contains java resources. Everything ' + 'from this JAR except meta-inf/ content and .class files ' + 'will be added to the final APK.') + parser.add_option( + '--non-chromium-code', + action='store_true', + help='True if a java library is not chromium code, used for lint.') + + # robolectric_library options + parser.add_option('--is-robolectric', + action='store_true', + help='Whether this is a host side android test library.') + + # android library options + parser.add_option('--dex-path', help='Path to target\'s dex output.') + + # native library options + parser.add_option('--shared-libraries-runtime-deps', + help='Path to file containing runtime deps for shared ' + 'libraries.') + parser.add_option( + '--loadable-modules', + action='append', + help='GN-list of native libraries for primary ' + 'android-abi. Can be specified multiple times.', + default=[]) + parser.add_option('--secondary-abi-shared-libraries-runtime-deps', + help='Path to file containing runtime deps for secondary ' + 'abi shared libraries.') + parser.add_option( + '--secondary-abi-loadable-modules', + action='append', + help='GN-list of native libraries for secondary ' + 'android-abi. 
Can be specified multiple times.', + default=[]) + parser.add_option( + '--native-lib-placeholders', + action='append', + help='GN-list of native library placeholders to add.', + default=[]) + parser.add_option( + '--secondary-native-lib-placeholders', + action='append', + help='GN-list of native library placeholders to add ' + 'for the secondary android-abi.', + default=[]) + parser.add_option('--uncompress-shared-libraries', default=False, + action='store_true', + help='Whether to store native libraries uncompressed') + parser.add_option( + '--library-always-compress', + help='The list of library files that we always compress.') + + # apk options + parser.add_option('--apk-path', help='Path to the target\'s apk output.') + parser.add_option('--incremental-apk-path', + help="Path to the target's incremental apk output.") + parser.add_option('--incremental-install-json-path', + help="Path to the target's generated incremental install " + "json.") + parser.add_option( + '--tested-apk-config', + help='Path to the build config of the tested apk (for an instrumentation ' + 'test apk).') + parser.add_option( + '--proguard-enabled', + action='store_true', + help='Whether proguard is enabled for this apk or bundle module.') + parser.add_option( + '--proguard-configs', + help='GN-list of proguard flag files to use in final apk.') + parser.add_option( + '--proguard-mapping-path', help='Path to jar created by ProGuard step') + + # apk options that are static library specific + parser.add_option( + '--static-library-dependent-configs', + help='GN list of .build_configs of targets that use this target as a ' + 'static library.') + + # options shared between android_resources and apk targets + parser.add_option('--r-text-path', help='Path to target\'s R.txt file.') + + parser.add_option('--fail', + help='GN-list of error message lines to fail with.') + + parser.add_option('--final-dex-path', + help='Path to final input classes.dex (or classes.zip) to ' + 'use in final apk.') + parser.add_option('--res-size-info', help='Path to .ap_.info') + parser.add_option('--apk-proto-resources', + help='Path to resources compiled in protocol buffer format ' + ' for this apk.') + parser.add_option( + '--module-pathmap-path', + help='Path to pathmap file for resource paths in a bundle module.') + parser.add_option( + '--base-allowlist-rtxt-path', + help='Path to R.txt file for the base resources allowlist.') + + parser.add_option('--generate-markdown-format-doc', action='store_true', + help='Dump the Markdown .build_config format documentation ' + 'then exit immediately.') + + parser.add_option('--module-name', help='The name of this feature module.') + parser.add_option( + '--base-module-build-config', + help='Path to the base module\'s build config ' + 'if this is a feature module.') + parser.add_option('--parent-module-build-config', + help='Path to the parent module\'s build config ' + 'when not using base module as parent.') + + parser.add_option( + '--module-build-configs', + help='For bundles, the paths of all non-async module .build_configs ' + 'for modules that are part of the bundle.') + parser.add_option( + '--uses-split', + action='append', + help='List of name pairs separated by : mapping a feature module to a ' + 'dependent feature module.') + + parser.add_option( + '--trace-events-jar-dir', + help='Directory of rewritten .jar files for trace event rewriting.') + + parser.add_option('--version-name', help='Version name for this APK.') + parser.add_option('--version-code', help='Version code for this 
APK.')
+
+  options, args = parser.parse_args(argv)
+
+  if args:
+    parser.error('No positional arguments should be given.')
+
+  if options.generate_markdown_format_doc:
+    doc_lines = _ExtractMarkdownDocumentation(__doc__)
+    for line in doc_lines:
+      print(line)
+    return 0
+
+  if options.fail:
+    parser.error('\n'.join(action_helpers.parse_gn_list(options.fail)))
+
+  lib_options = ['unprocessed_jar_path', 'interface_jar_path']
+  device_lib_options = ['device_jar_path', 'dex_path']
+  required_options_map = {
+      'android_apk': ['build_config'] + lib_options + device_lib_options,
+      'android_app_bundle_module':
+      ['build_config', 'res_size_info'] + lib_options + device_lib_options,
+      'android_assets': ['build_config'],
+      'android_resources': ['build_config', 'resources_zip'],
+      'dist_aar': ['build_config'],
+      'dist_jar': ['build_config'],
+      'group': ['build_config'],
+      'java_annotation_processor': ['build_config', 'main_class'],
+      'java_binary': ['build_config'],
+      'java_library': ['build_config', 'host_jar_path'] + lib_options,
+      'robolectric_binary': ['build_config'],
+      'system_java_library': ['build_config', 'unprocessed_jar_path'],
+      'android_app_bundle': ['build_config', 'module_build_configs'],
+  }
+  required_options = required_options_map.get(options.type)
+  if not required_options:
+    raise Exception('Unknown type: <%s>' % options.type)
+
+  build_utils.CheckOptions(options, parser, required_options)
+
+  if options.type != 'android_app_bundle_module':
+    if options.apk_proto_resources:
+      raise Exception('--apk-proto-resources can only be used with '
+                      '--type=android_app_bundle_module')
+    if options.module_pathmap_path:
+      raise Exception('--module-pathmap-path can only be used with '
+                      '--type=android_app_bundle_module')
+    if options.base_allowlist_rtxt_path:
+      raise Exception('--base-allowlist-rtxt-path can only be used with '
+                      '--type=android_app_bundle_module')
+    if options.module_name:
+      raise Exception('--module-name can only be used with '
+                      '--type=android_app_bundle_module')
+
+  is_apk_or_module_target = options.type in ('android_apk',
+                                             'android_app_bundle_module')
+
+  if not is_apk_or_module_target:
+    if options.library_always_compress:
+      raise Exception(
+          '--library-always-compress can only be used with --type=android_apk '
+          'or --type=android_app_bundle_module')
+
+  if options.device_jar_path and not options.dex_path:
+    raise Exception('java_library that supports Android requires a dex path.')
+  if any(getattr(options, x) for x in lib_options):
+    for attr in lib_options:
+      if not getattr(options, attr):
+        raise Exception('Expected %s to be set.'
% attr) + + if options.requires_android and not options.supports_android: + raise Exception( + '--supports-android is required when using --requires-android') + + is_java_target = options.type in ('java_binary', 'robolectric_binary', + 'java_annotation_processor', 'java_library', + 'android_apk', 'dist_aar', 'dist_jar', + 'system_java_library', + 'android_app_bundle_module') + + deps_configs_paths = action_helpers.parse_gn_list(options.deps_configs) + public_deps_configs_paths = action_helpers.parse_gn_list( + options.public_deps_configs) + deps_configs_paths += public_deps_configs_paths + deps = _DepsFromPaths(deps_configs_paths, + options.type, + recursive_resource_deps=options.recursive_resource_deps) + public_deps = _DepsFromPaths(public_deps_configs_paths, options.type) + processor_deps = _DepsFromPaths(action_helpers.parse_gn_list( + options.annotation_processor_configs or ''), + options.type, + filter_root_targets=False) + + all_inputs = (deps.AllConfigPaths() + processor_deps.AllConfigPaths()) + + if options.recursive_resource_deps: + # Include java_library targets since changes to these targets can remove + # resource deps from the build, which would require rebuilding this target's + # build config file: crbug.com/1168655. + recursive_java_deps = _DepsFromPathsWithFilters( + GetAllDepsConfigsInOrder(deps_configs_paths), + allowlist=['java_library']) + all_inputs.extend(recursive_java_deps.AllConfigPaths()) + + system_library_deps = deps.Direct('system_java_library') + all_deps = deps.All() + all_library_deps = deps.All('java_library') + + if options.type == 'java_library': + # For Java libraries, restrict to resource targets that are direct deps, or + # are indirect via other resource targets. + # The indirect-through-other-targets ones are picked up because + # _ResolveGroupsAndPublicDeps() treats resource deps of resource targets as + # public_deps. + all_resources_deps = deps.Direct('android_resources') + else: + all_resources_deps = deps.All('android_resources') + + if options.type == 'android_resources' and options.recursive_resource_deps: + # android_resources targets that want recursive resource deps also need to + # collect package_names from all library deps. This ensures the R.java files + # for these libraries will get pulled in along with the resources. + android_resources_library_deps = _DepsFromPathsWithFilters( + deps_configs_paths, allowlist=['java_library']).All('java_library') + + base_module_build_config = None + if options.base_module_build_config: + base_module_build_config = GetDepConfigRoot( + options.base_module_build_config) + parent_module_build_config = base_module_build_config + if options.parent_module_build_config: + parent_module_build_config = GetDepConfigRoot( + options.parent_module_build_config) + + # Initialize some common config. + # Any value that needs to be queryable by dependents must go within deps_info. + config = { + 'deps_info': { + 'name': os.path.basename(options.build_config), + 'path': options.build_config, + 'type': options.type, + 'gn_target': options.gn_target, + 'chromium_code': not options.non_chromium_code, + }, + # Info needed only by generate_gradle.py. + 'gradle': {} + } + deps_info = config['deps_info'] + gradle = config['gradle'] + + # The paths we record as deps can differ from deps_config_paths: + # 1) Paths can be removed when blocked by _ROOT_TYPES / _RESOURCE_TYPES. + # 2) Paths can be added when promoted from group deps or public_deps of deps. 
+ # Deps are promoted from groups/public_deps in order to make the filtering + # of 1) work through group() targets (which themselves are not resource + # targets, but should be treated as such when depended on by a resource + # target). A more involved filtering implementation could work to maintain + # the semantics of 1) without the need to promote deps, but we've avoided + # such an undertaking so far. + public_deps_set = set() + if public_deps_configs_paths: + deps_info['public_deps_configs'] = [d['path'] for d in public_deps.Direct()] + public_deps_set = set(deps_info['public_deps_configs']) + + deps_info['deps_configs'] = [ + d['path'] for d in deps.Direct() if d['path'] not in public_deps_set + ] + + if options.type == 'android_apk' and options.tested_apk_config: + tested_apk_deps = Deps([options.tested_apk_config]) + tested_apk_config = tested_apk_deps.Direct()[0] + gradle['apk_under_test'] = tested_apk_config['name'] + + if options.type == 'android_app_bundle_module': + deps_info['module_name'] = options.module_name + + # Required for generating gradle files. + if options.type == 'java_library': + deps_info['is_prebuilt'] = bool(options.is_prebuilt) + deps_info['gradle_treat_as_prebuilt'] = options.gradle_treat_as_prebuilt + + if options.preferred_dep: + deps_info['preferred_dep'] = bool(options.preferred_dep) + + if options.android_manifest: + deps_info['android_manifest'] = options.android_manifest + + if options.merged_android_manifest: + deps_info['merged_android_manifest'] = options.merged_android_manifest + + if options.bundled_srcjars: + deps_info['bundled_srcjars'] = action_helpers.parse_gn_list( + options.bundled_srcjars) + + if options.target_sources_file: + deps_info['target_sources_file'] = options.target_sources_file + + if is_java_target: + if options.main_class: + deps_info['main_class'] = options.main_class + + dependent_prebuilt_jars = deps.GradlePrebuiltJarPaths() + dependent_prebuilt_jars.sort() + if dependent_prebuilt_jars: + gradle['dependent_prebuilt_jars'] = dependent_prebuilt_jars + + dependent_android_projects = [] + dependent_java_projects = [] + for c in deps.GradleLibraryProjectDeps(): + if c['requires_android']: + dependent_android_projects.append(c['path']) + else: + dependent_java_projects.append(c['path']) + + gradle['dependent_android_projects'] = dependent_android_projects + gradle['dependent_java_projects'] = dependent_java_projects + + if options.r_text_path: + deps_info['r_text_path'] = options.r_text_path + + # TODO(tiborg): Remove creation of JNI info for type group and java_library + # once we can generate the JNI registration based on APK / module targets as + # opposed to groups and libraries. + if is_apk_or_module_target or options.type in ('group', 'java_library', + 'robolectric_binary', + 'dist_aar'): + all_target_sources = [ + c['target_sources_file'] for c in all_library_deps + if 'target_sources_file' in c + ] + if options.target_sources_file: + all_target_sources.append(options.target_sources_file) + + if is_apk_or_module_target or options.type in ('group', 'java_library', + 'robolectric_binary'): + if options.apk_proto_resources: + deps_info['proto_resources_path'] = options.apk_proto_resources + + deps_info['version_name'] = options.version_name + deps_info['version_code'] = options.version_code + if options.module_pathmap_path: + deps_info['module_pathmap_path'] = options.module_pathmap_path + else: + # Ensure there is an entry, even if it is empty, for modules + # that have not enabled resource path shortening.
Otherwise + # build_utils.ExpandFileArgs fails. + deps_info['module_pathmap_path'] = '' + + if options.base_allowlist_rtxt_path: + deps_info['base_allowlist_rtxt_path'] = options.base_allowlist_rtxt_path + else: + # Ensure there is an entry, even if it is empty, for modules + # that don't need such an allowlist. + deps_info['base_allowlist_rtxt_path'] = '' + + if is_java_target: + deps_info['requires_android'] = bool(options.requires_android) + deps_info['supports_android'] = bool(options.supports_android) + + # robolectric is special in that it's an android target that runs on the + # host. You are allowed to depend on both android |deps_require_android| and + # non-android |deps_not_support_android| targets. + if not options.bypass_platform_checks and not options.is_robolectric: + deps_require_android = (all_resources_deps + + [d['name'] for d in all_library_deps if d['requires_android']]) + deps_not_support_android = ( + [d['name'] for d in all_library_deps if not d['supports_android']]) + + if deps_require_android and not options.requires_android: + raise Exception('Some deps require building for the Android platform: ' + + str(deps_require_android)) + + if deps_not_support_android and options.supports_android: + raise Exception('Not all deps support the Android platform: ' + + str(deps_not_support_android)) + + if is_apk_or_module_target or options.type == 'dist_jar': + all_dex_files = [c['dex_path'] for c in all_library_deps] + + if is_java_target: + # Classpath values filled in below (after applying tested_apk_config). + config['javac'] = {} + if options.aar_path: + deps_info['aar_path'] = options.aar_path + if options.unprocessed_jar_path: + deps_info['unprocessed_jar_path'] = options.unprocessed_jar_path + deps_info['interface_jar_path'] = options.interface_jar_path + if options.device_jar_path: + deps_info['device_jar_path'] = options.device_jar_path + if options.host_jar_path: + deps_info['host_jar_path'] = options.host_jar_path + if options.dex_path: + deps_info['dex_path'] = options.dex_path + if is_apk_or_module_target: + all_dex_files.append(options.dex_path) + if options.low_classpath_priority: + deps_info['low_classpath_priority'] = True + if options.type == 'android_apk': + deps_info['apk_path'] = options.apk_path + deps_info['incremental_apk_path'] = options.incremental_apk_path + deps_info['incremental_install_json_path'] = ( + options.incremental_install_json_path) + + if options.type == 'android_assets': + all_asset_sources = [] + if options.asset_renaming_sources: + all_asset_sources.extend( + action_helpers.parse_gn_list(options.asset_renaming_sources)) + if options.asset_sources: + all_asset_sources.extend( + action_helpers.parse_gn_list(options.asset_sources)) + + deps_info['assets'] = { + 'sources': all_asset_sources + } + if options.asset_renaming_destinations: + deps_info['assets']['outputs'] = (action_helpers.parse_gn_list( + options.asset_renaming_destinations)) + if options.disable_asset_compression: + deps_info['assets']['disable_compression'] = True + if options.treat_as_locale_paks: + deps_info['assets']['treat_as_locale_paks'] = True + + if options.type == 'android_resources': + deps_info['resources_zip'] = options.resources_zip + if options.resource_overlay: + deps_info['resource_overlay'] = True + if options.srcjar: + deps_info['srcjar'] = options.srcjar + if options.android_manifest: + manifest = AndroidManifest(options.android_manifest) + deps_info['package_name'] = manifest.GetPackageName() + if options.package_name: + deps_info['package_name'] =
options.package_name + deps_info['res_sources_path'] = '' + if options.res_sources_path: + deps_info['res_sources_path'] = options.res_sources_path + + if (options.requires_android + and options.type == 'java_library') or options.is_robolectric: + if options.package_name: + deps_info['package_name'] = options.package_name + + if options.type in ('android_resources', 'android_apk', 'robolectric_binary', + 'dist_aar', 'android_app_bundle_module', 'java_library'): + dependency_zips = [] + dependency_zip_overlays = [] + for c in all_resources_deps: + if not c['resources_zip']: + continue + + dependency_zips.append(c['resources_zip']) + if c.get('resource_overlay'): + dependency_zip_overlays.append(c['resources_zip']) + + extra_package_names = [] + + if options.type != 'android_resources': + extra_package_names = [ + c['package_name'] for c in all_resources_deps if 'package_name' in c + ] + if options.package_name: + extra_package_names += [options.package_name] + + # android_resources targets which specified recursive_resource_deps may + # have extra_package_names. + for resources_dep in all_resources_deps: + extra_package_names.extend(resources_dep['extra_package_names']) + + # Final types (i.e. apks and modules) that create real R.java files must + # collect package names from java_libraries as well. + # https://crbug.com/1073476 + if options.type != 'java_library': + extra_package_names.extend([ + c['package_name'] for c in all_library_deps if 'package_name' in c + ]) + elif options.recursive_resource_deps: + # Pull extra_package_names from library deps if recursive resource deps + # are required. + extra_package_names = [ + c['package_name'] for c in android_resources_library_deps + if 'package_name' in c + ] + config['deps_info']['includes_recursive_resources'] = True + + if options.type in ('dist_aar', 'java_library'): + r_text_files = [ + c['r_text_path'] for c in all_resources_deps if 'r_text_path' in c + ] + deps_info['dependency_r_txt_files'] = r_text_files + + if options.type == 'android_apk' and options.tested_apk_config: + config['deps_info']['arsc_package_name'] = ( + tested_apk_config['package_name']) + # We should not shadow the actual R.java files of the apk_under_test by + # creating new R.java files with the same package names in the tested apk. + extra_package_names = [ + package for package in extra_package_names + if package not in tested_apk_config['extra_package_names'] + ] + if options.res_size_info: + config['deps_info']['res_size_info'] = options.res_size_info + + # Safe to sort: Build checks that non-overlay resources have no overlap. + dependency_zips.sort() + config['deps_info']['dependency_zips'] = dependency_zips + config['deps_info']['dependency_zip_overlays'] = dependency_zip_overlays + # Order doesn't matter, so make stable. + extra_package_names.sort() + config['deps_info']['extra_package_names'] = extra_package_names + + # These are .jars to add to javac classpath but not to runtime classpath.
+ extra_classpath_jars = action_helpers.parse_gn_list( + options.extra_classpath_jars) + if extra_classpath_jars: + extra_classpath_jars.sort() + deps_info['extra_classpath_jars'] = extra_classpath_jars + + mergeable_android_manifests = action_helpers.parse_gn_list( + options.mergeable_android_manifests) + mergeable_android_manifests.sort() + if mergeable_android_manifests: + deps_info['mergeable_android_manifests'] = mergeable_android_manifests + + extra_proguard_classpath_jars = [] + proguard_configs = action_helpers.parse_gn_list(options.proguard_configs) + if proguard_configs: + # Make a copy of |proguard_configs| since it's mutated below. + deps_info['proguard_configs'] = list(proguard_configs) + + + if is_java_target: + classpath_direct_deps = deps.Direct() + classpath_direct_library_deps = deps.Direct('java_library') + + # The classpath used to compile this target when annotation processors are + # present. + javac_classpath = set(c['unprocessed_jar_path'] + for c in classpath_direct_library_deps) + # The classpath used to compile this target when annotation processors are + # not present. These are also always used to know when a target needs to be + # rebuilt. + javac_interface_classpath = set(c['interface_jar_path'] + for c in classpath_direct_library_deps) + + # Preserve order of |all_library_deps|. Move low priority libraries to the + # end of the classpath. + all_library_deps_sorted_for_classpath = sorted( + all_library_deps[::-1], key=_CompareClasspathPriority) + + # The classpath used for bytecode-rewriting. + javac_full_classpath = OrderedSet.fromkeys( + c['unprocessed_jar_path'] + for c in all_library_deps_sorted_for_classpath) + # The classpath used for error prone. + javac_full_interface_classpath = OrderedSet.fromkeys( + c['interface_jar_path'] for c in all_library_deps_sorted_for_classpath) + + # Adding base module to classpath to compile against its R.java file. + if base_module_build_config: + javac_full_classpath.add( + base_module_build_config['deps_info']['unprocessed_jar_path']) + javac_full_interface_classpath.add( + base_module_build_config['deps_info']['interface_jar_path']) + # Turbine now compiles headers against only the direct classpath, so the + # base module's interface jar must be on the direct interface classpath. + javac_interface_classpath.add( + base_module_build_config['deps_info']['interface_jar_path']) + + for dep in classpath_direct_deps: + if 'extra_classpath_jars' in dep: + javac_classpath.update(dep['extra_classpath_jars']) + javac_interface_classpath.update(dep['extra_classpath_jars']) + for dep in all_deps: + if 'extra_classpath_jars' in dep: + javac_full_classpath.update(dep['extra_classpath_jars']) + javac_full_interface_classpath.update(dep['extra_classpath_jars']) + + # TODO(agrieve): Might be less confusing to fold these into bootclasspath. + # Deps to add to the compile-time classpath (but not the runtime classpath). + # These are jars specified by input_jars_paths that almost never change. + # Just add them directly to all the classpaths. + if options.extra_classpath_jars: + javac_classpath.update(extra_classpath_jars) + javac_interface_classpath.update(extra_classpath_jars) + javac_full_classpath.update(extra_classpath_jars) + javac_full_interface_classpath.update(extra_classpath_jars) + + if is_java_target or options.type == 'android_app_bundle': + # The classpath to use to run this target (or as an input to ProGuard).
+ device_classpath = [] + if is_java_target and options.device_jar_path: + device_classpath.append(options.device_jar_path) + device_classpath.extend( + c.get('device_jar_path') for c in all_library_deps + if c.get('device_jar_path')) + if options.type == 'android_app_bundle': + for d in deps.Direct('android_app_bundle_module'): + device_classpath.extend(c for c in d.get('device_classpath', []) + if c not in device_classpath) + + if options.type in ('dist_jar', 'java_binary', 'robolectric_binary'): + # The classpath to use to run this target. + host_classpath = [] + if options.host_jar_path: + host_classpath.append(options.host_jar_path) + host_classpath.extend(c['host_jar_path'] for c in all_library_deps) + deps_info['host_classpath'] = host_classpath + + # We allow lint to be run on android_apk targets, so we collect lint + # artifacts for them. + # We allow lint to be run on android_app_bundle targets, so we need to + # collect lint artifacts for the android_app_bundle_module targets that the + # bundle includes. Different android_app_bundle targets may include different + # android_app_bundle_module targets, so the bundle needs to be able to + # de-duplicate these lint artifacts. + if options.type in ('android_app_bundle_module', 'android_apk'): + # Collect all sources and resources at the apk/bundle_module level. + lint_aars = set() + lint_srcjars = set() + lint_sources = set() + lint_resource_sources = set() + lint_resource_zips = set() + + if options.target_sources_file: + lint_sources.add(options.target_sources_file) + if options.bundled_srcjars: + lint_srcjars.update(deps_info['bundled_srcjars']) + for c in all_library_deps: + if c['chromium_code'] and c['requires_android']: + if 'target_sources_file' in c: + lint_sources.add(c['target_sources_file']) + lint_srcjars.update(c['bundled_srcjars']) + if 'aar_path' in c: + lint_aars.add(c['aar_path']) + + if options.res_sources_path: + lint_resource_sources.add(options.res_sources_path) + if options.resources_zip: + lint_resource_zips.add(options.resources_zip) + for c in all_resources_deps: + if c['chromium_code']: + # Prefer res_sources_path to resources_zips so that lint errors have + # real paths and to avoid needing to extract during lint. 
+ if c['res_sources_path']: + lint_resource_sources.add(c['res_sources_path']) + else: + lint_resource_zips.add(c['resources_zip']) + + deps_info['lint_aars'] = sorted(lint_aars) + deps_info['lint_srcjars'] = sorted(lint_srcjars) + deps_info['lint_sources'] = sorted(lint_sources) + deps_info['lint_resource_sources'] = sorted(lint_resource_sources) + deps_info['lint_resource_zips'] = sorted(lint_resource_zips) + deps_info['lint_extra_android_manifests'] = [] + + if options.type == 'android_apk': + assert options.android_manifest, 'Android APKs must define a manifest' + deps_info['lint_android_manifest'] = options.android_manifest + + if options.type == 'android_app_bundle': + module_config_paths = action_helpers.parse_gn_list( + options.module_build_configs) + module_configs = [GetDepConfig(c) for c in module_config_paths] + module_configs_by_name = {d['module_name']: d for d in module_configs} + per_module_fields = [ + 'device_classpath', 'trace_event_rewritten_device_classpath', + 'all_dex_files' + ] + jni_all_source = set() + lint_aars = set() + lint_srcjars = set() + lint_sources = set() + lint_resource_sources = set() + lint_resource_zips = set() + lint_extra_android_manifests = set() + config['modules'] = {} + modules = config['modules'] + for n, c in module_configs_by_name.items(): + if n == 'base': + assert 'base_module_config' not in deps_info, ( + 'Must have exactly 1 base module!') + deps_info['package_name'] = c['package_name'] + deps_info['version_code'] = c['version_code'] + deps_info['version_name'] = c['version_name'] + deps_info['base_module_config'] = c['path'] + # Use the base module's android manifest for linting. + deps_info['lint_android_manifest'] = c['android_manifest'] + else: + lint_extra_android_manifests.add(c['android_manifest']) + jni_all_source.update(c['jni_all_source']) + lint_aars.update(c['lint_aars']) + lint_srcjars.update(c['lint_srcjars']) + lint_sources.update(c['lint_sources']) + lint_resource_sources.update(c['lint_resource_sources']) + lint_resource_zips.update(c['lint_resource_zips']) + module = modules[n] = {} + for f in per_module_fields: + if f in c: + module[f] = c[f] + deps_info['jni_all_source'] = sorted(jni_all_source) + deps_info['lint_aars'] = sorted(lint_aars) + deps_info['lint_srcjars'] = sorted(lint_srcjars) + deps_info['lint_sources'] = sorted(lint_sources) + deps_info['lint_resource_sources'] = sorted(lint_resource_sources) + deps_info['lint_resource_zips'] = sorted(lint_resource_zips) + deps_info['lint_extra_android_manifests'] = sorted( + lint_extra_android_manifests) + + _DedupFeatureModuleSharedCode(options.uses_split, modules, + per_module_fields) + + if is_apk_or_module_target or options.type in ('group', 'java_library', + 'robolectric_binary', + 'dist_aar'): + deps_info['jni_all_source'] = sorted(set(all_target_sources)) + + system_jars = [c['unprocessed_jar_path'] for c in system_library_deps] + system_interface_jars = [c['interface_jar_path'] for c in system_library_deps] + if system_library_deps: + config['android'] = {} + config['android']['sdk_interface_jars'] = system_interface_jars + config['android']['sdk_jars'] = system_jars + + if options.type in ('android_apk', 'dist_aar', + 'dist_jar', 'android_app_bundle_module', 'android_app_bundle'): + for c in all_deps: + proguard_configs.extend(c.get('proguard_configs', [])) + extra_proguard_classpath_jars.extend(c.get('extra_classpath_jars', [])) + if options.type == 'android_app_bundle': + for c in deps.Direct('android_app_bundle_module'): + proguard_configs.extend(p for p in 
c.get('proguard_configs', [])) + if options.type == 'android_app_bundle': + for d in deps.Direct('android_app_bundle_module'): + extra_proguard_classpath_jars.extend( + c for c in d.get('proguard_classpath_jars', []) + if c not in extra_proguard_classpath_jars) + + if options.type == 'android_app_bundle': + deps_proguard_enabled = [] + deps_proguard_disabled = [] + for d in deps.Direct('android_app_bundle_module'): + if not d['device_classpath']: + # We don't care about modules that have no Java code for proguarding. + continue + if d['proguard_enabled']: + deps_proguard_enabled.append(d['name']) + else: + deps_proguard_disabled.append(d['name']) + if deps_proguard_enabled and deps_proguard_disabled: + raise Exception('Deps %s have proguard enabled while deps %s have ' + 'proguard disabled' % (deps_proguard_enabled, + deps_proguard_disabled)) + deps_info['proguard_enabled'] = bool(options.proguard_enabled) + + if options.proguard_mapping_path: + deps_info['proguard_mapping_path'] = options.proguard_mapping_path + + # The java code for an instrumentation test apk is assembled differently for + # ProGuard vs. non-ProGuard. + # + # Without ProGuard: Each library's jar is dexed separately and then combined + # into a single classes.dex. A test apk will include all dex files not already + # present in the apk-under-test. At runtime all test code lives in the test + # apk, and the program code lives in the apk-under-test. + # + # With ProGuard: Each library's .jar file is fed into ProGuard, which outputs + # a single .jar, which is then dexed into a classes.dex. A test apk includes + # all jar files from the program and the tests because having them separate + # doesn't work with ProGuard's whole-program optimizations. Although the + # apk-under-test still has all of its code in its classes.dex, none of it is + # used at runtime because the copy of it within the test apk takes precedence. + + if options.type == 'android_apk' and options.tested_apk_config: + if tested_apk_config['proguard_enabled']: + assert options.proguard_enabled, ('proguard must be enabled for ' + 'instrumentation apks if it\'s enabled for the tested apk.') + # Mutating lists, so no need to explicitly re-assign to dict. + proguard_configs.extend( + p for p in tested_apk_config['proguard_all_configs']) + extra_proguard_classpath_jars.extend( + p for p in tested_apk_config['proguard_classpath_jars']) + tested_apk_config = GetDepConfig(options.tested_apk_config) + deps_info['proguard_under_test_mapping'] = ( + tested_apk_config['proguard_mapping_path']) + elif options.proguard_enabled: + # Not sure why you'd want to proguard the test apk when the under-test apk + # is not proguarded, but it's easy enough to support. + deps_info['proguard_under_test_mapping'] = '' + + # Add all tested classes to the test's classpath to ensure that the test's + # java code is a superset of the tested apk's java code. + device_classpath_extended = list(device_classpath) + device_classpath_extended.extend( + p for p in tested_apk_config['device_classpath'] + if p not in device_classpath) + # Include in the classpath classes that are added directly to the apk under + # test (those that are not a part of a java_library).
+ javac_classpath.add(tested_apk_config['unprocessed_jar_path']) + javac_interface_classpath.add(tested_apk_config['interface_jar_path']) + javac_full_classpath.add(tested_apk_config['unprocessed_jar_path']) + javac_full_interface_classpath.add(tested_apk_config['interface_jar_path']) + javac_full_classpath.update(tested_apk_config['javac_full_classpath']) + javac_full_interface_classpath.update( + tested_apk_config['javac_full_interface_classpath']) + + # Exclude .jar files from the test apk that exist within the apk under test. + tested_apk_library_deps = tested_apk_deps.All('java_library') + tested_apk_dex_files = {c['dex_path'] for c in tested_apk_library_deps} + all_dex_files = [p for p in all_dex_files if p not in tested_apk_dex_files] + tested_apk_jar_files = set(tested_apk_config['device_classpath']) + device_classpath = [ + p for p in device_classpath if p not in tested_apk_jar_files + ] + + if options.type in ('android_apk', 'dist_aar', 'dist_jar', + 'android_app_bundle_module', 'android_app_bundle'): + deps_info['proguard_all_configs'] = sorted(set(proguard_configs)) + deps_info['proguard_classpath_jars'] = sorted( + set(extra_proguard_classpath_jars)) + + if options.final_dex_path: + config['final_dex'] = {'path': options.final_dex_path} + if is_apk_or_module_target or options.type == 'dist_jar': + # Dependencies for the final dex file of an apk. + deps_info['all_dex_files'] = all_dex_files + + if is_java_target: + config['javac']['classpath'] = sorted(javac_classpath) + config['javac']['interface_classpath'] = sorted(javac_interface_classpath) + # Direct() will be of type 'java_annotation_processor', and so not included + # in All('java_library'). + # Annotation processors run as part of the build, so need host_jar_path. + config['javac']['processor_classpath'] = [ + c['host_jar_path'] for c in processor_deps.Direct() + if c.get('host_jar_path') + ] + config['javac']['processor_classpath'] += [ + c['host_jar_path'] for c in processor_deps.All('java_library') + ] + config['javac']['processor_classes'] = sorted( + c['main_class'] for c in processor_deps.Direct()) + deps_info['javac_full_classpath'] = list(javac_full_classpath) + deps_info['javac_full_interface_classpath'] = list( + javac_full_interface_classpath) + elif options.type == 'android_app_bundle': + # bundles require javac_full_classpath to create .aab.jar.info and require + # javac_full_interface_classpath for lint. 
+ javac_full_classpath = OrderedSet() + javac_full_interface_classpath = OrderedSet() + for d in deps.Direct('android_app_bundle_module'): + javac_full_classpath.update(d['javac_full_classpath']) + javac_full_interface_classpath.update(d['javac_full_interface_classpath']) + javac_full_classpath.add(d['unprocessed_jar_path']) + javac_full_interface_classpath.add(d['interface_jar_path']) + deps_info['javac_full_classpath'] = list(javac_full_classpath) + deps_info['javac_full_interface_classpath'] = list( + javac_full_interface_classpath) + + if options.type in ('android_apk', 'android_app_bundle', + 'android_app_bundle_module', 'dist_aar', 'dist_jar'): + deps_info['device_classpath'] = device_classpath + if options.trace_events_jar_dir: + trace_event_rewritten_device_classpath = [] + for jar_path in device_classpath: + file_path = jar_path.replace('../', '') + file_path = file_path.replace('obj/', '') + file_path = file_path.replace('gen/', '') + file_path = file_path.replace('.jar', '.tracing_rewritten.jar') + rewritten_jar_path = os.path.join(options.trace_events_jar_dir, + file_path) + trace_event_rewritten_device_classpath.append(rewritten_jar_path) + + deps_info['trace_event_rewritten_device_classpath'] = ( + trace_event_rewritten_device_classpath) + + if options.tested_apk_config: + deps_info['device_classpath_extended'] = device_classpath_extended + + if options.type in ('android_apk', 'dist_jar'): + all_interface_jars = [] + if options.interface_jar_path: + all_interface_jars.append(options.interface_jar_path) + all_interface_jars.extend(c['interface_jar_path'] for c in all_library_deps) + + config['dist_jar'] = { + 'all_interface_jars': all_interface_jars, + } + + if is_apk_or_module_target: + manifest = AndroidManifest(options.android_manifest) + deps_info['package_name'] = manifest.GetPackageName() + if not options.tested_apk_config and manifest.GetInstrumentationElements(): + # This must then have instrumentation only for itself. + manifest.CheckInstrumentationElements(manifest.GetPackageName()) + + library_paths = [] + java_libraries_list = None + if options.shared_libraries_runtime_deps: + library_paths = _ExtractSharedLibsFromRuntimeDeps( + options.shared_libraries_runtime_deps) + java_libraries_list = _CreateJavaLibrariesList(library_paths) + all_inputs.append(options.shared_libraries_runtime_deps) + + secondary_abi_library_paths = [] + if options.secondary_abi_shared_libraries_runtime_deps: + secondary_abi_library_paths = _ExtractSharedLibsFromRuntimeDeps( + options.secondary_abi_shared_libraries_runtime_deps) + secondary_abi_library_paths.sort() + paths_without_parent_dirs = [ + p for p in secondary_abi_library_paths if os.path.sep not in p + ] + if paths_without_parent_dirs: + sys.stderr.write('Found secondary native libraries from primary ' + 'toolchain directory. 
This is a bug!\n') + sys.stderr.write('\n'.join(paths_without_parent_dirs)) + sys.stderr.write('\n\nIt may be helpful to run: \n') + sys.stderr.write(' gn path out/Default //chrome/android:' + 'monochrome_secondary_abi_lib //base:base\n') + sys.exit(1) + + all_inputs.append(options.secondary_abi_shared_libraries_runtime_deps) + + native_library_placeholder_paths = action_helpers.parse_gn_list( + options.native_lib_placeholders) + native_library_placeholder_paths.sort() + + secondary_native_library_placeholder_paths = action_helpers.parse_gn_list( + options.secondary_native_lib_placeholders) + secondary_native_library_placeholder_paths.sort() + + loadable_modules = action_helpers.parse_gn_list(options.loadable_modules) + loadable_modules.sort() + secondary_abi_loadable_modules = action_helpers.parse_gn_list( + options.secondary_abi_loadable_modules) + secondary_abi_loadable_modules.sort() + + config['native'] = { + 'libraries': + library_paths, + 'native_library_placeholders': + native_library_placeholder_paths, + 'secondary_abi_libraries': + secondary_abi_library_paths, + 'secondary_native_library_placeholders': + secondary_native_library_placeholder_paths, + 'java_libraries_list': + java_libraries_list, + 'library_always_compress': + options.library_always_compress, + 'loadable_modules': + loadable_modules, + 'secondary_abi_loadable_modules': + secondary_abi_loadable_modules, + } + + # Collect java resources + java_resources_jars = [d['java_resources_jar'] for d in all_library_deps + if 'java_resources_jar' in d] + if options.tested_apk_config: + tested_apk_resource_jars = [d['java_resources_jar'] + for d in tested_apk_library_deps + if 'java_resources_jar' in d] + java_resources_jars = [jar for jar in java_resources_jars + if jar not in tested_apk_resource_jars] + java_resources_jars.sort() + config['java_resources_jars'] = java_resources_jars + + if is_apk_or_module_target or options.type == 'robolectric_binary': + # android_resources deps which had recursive_resource_deps set should not + # have the manifests from the recursively collected deps added to this + # module. This keeps the manifest declarations in the child DFMs, since they + # will have the Java implementations. + def ExcludeRecursiveResourcesDeps(config): + return not config.get('includes_recursive_resources', False) + + extra_manifest_deps = [ + GetDepConfig(p) for p in GetAllDepsConfigsInOrder( + deps_configs_paths, filter_func=ExcludeRecursiveResourcesDeps) + ] + # Manifests are listed from highest priority to lowest priority. + # Ensure direct manifests come first, and then sort the rest by name. + # https://developer.android.com/build/manage-manifests#merge_priorities + config['extra_android_manifests'] = list(mergeable_android_manifests) + manifests_from_deps = [] + for c in extra_manifest_deps: + manifests_from_deps += c.get('mergeable_android_manifests', []) + manifests_from_deps.sort(key=lambda p: (os.path.basename(p), p)) + config['extra_android_manifests'] += manifests_from_deps + + config['assets'], config['uncompressed_assets'], locale_paks = ( + _MergeAssets(deps.All('android_assets'))) + deps_info['locales_java_list'] = _CreateJavaLocaleListFromAssets( + config['uncompressed_assets'], locale_paks) + + if options.java_resources_jar_path: + deps_info['java_resources_jar'] = options.java_resources_jar_path + + # DYNAMIC FEATURE MODULES: + # There are two approaches to dealing with module dependencies: + # 1) Perform steps in android_apk_or_module(), with only the knowledge of + # ancestor splits.
Our implementation currently allows only for 2 levels: + # base -> parent -> leaf + # Bundletool normally fails if two leaf nodes merge the same manifest or + # resources. The fix is to add the common dep to the chrome or base module + # so that our deduplication logic will work. + # RemoveObjDups() implements this approach. + # 2) Perform steps in android_app_bundle(), with knowledge of the full set of + # modules. This is required for dex because it can handle the case of two + # leaf nodes having the same dep, and promoting that dep to their common + # parent. + # _DedupFeatureModuleSharedCode() implements this approach. + if base_module_build_config: + ancestors = [base_module_build_config] + if parent_module_build_config is not base_module_build_config: + ancestors += [parent_module_build_config] + for ancestor in ancestors: + RemoveObjDups(config, ancestor, 'deps_info', 'dependency_zips') + RemoveObjDups(config, ancestor, 'deps_info', 'dependency_zip_overlays') + RemoveObjDups(config, ancestor, 'deps_info', 'extra_package_names') + RemoveObjDups(config, ancestor, 'deps_info', 'jni_all_source') + RemoveObjDups(config, ancestor, 'extra_android_manifests') + + if is_java_target: + jar_to_target = {} + _AddJarMapping(jar_to_target, [deps_info]) + _AddJarMapping(jar_to_target, all_deps) + if base_module_build_config: + _AddJarMapping(jar_to_target, [base_module_build_config['deps_info']]) + if parent_module_build_config is not base_module_build_config: + _AddJarMapping(jar_to_target, [parent_module_build_config['deps_info']]) + if options.tested_apk_config: + _AddJarMapping(jar_to_target, [tested_apk_config]) + for jar, target in zip(tested_apk_config['javac_full_classpath'], + tested_apk_config['javac_full_classpath_targets']): + jar_to_target[jar] = target + + # Used by bytecode_processor to give a better error message when missing + # deps are found. Both javac_full_classpath_targets and javac_full_classpath + # must be in identical order, as they get passed as separate arrays and + # then paired up based on index. + config['deps_info']['javac_full_classpath_targets'] = [ + jar_to_target[x] for x in deps_info['javac_full_classpath'] + ] + + build_utils.WriteJson(config, options.build_config, only_if_changed=True) + + if options.depfile: + action_helpers.write_depfile(options.depfile, options.build_config, + sorted(set(all_inputs))) + + if options.store_deps_for_debugging_to: + GetDepConfig(options.build_config) # Add it to cache.
+ _CopyBuildConfigsForDebugging(options.store_deps_for_debugging_to) + + return 0 + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/android/gyp/write_build_config.pydeps b/android/gyp/write_build_config.pydeps new file mode 100644 index 000000000000..fa7209c2798d --- /dev/null +++ b/android/gyp/write_build_config.pydeps @@ -0,0 +1,30 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/write_build_config.pydeps build/android/gyp/write_build_config.py +../../../third_party/jinja2/__init__.py +../../../third_party/jinja2/_identifier.py +../../../third_party/jinja2/async_utils.py +../../../third_party/jinja2/bccache.py +../../../third_party/jinja2/compiler.py +../../../third_party/jinja2/defaults.py +../../../third_party/jinja2/environment.py +../../../third_party/jinja2/exceptions.py +../../../third_party/jinja2/filters.py +../../../third_party/jinja2/idtracking.py +../../../third_party/jinja2/lexer.py +../../../third_party/jinja2/loaders.py +../../../third_party/jinja2/nodes.py +../../../third_party/jinja2/optimizer.py +../../../third_party/jinja2/parser.py +../../../third_party/jinja2/runtime.py +../../../third_party/jinja2/tests.py +../../../third_party/jinja2/utils.py +../../../third_party/jinja2/visitor.py +../../../third_party/markupsafe/__init__.py +../../../third_party/markupsafe/_compat.py +../../../third_party/markupsafe/_native.py +../../action_helpers.py +../../gn_helpers.py +util/__init__.py +util/build_utils.py +util/resource_utils.py +write_build_config.py diff --git a/android/gyp/write_native_libraries_java.py b/android/gyp/write_native_libraries_java.py new file mode 100755 index 000000000000..fb4d2ad18334 --- /dev/null +++ b/android/gyp/write_native_libraries_java.py @@ -0,0 +1,124 @@ +#!/usr/bin/env python3 +# +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Writes list of native libraries to srcjar file.""" + +import argparse +import os +import sys +import zipfile + +from util import build_utils +import action_helpers # build_utils adds //build to sys.path. +import zip_helpers + + +_NATIVE_LIBRARIES_TEMPLATE = """\ +// This file is autogenerated by +// build/android/gyp/write_native_libraries_java.py +// Please do not change its content. + +package org.chromium.build; + +public class NativeLibraries {{ + public static final int CPU_FAMILY_UNKNOWN = 0; + public static final int CPU_FAMILY_ARM = 1; + public static final int CPU_FAMILY_MIPS = 2; + public static final int CPU_FAMILY_X86 = 3; + + // Set to true to enable the use of the Chromium Linker. + public static {MAYBE_FINAL}boolean sUseLinker{USE_LINKER}; + + // This is the list of native libraries to be loaded (in the correct order) + // by LibraryLoader.java. + public static {MAYBE_FINAL}String[] LIBRARIES = {{{LIBRARIES}}}; + + public static {MAYBE_FINAL}int sCpuFamily = {CPU_FAMILY}; +}} +""" + + +def _FormatLibraryName(library_name): + filename = os.path.split(library_name)[1] + assert filename.startswith('lib') + assert filename.endswith('.so') + # Remove lib prefix and .so suffix. 
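+  # e.g. (illustrative path): 'out/Default/libmonochrome.so' -> '"monochrome"'.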
+ return '"%s"' % filename[3:-3] + + +def main(): + parser = argparse.ArgumentParser() + + action_helpers.add_depfile_arg(parser) + parser.add_argument('--final', action='store_true', help='Use final fields.') + parser.add_argument( + '--enable-chromium-linker', + action='store_true', + help='Enable Chromium linker.') + parser.add_argument( + '--native-libraries-list', help='File with list of native libraries.') + parser.add_argument( + '--cpu-family', + choices={ + 'CPU_FAMILY_ARM', 'CPU_FAMILY_X86', 'CPU_FAMILY_MIPS', + 'CPU_FAMILY_UNKNOWN' + }, + required=True, + default='CPU_FAMILY_UNKNOWN', + help='CPU family.') + parser.add_argument( + '--main-component-library', + help='If used, the list of native libraries will only contain this ' + 'library. Dependencies are found in the library\'s "NEEDED" section.') + + parser.add_argument( + '--output', required=True, help='Path to the generated srcjar file.') + + options = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:])) + + native_libraries = [] + if options.main_component_library: + native_libraries.append(options.main_component_library) + elif options.native_libraries_list: + with open(options.native_libraries_list) as f: + native_libraries.extend(l.strip() for l in f) + + if options.enable_chromium_linker and len(native_libraries) > 1: + sys.stderr.write( + 'Multiple libraries not supported when using chromium linker. Found:\n') + sys.stderr.write('\n'.join(native_libraries)) + sys.stderr.write('\n') + sys.exit(1) + + def bool_str(value): + if value: + return ' = true' + if options.final: + return ' = false' + return '' + + format_dict = { + 'MAYBE_FINAL': 'final ' if options.final else '', + 'USE_LINKER': bool_str(options.enable_chromium_linker), + 'LIBRARIES': ','.join(_FormatLibraryName(n) for n in native_libraries), + 'CPU_FAMILY': options.cpu_family, + } + with action_helpers.atomic_output(options.output) as f: + with zipfile.ZipFile(f.name, 'w') as srcjar_file: + zip_helpers.add_to_zip_hermetic( + zip_file=srcjar_file, + zip_path='org/chromium/build/NativeLibraries.java', + data=_NATIVE_LIBRARIES_TEMPLATE.format(**format_dict)) + + if options.depfile: + assert options.native_libraries_list + action_helpers.write_depfile(options.depfile, + options.output, + inputs=[options.native_libraries_list]) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/android/gyp/write_native_libraries_java.pydeps b/android/gyp/write_native_libraries_java.pydeps new file mode 100644 index 000000000000..c47e1652ce0f --- /dev/null +++ b/android/gyp/write_native_libraries_java.pydeps @@ -0,0 +1,8 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/write_native_libraries_java.pydeps build/android/gyp/write_native_libraries_java.py +../../action_helpers.py +../../gn_helpers.py +../../zip_helpers.py +util/__init__.py +util/build_utils.py +write_native_libraries_java.py diff --git a/android/gyp/zip.py b/android/gyp/zip.py new file mode 100755 index 000000000000..f4b4acfb2efd --- /dev/null +++ b/android/gyp/zip.py @@ -0,0 +1,83 @@ +#!/usr/bin/env python3 +# +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Archives a set of files.""" + +import argparse +import json +import os +import sys +import zipfile + +from util import build_utils +import action_helpers # build_utils adds //build to sys.path. 
+import zip_helpers + + +def main(args): + args = build_utils.ExpandFileArgs(args) + parser = argparse.ArgumentParser() + parser.add_argument('--input-files', help='GN-list of files to zip.') + parser.add_argument( + '--input-files-base-dir', + help='Paths in the archive will be relative to this directory') + parser.add_argument('--input-zips', help='GN-list of zips to merge.') + parser.add_argument( + '--input-zips-excluded-globs', + help='GN-list of globs for paths to exclude.') + parser.add_argument('--output', required=True, help='Path to output archive.') + compress_group = parser.add_mutually_exclusive_group() + compress_group.add_argument( + '--compress', action='store_true', help='Compress entries') + compress_group.add_argument( + '--no-compress', + action='store_false', + dest='compress', + help='Do not compress entries') + parser.add_argument('--comment-json', + action='append', + metavar='KEY=VALUE', + type=lambda x: x.split('=', 1), + help='Entry to store in JSON-encoded archive comment.') + action_helpers.add_depfile_arg(parser) + options = parser.parse_args(args) + + with action_helpers.atomic_output(options.output) as f: + with zipfile.ZipFile(f.name, 'w') as out_zip: + depfile_deps = None + if options.input_files: + files = action_helpers.parse_gn_list(options.input_files) + zip_helpers.add_files_to_zip(files, + out_zip, + base_dir=options.input_files_base_dir, + compress=options.compress) + + if options.input_zips: + files = action_helpers.parse_gn_list(options.input_zips) + depfile_deps = files + path_transform = None + if options.input_zips_excluded_globs: + globs = action_helpers.parse_gn_list( + options.input_zips_excluded_globs) + path_transform = ( + lambda p: None if build_utils.MatchesGlob(p, globs) else p) + zip_helpers.merge_zips(out_zip, + files, + path_transform=path_transform, + compress=options.compress) + + if options.comment_json: + out_zip.comment = json.dumps(dict(options.comment_json), + sort_keys=True).encode('utf-8') + + # Depfile used only by dist_jar(). + if options.depfile: + action_helpers.write_depfile(options.depfile, + options.output, + inputs=depfile_deps) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/android/gyp/zip.pydeps b/android/gyp/zip.pydeps new file mode 100644 index 000000000000..973fe436c2e4 --- /dev/null +++ b/android/gyp/zip.pydeps @@ -0,0 +1,8 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/zip.pydeps build/android/gyp/zip.py +../../action_helpers.py +../../gn_helpers.py +../../zip_helpers.py +util/__init__.py +util/build_utils.py +zip.py diff --git a/android/host_heartbeat.py b/android/host_heartbeat.py new file mode 100755 index 000000000000..f22c2d7e8e0f --- /dev/null +++ b/android/host_heartbeat.py @@ -0,0 +1,36 @@ +#!/usr/bin/env vpython3 +# +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Sends a heartbeat pulse to the currently online Android devices. +This heartbeat lets the devices know that they are connected to a host.
+""" +# pylint: disable=W0702 + +import sys +import time + +import devil_chromium +from devil.android import device_utils + +PULSE_PERIOD = 20 + +def main(): + devil_chromium.Initialize() + + while True: + try: + devices = device_utils.DeviceUtils.HealthyDevices(denylist=None) + for d in devices: + d.RunShellCommand(['touch', '/sdcard/host_heartbeat'], + check_return=True) + except: + # Keep the heatbeat running bypassing all errors. + pass + time.sleep(PULSE_PERIOD) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/android/incremental_install/BUILD.gn b/android/incremental_install/BUILD.gn new file mode 100644 index 000000000000..e2134dd14990 --- /dev/null +++ b/android/incremental_install/BUILD.gn @@ -0,0 +1,24 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/rules.gni") + +android_library("bootstrap_java") { + sources = [ + "java/org/chromium/incrementalinstall/BootstrapApplication.java", + "java/org/chromium/incrementalinstall/BootstrapInstrumentation.java", + "java/org/chromium/incrementalinstall/ClassLoaderPatcher.java", + "java/org/chromium/incrementalinstall/LockFile.java", + "java/org/chromium/incrementalinstall/Reflect.java", + "java/org/chromium/incrementalinstall/SecondInstrumentation.java", + ] + deps = [ "third_party/AndroidHiddenApiBypass:hidden_api_bypass_java" ] + jacoco_never_instrument = true + no_build_hooks = true +} + +dist_dex("apk_dex") { + output = "$target_out_dir/apk.dex" + deps = [ ":bootstrap_java" ] +} diff --git a/android/incremental_install/README.md b/android/incremental_install/README.md new file mode 100644 index 000000000000..9a27b8c5a65c --- /dev/null +++ b/android/incremental_install/README.md @@ -0,0 +1,83 @@ +# Incremental Install + +Incremental Install is a way of building & deploying an APK that tries to +minimize the time it takes to make a change and see that change running on +device. They work best with `is_component_build=true`, and do *not* require a +rooted device. + +## Building + +Add the gn arg: + + incremental_install = true + +This causes all apks to be built as incremental except for denylisted ones. + +## Running + +It is not enough to `adb install` them. You must use the generated wrapper +script: + + out/Debug/bin/your_apk run + out/Debug/bin/run_chrome_public_test_apk # Automatically sets --fast-local-dev + +# How it Works + +## Overview + +The basic idea is to sideload .dex and .so files to `/data/local/tmp` rather +than bundling them in the .apk. Then, when making a change, only the changed +.dex / .so needs to be pushed to the device. + +Faster Builds: + + * No `final_dex` step (where all .dex files are merged into one) + * No need to rebuild .apk for code-only changes (but required for resources) + * Apks sign faster because they are smaller. + +Faster Installs: + + * The .apk is smaller, and so faster to verify. + * No need to run `adb install` for code-only changes. + * Only changed .so / .dex files are pushed. MD5s of existing on-device files + are cached on host computer. + +Slower Initial Runs: + + * The first time you run an incremental .apk, the `DexOpt` needs to run on all + .dex files. This step is normally done during `adb install`, but is done on + start-up for incremental apks. + * DexOpt results are cached, so subsequent runs are faster. + * The slowdown varies significantly based on the Android version. Android O+ + has almost no visible slow-down. 
+ +Caveats: + * Isolated processes (on L+) are incompatible with incremental install. As a + work-around, isolated processes are disabled when building incremental apks. + * Android resources, assets, and `loadable_modules` are not sideloaded (they + remain in the apk), so builds & installs that modify any of these are not as + fast as those that modify only .java / .cc. + * Since files are sideloaded to `/data/local/tmp`, you need to use the wrapper + scripts to uninstall them fully. E.g.: + ```shell + out/Default/bin/chrome_public_apk uninstall + ``` + +## The Code + +All incremental apks have the same classes.dex, which is built from: + + //build/android/incremental_install:bootstrap_java + +They also have a transformed `AndroidManifest.xml`, which overrides the +main application class and any instrumentation classes so that they instead +point to `BootstrapApplication`. This is built by: + + //build/android/incremental_install/generate_android_manifest.py + +Wrapper scripts and install logic are contained in: + + //build/android/incremental_install/create_install_script.py + //build/android/incremental_install/installer.py + +Finally, GN logic for incremental apks is sprinkled throughout. diff --git a/android/incremental_install/__init__.py b/android/incremental_install/__init__.py new file mode 100644 index 000000000000..a43e6af7224b --- /dev/null +++ b/android/incremental_install/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. diff --git a/android/incremental_install/generate_android_manifest.py b/android/incremental_install/generate_android_manifest.py new file mode 100755 index 000000000000..ffa26c20b924 --- /dev/null +++ b/android/incremental_install/generate_android_manifest.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python3 +# +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Creates an AndroidManifest.xml for an incremental APK. + +Given the manifest file for the real APK, generates an AndroidManifest.xml with +the application class changed to IncrementalApplication. +""" + +import argparse +import os +import sys +from xml.etree import ElementTree + +sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir, 'gyp')) +from util import build_utils +from util import manifest_utils +import action_helpers # build_utils adds //build to sys.path. + +_INCREMENTAL_APP_NAME = 'org.chromium.incrementalinstall.BootstrapApplication' +_META_DATA_APP_NAME = 'incremental-install-real-app' +_DEFAULT_APPLICATION_CLASS = 'android.app.Application' +_META_DATA_INSTRUMENTATION_NAMES = [ + 'incremental-install-real-instrumentation-0', + 'incremental-install-real-instrumentation-1', +] +_INCREMENTAL_INSTRUMENTATION_CLASSES = [ + 'android.app.Instrumentation', + 'org.chromium.incrementalinstall.SecondInstrumentation', +] + + +def _AddNamespace(name): + """Adds the android namespace prefix to the given identifier.""" + return '{%s}%s' % (manifest_utils.ANDROID_NAMESPACE, name) + + +def _ParseArgs(args): + parser = argparse.ArgumentParser() + parser.add_argument('--src-manifest', + required=True, + help='The main manifest of the app.') + parser.add_argument('--dst-manifest', + required=True, + help='The output modified manifest.') + parser.add_argument('--disable-isolated-processes', + help='Changes all android:isolatedProcess to false.
' + 'This is required on Android M+', + action='store_true') + + ret = parser.parse_args(build_utils.ExpandFileArgs(args)) + return ret + + +def _CreateMetaData(parent, name, value): + meta_data_node = ElementTree.SubElement(parent, 'meta-data') + meta_data_node.set(_AddNamespace('name'), name) + meta_data_node.set(_AddNamespace('value'), value) + + +def _ProcessManifest(path, disable_isolated_processes): + doc, _, app_node = manifest_utils.ParseManifest(path) + + # Pylint for some reason thinks app_node is an int. + # pylint: disable=no-member + real_app_class = app_node.get(_AddNamespace('name'), + _DEFAULT_APPLICATION_CLASS) + app_node.set(_AddNamespace('name'), _INCREMENTAL_APP_NAME) + # pylint: enable=no-member + _CreateMetaData(app_node, _META_DATA_APP_NAME, real_app_class) + + # Seems to be a bug in ElementTree, as doc.find() doesn't work here. + instrumentation_nodes = doc.findall('instrumentation') + assert len(instrumentation_nodes) <= 2, ( + 'Need to update incremental install to support >2 tags') + for i, instrumentation_node in enumerate(instrumentation_nodes): + real_instrumentation_class = instrumentation_node.get(_AddNamespace('name')) + instrumentation_node.set(_AddNamespace('name'), + _INCREMENTAL_INSTRUMENTATION_CLASSES[i]) + _CreateMetaData(app_node, _META_DATA_INSTRUMENTATION_NAMES[i], + real_instrumentation_class) + + ret = ElementTree.tostring(doc.getroot(), encoding='UTF-8') + # Disable check for page-aligned native libraries. + ret = ret.replace(b'extractNativeLibs="false"', b'extractNativeLibs="true"') + if disable_isolated_processes: + ret = ret.replace(b'isolatedProcess="true"', b'isolatedProcess="false"') + # externalService only matters for isolatedProcess="true". See: + # https://developer.android.com/reference/android/R.attr#externalService + ret = ret.replace(b'externalService="true"', b'externalService="false"') + return ret + + +def main(raw_args): + options = _ParseArgs(raw_args) + + new_manifest_data = _ProcessManifest(options.src_manifest, + options.disable_isolated_processes) + with action_helpers.atomic_output(options.dst_manifest) as out_manifest: + out_manifest.write(new_manifest_data) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/android/incremental_install/generate_android_manifest.pydeps b/android/incremental_install/generate_android_manifest.pydeps new file mode 100644 index 000000000000..68c832bccb72 --- /dev/null +++ b/android/incremental_install/generate_android_manifest.pydeps @@ -0,0 +1,8 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/incremental_install --output build/android/incremental_install/generate_android_manifest.pydeps build/android/incremental_install/generate_android_manifest.py +../../action_helpers.py +../../gn_helpers.py +../gyp/util/__init__.py +../gyp/util/build_utils.py +../gyp/util/manifest_utils.py +generate_android_manifest.py diff --git a/android/incremental_install/installer.py b/android/incremental_install/installer.py new file mode 100755 index 000000000000..68e28b48a3e6 --- /dev/null +++ b/android/incremental_install/installer.py @@ -0,0 +1,374 @@ +#!/usr/bin/env vpython3 +# +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file.
+ +"""Install *_incremental.apk targets as well as their dependent files.""" + +import argparse +import collections +import functools +import glob +import hashlib +import json +import logging +import os +import posixpath +import shutil +import sys + +sys.path.append( + os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))) +import devil_chromium +from devil.android import apk_helper +from devil.android import device_utils +from devil.utils import reraiser_thread +from devil.utils import run_tests_helper +from pylib import constants +from pylib.utils import time_profile + +prev_sys_path = list(sys.path) +sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir, 'gyp')) +import dex +from util import build_utils +sys.path = prev_sys_path + + +_R8_PATH = os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'r8', 'lib', + 'r8.jar') +_SHARD_JSON_FILENAME = 'shards.json' + + +def _DeviceCachePath(device): + file_name = 'device_cache_%s.json' % device.adb.GetDeviceSerial() + return os.path.join(constants.GetOutDirectory(), file_name) + + +def _Execute(concurrently, *funcs): + """Calls all functions in |funcs| concurrently or in sequence.""" + timer = time_profile.TimeProfile() + if concurrently: + reraiser_thread.RunAsync(funcs) + else: + for f in funcs: + f() + timer.Stop(log=False) + return timer + + +def _GetDeviceIncrementalDir(package): + """Returns the device path to put incremental files for the given package.""" + return '/data/local/tmp/incremental-app-%s' % package + + +def _IsStale(src_paths, old_src_paths, dest_path): + """Returns if |dest| is older than any of |src_paths|, or missing.""" + if not os.path.exists(dest_path): + return True + # Always mark as stale if any paths were added or removed. + if set(src_paths) != set(old_src_paths): + return True + dest_time = os.path.getmtime(dest_path) + for path in src_paths: + if os.path.getmtime(path) > dest_time: + return True + return False + + +def _LoadPrevShards(dex_staging_dir): + shards_json_path = os.path.join(dex_staging_dir, _SHARD_JSON_FILENAME) + if not os.path.exists(shards_json_path): + return {} + with open(shards_json_path) as f: + return json.load(f) + + +def _SaveNewShards(shards, dex_staging_dir): + shards_json_path = os.path.join(dex_staging_dir, _SHARD_JSON_FILENAME) + with open(shards_json_path, 'w') as f: + json.dump(shards, f) + + +def _AllocateDexShards(dex_files): + """Divides input dex files into buckets.""" + # Goals: + # * Make shards small enough that they are fast to merge. + # * Minimize the number of shards so they load quickly on device. + # * Partition files into shards such that a change in one file results in only + # one shard having to be re-created. + shards = collections.defaultdict(list) + # As of Oct 2019, 10 shards results in a min/max size of 582K/2.6M. + NUM_CORE_SHARDS = 10 + # As of Oct 2019, 17 dex files are larger than 1M. + SHARD_THRESHOLD = 2**20 + for src_path in dex_files: + if os.path.getsize(src_path) >= SHARD_THRESHOLD: + # Use the path as the name rather than an incrementing number to ensure + # that it shards to the same name every time. + name = os.path.relpath(src_path, constants.GetOutDirectory()).replace( + os.sep, '.') + shards[name].append(src_path) + else: + # The stdlib hash(string) function is salted differently across python3 + # invocations. Thus we use md5 instead to consistently shard the same + # file to the same shard across runs. 
+ hex_hash = hashlib.md5(src_path.encode('utf-8')).hexdigest() + name = 'shard{}.dex.jar'.format(int(hex_hash, 16) % NUM_CORE_SHARDS) + shards[name].append(src_path) + logging.info('Sharding %d dex files into %d buckets', len(dex_files), + len(shards)) + return shards + + +def _CreateDexFiles(shards, prev_shards, dex_staging_dir, min_api, + use_concurrency): + """Creates dex files within |dex_staging_dir| defined by |shards|.""" + tasks = [] + for name, src_paths in shards.items(): + dest_path = os.path.join(dex_staging_dir, name) + if _IsStale(src_paths=src_paths, + old_src_paths=prev_shards.get(name, []), + dest_path=dest_path): + tasks.append( + functools.partial(dex.MergeDexForIncrementalInstall, _R8_PATH, + src_paths, dest_path, min_api)) + + # TODO(agrieve): It would be more performant to write a custom d8.jar + # wrapper in java that would process these in bulk, rather than spinning + # up a new process for each one. + _Execute(use_concurrency, *tasks) + + # Remove any stale shards. + for name in os.listdir(dex_staging_dir): + if name not in shards: + os.unlink(os.path.join(dex_staging_dir, name)) + + +def Uninstall(device, package, enable_device_cache=False): + """Uninstalls and removes all incremental files for the given package.""" + main_timer = time_profile.TimeProfile() + device.Uninstall(package) + if enable_device_cache: + # Uninstall is rare, so just wipe the cache in this case. + cache_path = _DeviceCachePath(device) + if os.path.exists(cache_path): + os.unlink(cache_path) + device.RunShellCommand(['rm', '-rf', _GetDeviceIncrementalDir(package)], + check_return=True) + logging.info('Uninstall took %s seconds.', main_timer.GetDelta()) + + +def Install(device, install_json, apk=None, enable_device_cache=False, + use_concurrency=True, permissions=()): + """Installs the given incremental apk and all required supporting files. + + Args: + device: A DeviceUtils instance (to install to). + install_json: Path to .json file or already parsed .json object. + apk: An existing ApkHelper instance for the apk (optional). + enable_device_cache: Whether to enable on-device caching of checksums. + use_concurrency: Whether to speed things up using multiple threads. + permissions: A list of the permissions to grant, or None to grant all + non-denylisted permissions in the manifest. + """ + if isinstance(install_json, str): + with open(install_json) as f: + install_dict = json.load(f) + else: + install_dict = install_json + + main_timer = time_profile.TimeProfile() + install_timer = time_profile.TimeProfile() + push_native_timer = time_profile.TimeProfile() + merge_dex_timer = time_profile.TimeProfile() + push_dex_timer = time_profile.TimeProfile() + + def fix_path(p): + return os.path.normpath(os.path.join(constants.GetOutDirectory(), p)) + + if not apk: + apk = apk_helper.ToHelper(fix_path(install_dict['apk_path'])) + split_globs = [fix_path(p) for p in install_dict['split_globs']] + native_libs = [fix_path(p) for p in install_dict['native_libs']] + dex_files = [fix_path(p) for p in install_dict['dex_files']] + show_proguard_warning = install_dict.get('show_proguard_warning') + + apk_package = apk.GetPackageName() + device_incremental_dir = _GetDeviceIncrementalDir(apk_package) + dex_staging_dir = os.path.join(constants.GetOutDirectory(), + 'incremental-install', + install_dict['apk_path']) + device_dex_dir = posixpath.join(device_incremental_dir, 'dex') + + # Install .apk(s) if any of them have changed. 
+ def do_install(): + install_timer.Start() + if split_globs: + splits = [] + for split_glob in split_globs: + splits.extend((f for f in glob.glob(split_glob))) + device.InstallSplitApk( + apk, + splits, + allow_downgrade=True, + reinstall=True, + allow_cached_props=True, + permissions=permissions) + else: + device.Install( + apk, allow_downgrade=True, reinstall=True, permissions=permissions) + install_timer.Stop(log=False) + + # Push .so and .dex files to the device (if they have changed). + def do_push_files(): + + def do_push_native(): + push_native_timer.Start() + if native_libs: + with build_utils.TempDir() as temp_dir: + device_lib_dir = posixpath.join(device_incremental_dir, 'lib') + for path in native_libs: + # Note: Can't use symlinks as they don't work when + # "adb push parent_dir" is used (like we do here). + shutil.copy(path, os.path.join(temp_dir, os.path.basename(path))) + device.PushChangedFiles([(temp_dir, device_lib_dir)], + delete_device_stale=True) + push_native_timer.Stop(log=False) + + def do_merge_dex(): + merge_dex_timer.Start() + prev_shards = _LoadPrevShards(dex_staging_dir) + shards = _AllocateDexShards(dex_files) + build_utils.MakeDirectory(dex_staging_dir) + _CreateDexFiles(shards, prev_shards, dex_staging_dir, + apk.GetMinSdkVersion(), use_concurrency) + # New shard information must be saved after _CreateDexFiles since + # _CreateDexFiles removes all non-dex files from the staging dir. + _SaveNewShards(shards, dex_staging_dir) + merge_dex_timer.Stop(log=False) + + def do_push_dex(): + push_dex_timer.Start() + device.PushChangedFiles([(dex_staging_dir, device_dex_dir)], + delete_device_stale=True) + push_dex_timer.Stop(log=False) + + _Execute(use_concurrency, do_push_native, do_merge_dex) + do_push_dex() + + cache_path = _DeviceCachePath(device) + def restore_cache(): + if not enable_device_cache: + return + if os.path.exists(cache_path): + logging.info('Using device cache: %s', cache_path) + with open(cache_path) as f: + device.LoadCacheData(f.read()) + # Delete the cached file so that any exceptions cause it to be cleared. + os.unlink(cache_path) + else: + logging.info('No device cache present: %s', cache_path) + + def save_cache(): + if not enable_device_cache: + return + with open(cache_path, 'w') as f: + f.write(device.DumpCacheData()) + logging.info('Wrote device cache: %s', cache_path) + + # Create 2 lock files: + # * install.lock tells the app to pause on start-up (until we release it). + # * firstrun.lock is used by the app to pause all secondary processes until + # the primary process finishes loading the .dex / .so files. + def create_lock_files(): + # Creates or zeros out lock files. + cmd = ('D="%s";' + 'mkdir -p $D &&' + 'echo -n >$D/install.lock 2>$D/firstrun.lock') + device.RunShellCommand( + cmd % device_incremental_dir, shell=True, check_return=True) + + # The firstrun.lock is released by the app itself. + def release_installer_lock(): + device.RunShellCommand('echo > %s/install.lock' % device_incremental_dir, + check_return=True, shell=True) + + # Concurrency here speeds things up quite a bit, but DeviceUtils hasn't + # been designed for multi-threading. Enabling only because this is a + # developer-only tool. 
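+  # The three _Execute() calls below run the phases in order: (1) create/zero
+  # the lock files and restore the cached device state, (2) install the APK
+  # while pushing native libs and merged dex shards, and (3) release the
+  # installer lock and persist the updated device cache.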
+ setup_timer = _Execute(use_concurrency, create_lock_files, restore_cache) + + _Execute(use_concurrency, do_install, do_push_files) + + finalize_timer = _Execute(use_concurrency, release_installer_lock, save_cache) + + logging.info( + 'Install of %s took %s seconds (setup=%s, install=%s, lib_push=%s, ' + 'dex_merge=%s dex_push=%s, finalize=%s)', os.path.basename(apk.path), + main_timer.GetDelta(), setup_timer.GetDelta(), install_timer.GetDelta(), + push_native_timer.GetDelta(), merge_dex_timer.GetDelta(), + push_dex_timer.GetDelta(), finalize_timer.GetDelta()) + if show_proguard_warning: + logging.warning('Target had proguard enabled, but incremental install uses ' + 'non-proguarded .dex files. Performance characteristics ' + 'may differ.') + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('json_path', + help='The path to the generated incremental apk .json.') + parser.add_argument('-d', '--device', dest='device', + help='Target device for apk to install on.') + parser.add_argument('--uninstall', + action='store_true', + default=False, + help='Remove the app and all side-loaded files.') + parser.add_argument('--output-directory', + help='Path to the root build directory.') + parser.add_argument('--no-threading', + action='store_false', + default=True, + dest='threading', + help='Do not install and push concurrently') + parser.add_argument('--no-cache', + action='store_false', + default=True, + dest='cache', + help='Do not use cached information about what files are ' + 'currently on the target device.') + parser.add_argument('-v', + '--verbose', + dest='verbose_count', + default=0, + action='count', + help='Verbose level (multiple times for more)') + + args = parser.parse_args() + + run_tests_helper.SetLogLevel(args.verbose_count) + if args.output_directory: + constants.SetOutputDirectory(args.output_directory) + + devil_chromium.Initialize(output_directory=constants.GetOutDirectory()) + + # Retries are annoying when commands fail for legitimate reasons. Might want + # to enable them if this is ever used on bots though. + device = device_utils.DeviceUtils.HealthyDevices( + device_arg=args.device, + default_retries=0, + enable_device_files_cache=True)[0] + + if args.uninstall: + with open(args.json_path) as f: + install_dict = json.load(f) + apk = apk_helper.ToHelper(install_dict['apk_path']) + Uninstall(device, apk.GetPackageName(), enable_device_cache=args.cache) + else: + Install(device, args.json_path, enable_device_cache=args.cache, + use_concurrency=args.threading) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java b/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java new file mode 100644 index 000000000000..f88297050758 --- /dev/null +++ b/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapApplication.java @@ -0,0 +1,297 @@ +// Copyright 2015 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+
+package org.chromium.incrementalinstall;
+
+import android.app.Application;
+import android.app.Instrumentation;
+import android.content.Context;
+import android.content.pm.ApplicationInfo;
+import android.content.pm.PackageManager;
+import android.content.pm.PackageManager.NameNotFoundException;
+import android.os.Bundle;
+import android.util.Log;
+
+import dalvik.system.DexFile;
+
+import java.io.File;
+import java.lang.ref.WeakReference;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * An Application that replaces itself with another Application (as defined in
+ * an AndroidManifest.xml meta-data tag). It loads the other application only
+ * after side-loading its .so and .dex files from /data/local/tmp.
+ *
+ * This class is highly dependent on the private implementation details of
+ * Android's ActivityThread.java. However, it has been tested to work with
+ * JellyBean through Marshmallow.
+ */
+public final class BootstrapApplication extends Application {
+    private static final String TAG = "incrementalinstall";
+    private static final String MANAGED_DIR_PREFIX = "/data/local/tmp/incremental-app-";
+    private static final String REAL_APP_META_DATA_NAME = "incremental-install-real-app";
+    private static final String REAL_INSTRUMENTATION_META_DATA_NAME0 =
+            "incremental-install-real-instrumentation-0";
+    private static final String REAL_INSTRUMENTATION_META_DATA_NAME1 =
+            "incremental-install-real-instrumentation-1";
+
+    private ClassLoaderPatcher mClassLoaderPatcher;
+    private Application mRealApplication;
+    private Instrumentation mOrigInstrumentation;
+    private Instrumentation mRealInstrumentation;
+    private Object mStashedProviderList;
+    private Object mActivityThread;
+    public static DexFile[] sIncrementalDexFiles; // Needed by junit test runner.
+
+    @Override
+    protected void attachBaseContext(Context context) {
+        super.attachBaseContext(context);
+        try {
+            mActivityThread = Reflect.invokeMethod(Class.forName("android.app.ActivityThread"),
+                    "currentActivityThread");
+            mClassLoaderPatcher = new ClassLoaderPatcher(context);
+
+            mOrigInstrumentation =
+                    (Instrumentation) Reflect.getField(mActivityThread, "mInstrumentation");
+            Context instContext = mOrigInstrumentation.getContext();
+            if (instContext == null) {
+                instContext = context;
+            }
+
+            // When running with an instrumentation that lives in a different package from the
+            // application, we must load the dex files and native libraries from both packages.
+            // This logic likely won't work when the instrumentation is incremental, but the app is
+            // non-incremental. This configuration isn't used right now though.
+ String appPackageName = getPackageName(); + String instPackageName = instContext.getPackageName(); + boolean instPackageNameDiffers = !appPackageName.equals(instPackageName); + Log.i(TAG, "App PackageName: " + appPackageName); + if (instPackageNameDiffers) { + Log.i(TAG, "Inst PackageName: " + instPackageName); + } + + File appIncrementalRootDir = new File(MANAGED_DIR_PREFIX + appPackageName); + File appLibDir = new File(appIncrementalRootDir, "lib"); + File appDexDir = new File(appIncrementalRootDir, "dex"); + File appInstallLockFile = new File(appIncrementalRootDir, "install.lock"); + File appFirstRunLockFile = new File(appIncrementalRootDir, "firstrun.lock"); + File instIncrementalRootDir = new File(MANAGED_DIR_PREFIX + instPackageName); + File instLibDir = new File(instIncrementalRootDir, "lib"); + File instDexDir = new File(instIncrementalRootDir, "dex"); + File instInstallLockFile = new File(instIncrementalRootDir, "install.lock"); + File instFirstRunLockFile = new File(instIncrementalRootDir, "firstrun.lock"); + + boolean isFirstRun = LockFile.installerLockExists(appFirstRunLockFile) + || (instPackageNameDiffers + && LockFile.installerLockExists(instFirstRunLockFile)); + if (isFirstRun) { + if (mClassLoaderPatcher.mIsPrimaryProcess) { + // Wait for incremental_install.py to finish. + LockFile.waitForInstallerLock(appInstallLockFile, 30 * 1000); + LockFile.waitForInstallerLock(instInstallLockFile, 30 * 1000); + } else { + // Wait for the browser process to create the optimized dex files + // and copy the library files. + LockFile.waitForInstallerLock(appFirstRunLockFile, 60 * 1000); + LockFile.waitForInstallerLock(instFirstRunLockFile, 60 * 1000); + } + } + + mClassLoaderPatcher.importNativeLibs(instLibDir); + sIncrementalDexFiles = mClassLoaderPatcher.loadDexFiles(instDexDir, instPackageName); + if (instPackageNameDiffers) { + mClassLoaderPatcher.importNativeLibs(appLibDir); + mClassLoaderPatcher.loadDexFiles(appDexDir, appPackageName); + } + + if (isFirstRun && mClassLoaderPatcher.mIsPrimaryProcess) { + LockFile.clearInstallerLock(appFirstRunLockFile); + if (instPackageNameDiffers) { + LockFile.clearInstallerLock(instFirstRunLockFile); + } + } + + // mInstrumentationAppDir is one of a set of fields that is initialized only when + // instrumentation is active. + if (Reflect.getField(mActivityThread, "mInstrumentationAppDir") != null) { + String metaDataName = REAL_INSTRUMENTATION_META_DATA_NAME0; + if (mOrigInstrumentation instanceof SecondInstrumentation) { + metaDataName = REAL_INSTRUMENTATION_META_DATA_NAME1; + } + mRealInstrumentation = + initInstrumentation(getClassNameFromMetadata(metaDataName, instContext)); + } else { + Log.i(TAG, "No instrumentation active."); + } + + // Even when instrumentation is not enabled, ActivityThread uses a default + // Instrumentation instance internally. We hook it here in order to hook into the + // call to Instrumentation.onCreate(). + BootstrapInstrumentation bootstrapInstrumentation = new BootstrapInstrumentation(this); + populateInstrumenationFields(bootstrapInstrumentation); + Reflect.setField(mActivityThread, "mInstrumentation", bootstrapInstrumentation); + + // attachBaseContext() is called from ActivityThread#handleBindApplication() and + // Application#mApplication is changed right after we return. Thus, we cannot swap + // the Application instances until onCreate() is called. 
+            String realApplicationName = getClassNameFromMetadata(REAL_APP_META_DATA_NAME, context);
+            Log.i(TAG, "Instantiating " + realApplicationName);
+            Instrumentation anyInstrumentation =
+                    mRealInstrumentation != null ? mRealInstrumentation : mOrigInstrumentation;
+            mRealApplication = anyInstrumentation.newApplication(
+                    getClassLoader(), realApplicationName, context);
+
+            // Between attachBaseContext() and onCreate(), ActivityThread tries to instantiate
+            // all ContentProviders. The ContentProviders break without the correct Application
+            // class being installed, so temporarily pretend there are no providers, and then
+            // instantiate them explicitly within onCreate().
+            disableContentProviders();
+            Log.i(TAG, "Waiting for Instrumentation.onCreate");
+        } catch (Exception e) {
+            throw new RuntimeException("Incremental install failed.", e);
+        }
+    }
+
+    /**
+     * Returns the fully-qualified class name for the given key, stored in a
+     * <meta-data> within the manifest.
+     */
+    private static String getClassNameFromMetadata(String key, Context context)
+            throws NameNotFoundException {
+        String pkgName = context.getPackageName();
+        ApplicationInfo appInfo = context.getPackageManager().getApplicationInfo(pkgName,
+                PackageManager.GET_META_DATA);
+        String value = appInfo.metaData.getString(key);
+        if (value != null && !value.contains(".")) {
+            value = pkgName + "." + value;
+        }
+        return value;
+    }
+
+    /**
+     * Instantiates and initializes mRealInstrumentation (the real Instrumentation class).
+     */
+    private Instrumentation initInstrumentation(String realInstrumentationName)
+            throws ReflectiveOperationException {
+        if (realInstrumentationName == null) {
+            // This is the case when an incremental app is used as a target for an instrumentation
+            // test. In this case, ActivityThread can instantiate the proper class just fine since
+            // it exists within the test apk (as opposed to the incremental apk-under-test).
+            Log.i(TAG, "Running with external instrumentation");
+            return null;
+        }
+        // For unit tests, the instrumentation class is replaced in the manifest by a build step
+        // because ActivityThread tries to instantiate it before we get a chance to load the
+        // incremental dex files.
+        Log.i(TAG, "Instantiating instrumentation " + realInstrumentationName);
+        Instrumentation ret =
+                (Instrumentation) Reflect.newInstance(Class.forName(realInstrumentationName));
+        populateInstrumenationFields(ret);
+        return ret;
+    }
+
+    /**
+     * Sets important fields on a newly created Instrumentation object by copying them from the
+     * original Instrumentation instance.
+     */
+    private void populateInstrumenationFields(Instrumentation target)
+            throws ReflectiveOperationException {
+        // Initialize the fields that are set by Instrumentation.init().
+        String[] initFields = {"mAppContext", "mComponent", "mInstrContext", "mMessageQueue",
+                "mThread", "mUiAutomationConnection", "mWatcher"};
+        for (String fieldName : initFields) {
+            Reflect.setField(target, fieldName, Reflect.getField(mOrigInstrumentation, fieldName));
+        }
+    }
+
+    /**
+     * Called by BootstrapInstrumentation from Instrumentation.onCreate().
+     * This happens regardless of whether or not instrumentation is enabled.
+     */
+    void onInstrumentationCreate(Bundle arguments) {
+        Log.i(TAG, "Instrumentation.onCreate() called. Swapping references.");
+        try {
+            swapApplicationReferences();
+            enableContentProviders();
+            if (mRealInstrumentation != null) {
+                Reflect.setField(mActivityThread, "mInstrumentation", mRealInstrumentation);
+                mRealInstrumentation.onCreate(arguments);
+            }
+        } catch (Exception e) {
+            throw new RuntimeException("Incremental install failed.", e);
+        }
+    }
+
+    @Override
+    public void onCreate() {
+        super.onCreate();
+        try {
+            Log.i(TAG, "Application.onCreate() called.");
+            mRealApplication.onCreate();
+        } catch (Exception e) {
+            throw new RuntimeException("Incremental install failed.", e);
+        }
+    }
+
+    /**
+     * Nulls out ActivityThread.mBoundApplication.providers.
+     */
+    private void disableContentProviders() throws ReflectiveOperationException {
+        Object data = Reflect.getField(mActivityThread, "mBoundApplication");
+        mStashedProviderList = Reflect.getField(data, "providers");
+        Reflect.setField(data, "providers", null);
+    }
+
+    /**
+     * Restores the value of ActivityThread.mBoundApplication.providers, and invokes
+     * ActivityThread#installContentProviders().
+     */
+    private void enableContentProviders() throws ReflectiveOperationException {
+        Object data = Reflect.getField(mActivityThread, "mBoundApplication");
+        Reflect.setField(data, "providers", mStashedProviderList);
+        if (mStashedProviderList != null && mClassLoaderPatcher.mIsPrimaryProcess) {
+            Log.i(TAG, "Instantiating content providers");
+            Reflect.invokeMethod(mActivityThread, "installContentProviders", mRealApplication,
+                    mStashedProviderList);
+        }
+        mStashedProviderList = null;
+    }
+
+    /**
+     * Changes all fields within framework classes that have stored a reference to this
+     * BootstrapApplication to instead store references to mRealApplication.
+     */
+    @SuppressWarnings("unchecked")
+    private void swapApplicationReferences() throws ReflectiveOperationException {
+        if (Reflect.getField(mActivityThread, "mInitialApplication") == this) {
+            Reflect.setField(mActivityThread, "mInitialApplication", mRealApplication);
+        }
+
+        List<Application> allApplications =
+                (List<Application>) Reflect.getField(mActivityThread, "mAllApplications");
+        for (int i = 0; i < allApplications.size(); i++) {
+            if (allApplications.get(i) == this) {
+                allApplications.set(i, mRealApplication);
+            }
+        }
+
+        // Contains a reference to BootstrapApplication and will cause BroadcastReceivers to fail
+        // if not replaced.
+        Context contextImpl = mRealApplication.getBaseContext();
+        Reflect.setField(contextImpl, "mOuterContext", mRealApplication);
+
+        for (String fieldName : new String[] {"mPackages", "mResourcePackages"}) {
+            Map<String, WeakReference<?>> packageMap =
+                    (Map<String, WeakReference<?>>) Reflect.getField(mActivityThread, fieldName);
+            for (Map.Entry<String, WeakReference<?>> entry : packageMap.entrySet()) {
+                Object loadedApk = entry.getValue().get();
+                if (loadedApk != null && Reflect.getField(loadedApk, "mApplication") == this) {
+                    Reflect.setField(loadedApk, "mApplication", mRealApplication);
+                }
+            }
+        }
+    }
+}
diff --git a/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java b/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java
new file mode 100644
index 000000000000..f1f507af8bc1
--- /dev/null
+++ b/android/incremental_install/java/org/chromium/incrementalinstall/BootstrapInstrumentation.java
@@ -0,0 +1,25 @@
+// Copyright 2015 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.app.Instrumentation;
+import android.os.Bundle;
+
+/**
+ * Notifies BootstrapApplication of the call to Instrumentation.onCreate().
+ */
+public final class BootstrapInstrumentation extends Instrumentation {
+    private final BootstrapApplication mApp;
+
+    BootstrapInstrumentation(BootstrapApplication app) {
+        mApp = app;
+    }
+
+    @Override
+    public void onCreate(Bundle arguments) {
+        super.onCreate(arguments);
+        mApp.onInstrumentationCreate(arguments);
+    }
+}
diff --git a/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java b/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java
new file mode 100644
index 000000000000..53e926e626bd
--- /dev/null
+++ b/android/incremental_install/java/org/chromium/incrementalinstall/ClassLoaderPatcher.java
@@ -0,0 +1,303 @@
+// Copyright 2015 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.annotation.SuppressLint;
+import android.content.Context;
+import android.os.Build;
+import android.os.Process;
+import android.util.Log;
+
+import dalvik.system.DexFile;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.List;
+import java.util.Locale;
+
+/**
+ * Provides the ability to add native libraries and .dex files to an existing class loader.
+ * Tested with JellyBean MR2 through Marshmallow.
+ */
+final class ClassLoaderPatcher {
+    private static final String TAG = "incrementalinstall";
+    private final File mAppFilesSubDir;
+    private final ClassLoader mClassLoader;
+    private final Object mLibcoreOs;
+    private final int mProcessUid;
+    final boolean mIsPrimaryProcess;
+
+    ClassLoaderPatcher(Context context) throws ReflectiveOperationException {
+        mAppFilesSubDir =
+                new File(context.getApplicationInfo().dataDir, "incremental-install-files");
+        mClassLoader = context.getClassLoader();
+        mLibcoreOs = Reflect.getField(Class.forName("libcore.io.Libcore"), "os");
+        mProcessUid = Process.myUid();
+        mIsPrimaryProcess = context.getApplicationInfo().uid == mProcessUid;
+        Log.i(TAG, "uid=" + mProcessUid + " (isPrimary=" + mIsPrimaryProcess + ")");
+    }
+
+    /**
+     * Loads all dex files within |dexDir| into the app's ClassLoader.
+     */
+    @SuppressLint({
+            "SetWorldReadable",
+            "SetWorldWritable",
+    })
+    DexFile[] loadDexFiles(File dexDir, String packageName)
+            throws ReflectiveOperationException, IOException {
+        Log.i(TAG, "Installing dex files from: " + dexDir);
+
+        File optimizedDir = null;
+        boolean isAtLeastOreo = Build.VERSION.SDK_INT >= Build.VERSION_CODES.O;
+
+        if (isAtLeastOreo) {
+            // In O, optimizedDirectory is ignored, and the files are always put in an "oat"
+            // directory that is a sibling to the dex files themselves. SELinux policies
+            // prevent using odex files from /data/local/tmp, so we must first copy them
+            // into the app's data directory in order to get the odex files to live there.
+            // Use a package-name subdirectory to prevent name collisions when apk-under-test is
+            // used.
+            File newDexDir = new File(mAppFilesSubDir, packageName + "-dexes");
+            if (mIsPrimaryProcess) {
+                safeCopyAllFiles(dexDir, newDexDir);
+            }
+            dexDir = newDexDir;
+        } else {
+            // The optimized dex files will be owned by this process' user.
+ // Store them within the app's data dir rather than on /data/local/tmp + // so that they are still deleted (by the OS) when we uninstall + // (even on a non-rooted device). + File incrementalDexesDir = new File(mAppFilesSubDir, "optimized-dexes"); + File isolatedDexesDir = new File(mAppFilesSubDir, "isolated-dexes"); + + if (mIsPrimaryProcess) { + ensureAppFilesSubDirExists(); + // Allows isolated processes to access the same files. + incrementalDexesDir.mkdir(); + incrementalDexesDir.setReadable(true, false); + incrementalDexesDir.setExecutable(true, false); + // Create a directory for isolated processes to create directories in. + isolatedDexesDir.mkdir(); + isolatedDexesDir.setWritable(true, false); + isolatedDexesDir.setExecutable(true, false); + + optimizedDir = incrementalDexesDir; + } else { + // There is a UID check of the directory in dalvik.system.DexFile(): + // https://android.googlesource.com/platform/libcore/+/45e0260/dalvik/src/main/java/dalvik/system/DexFile.java#101 + // Rather than have each isolated process run DexOpt though, we use + // symlinks within the directory to point at the browser process' + // optimized dex files. + optimizedDir = new File(isolatedDexesDir, "isolated-" + mProcessUid); + optimizedDir.mkdir(); + // Always wipe it out and re-create for simplicity. + Log.i(TAG, "Creating dex file symlinks for isolated process"); + for (File f : optimizedDir.listFiles()) { + f.delete(); + } + for (File f : incrementalDexesDir.listFiles()) { + String to = "../../" + incrementalDexesDir.getName() + "/" + f.getName(); + File from = new File(optimizedDir, f.getName()); + createSymlink(to, from); + } + } + Log.i(TAG, "Code cache dir: " + optimizedDir); + } + + // Ignore "oat" directory. + // Also ignore files that sometimes show up (e.g. .jar.arm.flock). + File[] dexFilesArr = dexDir.listFiles(f -> f.getName().endsWith(".jar")); + if (dexFilesArr == null) { + throw new FileNotFoundException("Dex dir does not exist: " + dexDir); + } + + Log.i(TAG, "Loading " + dexFilesArr.length + " dex files"); + + Object dexPathList = Reflect.getField(mClassLoader, "pathList"); + Object[] dexElements = (Object[]) Reflect.getField(dexPathList, "dexElements"); + dexElements = addDexElements(dexFilesArr, optimizedDir, dexElements); + Reflect.setField(dexPathList, "dexElements", dexElements); + + // Return the list of new DexFile instances for the .jars in dexPathList. + DexFile[] ret = new DexFile[dexFilesArr.length]; + int startIndex = dexElements.length - dexFilesArr.length; + for (int i = 0; i < ret.length; ++i) { + ret[i] = (DexFile) Reflect.getField(dexElements[startIndex + i], "dexFile"); + } + return ret; + } + + /** + * Sets up all libraries within |libDir| to be loadable by System.loadLibrary(). + */ + @SuppressLint("SetWorldReadable") + void importNativeLibs(File libDir) throws ReflectiveOperationException, IOException { + Log.i(TAG, "Importing native libraries from: " + libDir); + if (!libDir.exists()) { + Log.i(TAG, "No native libs exist."); + return; + } + // The library copying is not necessary on older devices, but we do it anyways to + // simplify things (it's fast compared to dexing). 
+        // https://code.google.com/p/android/issues/detail?id=79480
+        File localLibsDir = new File(mAppFilesSubDir, "lib");
+        safeCopyAllFiles(libDir, localLibsDir);
+        addNativeLibrarySearchPath(localLibsDir);
+    }
+
+    @SuppressLint("SetWorldReadable")
+    private void safeCopyAllFiles(File srcDir, File dstDir) throws IOException {
+        if (!mIsPrimaryProcess) {
+            // TODO: Work around this issue by using APK splits to install each dex / lib.
+            throw new RuntimeException("Incremental install does not work on Android M+ " + "with isolated processes. Build system should have removed this. " + "Please file a bug.");
+        }
+
+        // The library copying is not necessary on older devices, but we do it anyways to
+        // simplify things (it's fast compared to dexing).
+        // https://code.google.com/p/android/issues/detail?id=79480
+        ensureAppFilesSubDirExists();
+        File lockFile = new File(mAppFilesSubDir, dstDir.getName() + ".lock");
+        LockFile lock = LockFile.acquireRuntimeLock(lockFile);
+        if (lock == null) {
+            LockFile.waitForRuntimeLock(lockFile, 10 * 1000);
+        } else {
+            try {
+                dstDir.mkdir();
+                dstDir.setReadable(true, false);
+                dstDir.setExecutable(true, false);
+                copyChangedFiles(srcDir, dstDir);
+            } finally {
+                lock.release();
+            }
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    private void addNativeLibrarySearchPath(File nativeLibDir) throws ReflectiveOperationException {
+        Object dexPathList = Reflect.getField(mClassLoader, "pathList");
+        Object currentDirs = Reflect.getField(dexPathList, "nativeLibraryDirectories");
+        File[] newDirs = new File[] { nativeLibDir };
+        // Switched from an array to an ArrayList in Lollipop.
+        if (currentDirs instanceof List) {
+            List<File> dirsAsList = (List<File>) currentDirs;
+            dirsAsList.add(0, nativeLibDir);
+        } else {
+            File[] dirsAsArray = (File[]) currentDirs;
+            Reflect.setField(dexPathList, "nativeLibraryDirectories",
+                    Reflect.concatArrays(newDirs, newDirs, dirsAsArray));
+        }
+
+        Object[] nativeLibraryPathElements;
+        try {
+            nativeLibraryPathElements =
+                    (Object[]) Reflect.getField(dexPathList, "nativeLibraryPathElements");
+        } catch (NoSuchFieldException e) {
+            // This field doesn't exist pre-M.
+            return;
+        }
+        Object[] additionalElements = makeNativePathElements(newDirs);
+        Reflect.setField(dexPathList, "nativeLibraryPathElements",
+                Reflect.concatArrays(nativeLibraryPathElements, additionalElements,
+                        nativeLibraryPathElements));
+    }
+
+    private static void copyChangedFiles(File srcDir, File dstDir) throws IOException {
+        int numUpdated = 0;
+        File[] srcFiles = srcDir.listFiles();
+        for (File f : srcFiles) {
+            // Note: Tried using hardlinks, but resulted in EACCES exceptions.
+            File dest = new File(dstDir, f.getName());
+            if (copyIfModified(f, dest)) {
+                numUpdated++;
+            }
+        }
+        // Delete stale files.
+        int numDeleted = 0;
+        for (File f : dstDir.listFiles()) {
+            File src = new File(srcDir, f.getName());
+            if (!src.exists()) {
+                numDeleted++;
+                f.delete();
+            }
+        }
+        String msg = String.format(Locale.US,
+                "copyChangedFiles: %d of %d updated. %d stale files removed.", numUpdated,
+                srcFiles.length, numDeleted);
+        Log.i(TAG, msg);
+    }
+
+    @SuppressLint("SetWorldReadable")
+    private static boolean copyIfModified(File src, File dest) throws IOException {
+        long lastModified = src.lastModified();
+        if (dest.exists() && dest.lastModified() == lastModified) {
+            return false;
+        }
+        Log.i(TAG, "Copying " + src + " -> " + dest);
+        FileInputStream istream = new FileInputStream(src);
+        FileOutputStream ostream = new FileOutputStream(dest);
+        ostream.getChannel().transferFrom(istream.getChannel(), 0, istream.getChannel().size());
+        istream.close();
+        ostream.close();
+        dest.setReadable(true, false);
+        dest.setExecutable(true, false);
+        dest.setLastModified(lastModified);
+        return true;
+    }
+
+    private void ensureAppFilesSubDirExists() {
+        mAppFilesSubDir.mkdir();
+        mAppFilesSubDir.setExecutable(true, false);
+    }
+
+    private void createSymlink(String to, File from) throws ReflectiveOperationException {
+        Reflect.invokeMethod(mLibcoreOs, "symlink", to, from.getAbsolutePath());
+    }
+
+    private static Object[] makeNativePathElements(File[] paths)
+            throws ReflectiveOperationException {
+        Object[] entries = new Object[paths.length];
+        if (Build.VERSION.SDK_INT >= 26) {
+            Class<?> entryClazz = Class.forName("dalvik.system.DexPathList$NativeLibraryElement");
+            for (int i = 0; i < paths.length; ++i) {
+                entries[i] = Reflect.newInstance(entryClazz, paths[i]);
+            }
+        } else {
+            Class<?> entryClazz = Class.forName("dalvik.system.DexPathList$Element");
+            for (int i = 0; i < paths.length; ++i) {
+                entries[i] = Reflect.newInstance(entryClazz, paths[i], true, null, null);
+            }
+        }
+        return entries;
+    }
+
+    private Object[] addDexElements(File[] files, File optimizedDirectory, Object[] curDexElements)
+            throws ReflectiveOperationException {
+        Class<?> entryClazz = Class.forName("dalvik.system.DexPathList$Element");
+        Class<?> clazz = Class.forName("dalvik.system.DexPathList");
+        Object[] ret =
+                Reflect.concatArrays(curDexElements, curDexElements, new Object[files.length]);
+        File emptyDir = new File("");
+        for (int i = 0; i < files.length; ++i) {
+            File file = files[i];
+            // loadDexFile requires that ret contain all previously added elements.
+            Object dexFile = Reflect.invokeMethod(
+                    clazz, "loadDexFile", file, optimizedDirectory, mClassLoader, ret);
+            Object dexElement;
+            if (Build.VERSION.SDK_INT >= 26) {
+                dexElement = Reflect.newInstance(entryClazz, dexFile, file);
+            } else {
+                dexElement = Reflect.newInstance(entryClazz, emptyDir, false, file, dexFile);
+            }
+            ret[curDexElements.length + i] = dexElement;
+        }
+        return ret;
+    }
+}
diff --git a/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java b/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java
new file mode 100644
index 000000000000..08d4c66c3070
--- /dev/null
+++ b/android/incremental_install/java/org/chromium/incrementalinstall/LockFile.java
@@ -0,0 +1,129 @@
+// Copyright 2015 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.util.Log;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.channels.FileLock;
+import java.util.concurrent.Callable;
+
+/**
+ * Helpers for dealing with .lock files used during install / first run.
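+ *
+ * Installer locks are zero-length files that are "released" by writing a byte
+ * into them, while runtime locks are held via a FileLock for as long as files
+ * are being copied.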
+ */
+final class LockFile {
+    private static final String TAG = "incrementalinstall";
+
+    private final File mFile;
+    private final FileOutputStream mOutputStream;
+    private final FileLock mFileLock;
+
+    private LockFile(File file, FileOutputStream outputStream, FileLock fileLock) {
+        mFile = file;
+        mOutputStream = outputStream;
+        mFileLock = fileLock;
+    }
+
+    /**
+     * Clears the lock file by writing to it (making it non-zero in length).
+     */
+    static void clearInstallerLock(File lockFile) throws IOException {
+        Log.i(TAG, "Clearing " + lockFile);
+        // On Android M+, we can't delete files in /data/local/tmp, so we write to it instead.
+        FileOutputStream os = new FileOutputStream(lockFile);
+        os.write(1);
+        os.close();
+    }
+
+    /**
+     * Waits for the given file to be non-zero in length.
+     */
+    static void waitForInstallerLock(final File file, long timeoutMs) {
+        pollingWait(new Callable<Boolean>() {
+            @Override public Boolean call() {
+                return !installerLockExists(file);
+            }
+        }, file, timeoutMs);
+    }
+
+    /**
+     * Polls |func| until it returns true, throwing if |timeoutMs| elapses first.
+     */
+    private static void pollingWait(Callable<Boolean> func, File file, long timeoutMs) {
+        long pollIntervalMs = 200;
+        for (int i = 0; i < timeoutMs / pollIntervalMs; i++) {
+            try {
+                if (func.call()) {
+                    if (i > 0) {
+                        Log.i(TAG, "Finished waiting on lock file: " + file);
+                    }
+                    return;
+                } else if (i == 0) {
+                    Log.i(TAG, "Waiting on lock file: " + file);
+                }
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+            try {
+                Thread.sleep(pollIntervalMs);
+            } catch (InterruptedException e) {
+                // Should never happen.
+            }
+        }
+        throw new RuntimeException("Timed out waiting for lock file: " + file);
+    }
+
+    /**
+     * Returns whether the given lock file is missing or is in the locked state.
+     */
+    static boolean installerLockExists(File file) {
+        return !file.exists() || file.length() == 0;
+    }
+
+    /**
+     * Attempts to acquire a lock for the given file.
+     * @return Returns the lock if it was acquired, or null otherwise.
+     */
+    static LockFile acquireRuntimeLock(File file) {
+        try {
+            FileOutputStream outputStream = new FileOutputStream(file);
+            FileLock lock = outputStream.getChannel().tryLock();
+            if (lock != null) {
+                Log.i(TAG, "Created lock file: " + file);
+                return new LockFile(file, outputStream, lock);
+            }
+            outputStream.close();
+        } catch (IOException e) {
+            // Do nothing. We didn't get the lock.
+            Log.w(TAG, "Exception trying to acquire lock " + file, e);
+        }
+        return null;
+    }
+
+    /**
+     * Waits for the given file to not exist.
+     */
+    static void waitForRuntimeLock(final File file, long timeoutMs) {
+        pollingWait(new Callable<Boolean>() {
+            @Override public Boolean call() {
+                return !file.exists();
+            }
+        }, file, timeoutMs);
+    }
+
+    /**
+     * Releases and deletes the lock file.
+     */
+    void release() throws IOException {
+        Log.i(TAG, "Deleting lock file: " + mFile);
+        mFileLock.release();
+        mOutputStream.close();
+        if (!mFile.delete()) {
+            throw new IOException("Failed to delete lock file: " + mFile);
+        }
+    }
+}
diff --git a/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java b/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java
new file mode 100644
index 000000000000..6ce74eb819ce
--- /dev/null
+++ b/android/incremental_install/java/org/chromium/incrementalinstall/Reflect.java
@@ -0,0 +1,157 @@
+// Copyright 2015 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.os.Build;
+
+import org.lsposed.hiddenapibypass.HiddenApiBypass;
+
+import java.lang.reflect.Array;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Reflection helper methods.
+ */
+final class Reflect {
+    /**
+     * Sets the value of an object's field (even if it's not visible).
+     *
+     * @param instance The object containing the field to set.
+     * @param name The name of the field to set.
+     * @param value The new value for the field.
+     */
+    static void setField(Object instance, String name, Object value)
+            throws ReflectiveOperationException {
+        Field field = findField(instance, name);
+        field.setAccessible(true);
+        field.set(instance, value);
+    }
+
+    /**
+     * Retrieves the value of an object's field (even if it's not visible).
+     *
+     * @param instance The object containing the field to read.
+     * @param name The name of the field to read.
+     * @return The field's value. Primitive values are returned as their boxed
+     *         type.
+     */
+    static Object getField(Object instance, String name) throws ReflectiveOperationException {
+        Field field = findField(instance, name);
+        field.setAccessible(true);
+        return field.get(instance);
+    }
+
+    /**
+     * Concatenates two arrays into a new array. The arrays must be of the same
+     * type.
+     */
+    static Object[] concatArrays(Object[] arrType, Object[] left, Object[] right) {
+        Object[] result = (Object[]) Array.newInstance(
+                arrType.getClass().getComponentType(), left.length + right.length);
+        System.arraycopy(left, 0, result, 0, left.length);
+        System.arraycopy(right, 0, result, left.length, right.length);
+        return result;
+    }
+
+    /**
+     * Invokes a method with zero or more parameters. For static methods, use the Class as the
+     * instance.
+     */
+    static Object invokeMethod(Object instance, String name, Object... params)
+            throws ReflectiveOperationException {
+        boolean isStatic = instance instanceof Class;
+        Class<?> clazz = isStatic ? (Class<?>) instance : instance.getClass();
+        Method method = findMethod(clazz, name, params);
+        method.setAccessible(true);
+        return method.invoke(instance, params);
+    }
+
+    /**
+     * Calls a constructor with zero or more parameters.
+     */
+    static Object newInstance(Class<?> clazz, Object... params)
+            throws ReflectiveOperationException {
+        Constructor<?> constructor = findConstructor(clazz, params);
+        constructor.setAccessible(true);
+        return constructor.newInstance(params);
+    }
+
+    private static Field findField(Object instance, String name) throws NoSuchFieldException {
+        boolean isStatic = instance instanceof Class;
+        Class<?> clazz = isStatic ? (Class<?>) instance : instance.getClass();
+        for (; clazz != null; clazz = clazz.getSuperclass()) {
+            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.P) {
+                try {
+                    return clazz.getDeclaredField(name);
+                } catch (NoSuchFieldException e) {
+                    // Need to look in the super class.
+                }
+            } else {
+                List<Field> fields = isStatic ? HiddenApiBypass.getStaticFields(clazz)
+                                              : HiddenApiBypass.getInstanceFields(clazz);
+                for (Field field : fields) {
+                    if (field.getName().equals(name)) {
+                        return field;
+                    }
+                }
+            }
+        }
+        throw new NoSuchFieldException("Field " + name + " not found in " + instance.getClass());
+    }
+
+    private static Method findMethod(Class<?> clazz, String name, Object... params)
+            throws NoSuchMethodException {
+        for (; clazz != null; clazz = clazz.getSuperclass()) {
+            for (Method method : clazz.getDeclaredMethods()) {
+                if (method.getName().equals(name)
+                        && areParametersCompatible(method.getParameterTypes(), params)) {
+                    return method;
+                }
+            }
+        }
+        throw new NoSuchMethodException("Method " + name + " with parameters "
+                + Arrays.asList(params) + " not found in " + clazz);
+    }
+
+    private static Constructor<?> findConstructor(Class<?> clazz, Object... params)
+            throws NoSuchMethodException {
+        for (Constructor<?> constructor : clazz.getDeclaredConstructors()) {
+            if (areParametersCompatible(constructor.getParameterTypes(), params)) {
+                return constructor;
+            }
+        }
+        throw new NoSuchMethodException("Constructor with parameters " + Arrays.asList(params)
+                + " not found in " + clazz);
+    }
+
+    private static boolean areParametersCompatible(Class<?>[] paramTypes, Object... params) {
+        if (params.length != paramTypes.length) {
+            return false;
+        }
+        for (int i = 0; i < params.length; i++) {
+            if (!isAssignableFrom(paramTypes[i], params[i])) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    private static boolean isAssignableFrom(Class<?> left, Object right) {
+        if (right == null) {
+            return !left.isPrimitive();
+        }
+        Class<?> rightClazz = right.getClass();
+        if (left.isPrimitive()) {
+            // TODO(agrieve): Fill in the rest as needed.
+            return left == boolean.class && rightClazz == Boolean.class
+                    || left == int.class && rightClazz == Integer.class;
+        }
+        return left.isAssignableFrom(rightClazz);
+    }
+}
diff --git a/android/incremental_install/java/org/chromium/incrementalinstall/SecondInstrumentation.java b/android/incremental_install/java/org/chromium/incrementalinstall/SecondInstrumentation.java
new file mode 100644
index 000000000000..ecf4870e80f1
--- /dev/null
+++ b/android/incremental_install/java/org/chromium/incrementalinstall/SecondInstrumentation.java
@@ -0,0 +1,12 @@
+// Copyright 2017 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.incrementalinstall;
+
+import android.app.Instrumentation;
+
+/**
+ * Exists to support an app having multiple instrumentations.
+ */
+public final class SecondInstrumentation extends Instrumentation {}
diff --git a/android/incremental_install/third_party/AndroidHiddenApiBypass/BUILD.gn b/android/incremental_install/third_party/AndroidHiddenApiBypass/BUILD.gn
new file mode 100644
index 000000000000..86e146663330
--- /dev/null
+++ b/android/incremental_install/third_party/AndroidHiddenApiBypass/BUILD.gn
@@ -0,0 +1,29 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+# Approved by chrome-security@ only for use by incremental install.
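+# The visibility list below restricts these helper targets to this directory
+# and to //build/android/incremental_install.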
+visibility = [ + ":*", + "//build/android/incremental_install:*", +] + +android_library("stub_java") { + sources = [ "stub/src/main/java/dalvik/system/VMRuntime.java" ] + jar_excluded_patterns = [ "*" ] +} + +android_library("hidden_api_bypass_java") { + sources = [ + "library/src/main/java/org/lsposed/hiddenapibypass/Helper.java", + "library/src/main/java/org/lsposed/hiddenapibypass/HiddenApiBypass.java", + "local_modifications/org/lsposed/hiddenapibypass/library/BuildConfig.java", + ] + deps = [ + ":stub_java", + "//third_party/androidx:androidx_annotation_annotation_jvm_java", + ] + jacoco_never_instrument = true +} diff --git a/android/incremental_install/third_party/AndroidHiddenApiBypass/LICENSE b/android/incremental_install/third_party/AndroidHiddenApiBypass/LICENSE new file mode 100644 index 000000000000..261eeb9e9f8b --- /dev/null +++ b/android/incremental_install/third_party/AndroidHiddenApiBypass/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/android/incremental_install/third_party/AndroidHiddenApiBypass/README.chromium b/android/incremental_install/third_party/AndroidHiddenApiBypass/README.chromium new file mode 100644 index 000000000000..b1fdc9c594f1 --- /dev/null +++ b/android/incremental_install/third_party/AndroidHiddenApiBypass/README.chromium @@ -0,0 +1,16 @@ +Name: AndroidHiddenApiBypass +URL: https://github.com/LSPosed/AndroidHiddenApiBypass +Version: b16cc3934a27e55e51f00f5504c7f49e7c8cfab7 +License: Apache 2.0 +License File: NOT_SHIPPED +Security Critical: no + +Description: +AndroidHiddenApiBypass enables reflection on APIs that are meant to be guarded +by Android's API Blocklist. + +Local Modifications: +* Removed files related to Gradle. +* Added local_modifications/.../BuildConfig.java to replace what Gradle would + have generated. +* Added BUILD.gn diff --git a/android/incremental_install/third_party/AndroidHiddenApiBypass/README.md b/android/incremental_install/third_party/AndroidHiddenApiBypass/README.md new file mode 100644 index 000000000000..c7e06817ed87 --- /dev/null +++ b/android/incremental_install/third_party/AndroidHiddenApiBypass/README.md @@ -0,0 +1,84 @@ +# AndroidHiddenApiBypass + +[![Android CI status](https://github.com/LSPosed/AndroidHiddenApiBypass/actions/workflows/android.yml/badge.svg?branch=main)](https://github.com/LSPosed/AndroidHiddenApiBypass/actions/workflows/android.yml) + +Bypass restrictions on non-SDK interfaces. + +## Why AndroidHiddenApiBypass? + +- Pure Java: no native code used. +- Reliable: does not rely on specific behaviors, so it will not be blocked like meta-reflection or `dexfile`. +- Stable: `unsafe`, art structs and `setHiddenApiExemptions` are stable APIs. + +[How it works (Chinese)](https://lovesykun.cn/archives/android-hidden-api-bypass.html) + +## Integration + +Gradle: + +```gradle +repositories { + mavenCentral() +} +dependencies { + implementation 'org.lsposed.hiddenapibypass:hiddenapibypass:4.3' +} +``` + +## Usage + +1. Invoke a restricted method: + ```java + HiddenApiBypass.invoke(ApplicationInfo.class, new ApplicationInfo(), "usesNonSdkApi"/*, args*/) + ``` +1. Invoke restricted constructor: + ```java + Object instance = HiddenApiBypass.newInstance(Class.forName("android.app.IActivityManager$Default")/*, args*/); + ``` +1. 
Get all methods including restricted ones from a class:
+   ```java
+   var allMethods = HiddenApiBypass.getDeclaredMethods(ApplicationInfo.class);
+   ((Method) allMethods.stream().filter(e -> e.getName().equals("usesNonSdkApi")).findFirst().get()).invoke(new ApplicationInfo());
+   ```
+1. Get all non-static fields including restricted ones from a class:
+   ```java
+   var allInstanceFields = HiddenApiBypass.getInstanceFields(ApplicationInfo.class);
+   ((Field) allInstanceFields.stream().filter(e -> e.getName().equals("longVersionCode")).findFirst().get()).get(new ApplicationInfo());
+   ```
+1. Get all static fields including restricted ones from a class:
+   ```java
+   var allStaticFields = HiddenApiBypass.getStaticFields(ApplicationInfo.class);
+   ((Field) allStaticFields.stream().filter(e -> e.getName().equals("HIDDEN_API_ENFORCEMENT_DEFAULT")).findFirst().get()).get(null);
+   ```
+1. Get a specific method or constructor of a class:
+   ```java
+   var ctor = HiddenApiBypass.getDeclaredConstructor(ClipDrawable.class /*, args */);
+   var method = HiddenApiBypass.getDeclaredMethod(ApplicationInfo.class, "getHiddenApiEnforcementPolicy" /*, args */);
+   ```
+1. Add a class to the exemption list:
+   ```java
+   HiddenApiBypass.addHiddenApiExemptions(
+       "Landroid/content/pm/ApplicationInfo;", // one specific class
+       "Ldalvik/system", // all classes in the package dalvik.system
+       "Lx" // all classes whose full name starts with x
+   );
+   ```
+   If you want to exempt all classes, just pass an empty prefix:
+   ```java
+   HiddenApiBypass.addHiddenApiExemptions("");
+   ```
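+
+A minimal end-to-end sketch combining the calls above (assuming API level 28+;
+the class name `HiddenApiDemo` is only for illustration):
+
+```java
+import android.content.pm.ApplicationInfo;
+import android.os.Build;
+
+import java.lang.reflect.Method;
+
+import org.lsposed.hiddenapibypass.HiddenApiBypass;
+
+class HiddenApiDemo {
+    // Two equivalent routes to the hidden ApplicationInfo#usesNonSdkApi().
+    static void demo() throws Exception {
+        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.P) {
+            return; // No hidden-API enforcement before Android P.
+        }
+        // Route 1: one-shot invocation through the library.
+        HiddenApiBypass.invoke(ApplicationInfo.class, new ApplicationInfo(), "usesNonSdkApi");
+        // Route 2: exempt the class once, then use plain reflection.
+        HiddenApiBypass.addHiddenApiExemptions("Landroid/content/pm/ApplicationInfo;");
+        Method m = ApplicationInfo.class.getDeclaredMethod("usesNonSdkApi");
+        m.invoke(new ApplicationInfo());
+    }
+}
+```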
+## License
+
+    Copyright 2021 LSPosed
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+        https://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
diff --git a/android/incremental_install/third_party/AndroidHiddenApiBypass/library/src/main/java/org/lsposed/hiddenapibypass/Helper.java b/android/incremental_install/third_party/AndroidHiddenApiBypass/library/src/main/java/org/lsposed/hiddenapibypass/Helper.java
new file mode 100644
index 000000000000..07d130dd29cf
--- /dev/null
+++ b/android/incremental_install/third_party/AndroidHiddenApiBypass/library/src/main/java/org/lsposed/hiddenapibypass/Helper.java
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2021 LSPosed
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.lsposed.hiddenapibypass;
+
+import java.lang.invoke.MethodHandleInfo;
+import java.lang.invoke.MethodType;
+import java.lang.reflect.Member;
+
+@SuppressWarnings("unused")
+public class Helper {
+    static public class MethodHandle {
+        private final MethodType type = null;
+        private MethodType nominalType;
+        private MethodHandle cachedSpreadInvoker;
+        protected final int handleKind = 0;
+
+        // The ArtMethod* or ArtField* associated with this method handle (used by the runtime).
+        protected final long artFieldOrMethod = 0;
+    }
+
+    static final public class MethodHandleImpl extends MethodHandle {
+        private final MethodHandleInfo info = null;
+    }
+
+    static final public class HandleInfo {
+        private final Member member = null;
+        private final MethodHandle handle = null;
+    }
+
+    // Mirrors the field layout of java.lang.Class so that Unsafe can read the
+    // iFields/methods/sFields pointers of a real Class object by offset.
+    static final public class Class {
+        private transient ClassLoader classLoader;
+        private transient java.lang.Class<?> componentType;
+        private transient Object dexCache;
+        private transient Object extData;
+        private transient Object[] ifTable;
+        private transient String name;
+        private transient java.lang.Class<?> superClass;
+        private transient Object vtable;
+        private transient long iFields;
+        private transient long methods;
+        private transient long sFields;
+        private transient int accessFlags;
+        private transient int classFlags;
+        private transient int classSize;
+        private transient int clinitThreadId;
+        private transient int dexClassDefIndex;
+        private transient volatile int dexTypeIndex;
+        private transient int numReferenceInstanceFields;
+        private transient int numReferenceStaticFields;
+        private transient int objectSize;
+        private transient int objectSizeAllocFastPath;
+        private transient int primitiveType;
+        private transient int referenceInstanceOffsets;
+        private transient int status;
+        private transient short copiedMethodsOffset;
+        private transient short virtualMethodsOffset;
+    }
+
+    static public class AccessibleObject {
+        private boolean override;
+    }
+
+    static final public class Executable extends AccessibleObject {
+        private Class declaringClass;
+        private Class declaringClassOfOverriddenMethod;
+        private Object[] parameters;
+        private long artMethod;
+        private int accessFlags;
+    }
+
+    // Sentinel members: the distance between adjacent methods (a/b) and
+    // adjacent fields (i/j, s/t) is used to measure ArtMethod/ArtField sizes.
+    @SuppressWarnings("EmptyMethod")
+    public static class NeverCall {
+        private static void a() {
+        }
+
+        private static void b() {
+        }
+
+        private static int s;
+        private static int t;
+        private int i;
+        private int j;
+    }
+
+    public static class InvokeStub {
+        private static Object invoke(Object... args) {
+            throw new IllegalStateException("Failed to invoke the method");
+        }
+
+        private InvokeStub(Object... args) {
+            throw new IllegalStateException("Failed to create a new instance");
+        }
+    }
+}
diff --git a/android/incremental_install/third_party/AndroidHiddenApiBypass/library/src/main/java/org/lsposed/hiddenapibypass/HiddenApiBypass.java b/android/incremental_install/third_party/AndroidHiddenApiBypass/library/src/main/java/org/lsposed/hiddenapibypass/HiddenApiBypass.java
new file mode 100644
index 000000000000..2344acff9e96
--- /dev/null
+++ b/android/incremental_install/third_party/AndroidHiddenApiBypass/library/src/main/java/org/lsposed/hiddenapibypass/HiddenApiBypass.java
@@ -0,0 +1,415 @@
+/*
+ * Copyright (C) 2021 LSPosed
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.lsposed.hiddenapibypass;
+
+import android.os.Build;
+import android.util.Log;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.RequiresApi;
+import androidx.annotation.VisibleForTesting;
+
+import org.lsposed.hiddenapibypass.library.BuildConfig;
+
+import java.lang.invoke.MethodHandle;
+import java.lang.invoke.MethodHandleInfo;
+import java.lang.invoke.MethodHandles;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Executable;
+import java.lang.reflect.Field;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.lang.reflect.Type;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import dalvik.system.VMRuntime;
+import sun.misc.Unsafe;
+
+@RequiresApi(Build.VERSION_CODES.P)
+public final class HiddenApiBypass {
+    private static final String TAG = "HiddenApiBypass";
+    private static final Unsafe unsafe;
+    private static final long methodOffset;
+    private static final long classOffset;
+    private static final long artOffset;
+    private static final long infoOffset;
+    private static final long methodsOffset;
+    private static final long iFieldOffset;
+    private static final long sFieldOffset;
+    private static final long memberOffset;
+    private static final long artMethodSize;
+    private static final long artMethodBias;
+    private static final long artFieldSize;
+    private static final long artFieldBias;
+    private static final Set<String> signaturePrefixes = new HashSet<>();
+
+    static {
+        try {
+            //noinspection JavaReflectionMemberAccess DiscouragedPrivateApi
+            unsafe = (Unsafe) Unsafe.class.getDeclaredMethod("getUnsafe").invoke(null);
+            assert unsafe != null;
+            methodOffset = unsafe.objectFieldOffset(Helper.Executable.class.getDeclaredField("artMethod"));
+            classOffset = unsafe.objectFieldOffset(Helper.Executable.class.getDeclaredField("declaringClass"));
+            artOffset = unsafe.objectFieldOffset(Helper.MethodHandle.class.getDeclaredField("artFieldOrMethod"));
+            infoOffset = unsafe.objectFieldOffset(Helper.MethodHandleImpl.class.getDeclaredField("info"));
+            methodsOffset = unsafe.objectFieldOffset(Helper.Class.class.getDeclaredField("methods"));
+            iFieldOffset = unsafe.objectFieldOffset(Helper.Class.class.getDeclaredField("iFields"));
+            sFieldOffset = unsafe.objectFieldOffset(Helper.Class.class.getDeclaredField("sFields"));
+            memberOffset = unsafe.objectFieldOffset(Helper.HandleInfo.class.getDeclaredField("member"));
+            // The ArtMethod size is the distance between the two adjacent
+            // sentinel methods; the bias locates entries within the array.
+            Method mA = Helper.NeverCall.class.getDeclaredMethod("a");
+            Method mB = Helper.NeverCall.class.getDeclaredMethod("b");
+            mA.setAccessible(true);
+            mB.setAccessible(true);
+            MethodHandle mhA = MethodHandles.lookup().unreflect(mA);
+            MethodHandle mhB = MethodHandles.lookup().unreflect(mB);
+            long aAddr = unsafe.getLong(mhA, artOffset);
+            long bAddr = unsafe.getLong(mhB, artOffset);
+            long aMethods = unsafe.getLong(Helper.NeverCall.class, methodsOffset);
+            artMethodSize = bAddr - aAddr;
+            if (BuildConfig.DEBUG) Log.v(TAG, artMethodSize + " " +
+                    Long.toString(aAddr, 16) + ", " +
+                    Long.toString(bAddr, 16) + ", " +
+                    Long.toString(aMethods, 16));
+            artMethodBias = aAddr - aMethods - artMethodSize;
+            // Same trick for ArtField, using the sentinel instance fields i/j.
+            Field fI = Helper.NeverCall.class.getDeclaredField("i");
+            Field fJ = Helper.NeverCall.class.getDeclaredField("j");
+            fI.setAccessible(true);
+            fJ.setAccessible(true);
+            MethodHandle mhI = MethodHandles.lookup().unreflectGetter(fI);
+            MethodHandle mhJ = MethodHandles.lookup().unreflectGetter(fJ);
+            long iAddr = unsafe.getLong(mhI, artOffset);
+            long jAddr = unsafe.getLong(mhJ, artOffset);
+            long iFields = unsafe.getLong(Helper.NeverCall.class, iFieldOffset);
+            artFieldSize = jAddr - iAddr;
+            if (BuildConfig.DEBUG) Log.v(TAG, artFieldSize + " " +
+                    Long.toString(iAddr, 16) + ", " +
+                    Long.toString(jAddr, 16) + ", " +
+                    Long.toString(iFields, 16));
+            artFieldBias = iAddr - iFields;
+        } catch (ReflectiveOperationException e) {
+            Log.e(TAG, "Initialize error", e);
+            throw new ExceptionInInitializerError(e);
+        }
+    }
+
+    @VisibleForTesting
+    static boolean checkArgsForInvokeMethod(Class<?>[] params, Object[] args) {
+        if (params.length != args.length) return false;
+        for (int i = 0; i < params.length; ++i) {
+            if (params[i].isPrimitive()) {
+                if (params[i] == int.class && !(args[i] instanceof Integer)) return false;
+                else if (params[i] == byte.class && !(args[i] instanceof Byte)) return false;
+                else if (params[i] == char.class && !(args[i] instanceof Character)) return false;
+                else if (params[i] == boolean.class && !(args[i] instanceof Boolean)) return false;
+                else if (params[i] == double.class && !(args[i] instanceof Double)) return false;
+                else if (params[i] == float.class && !(args[i] instanceof Float)) return false;
+                else if (params[i] == long.class && !(args[i] instanceof Long)) return false;
+                else if (params[i] == short.class && !(args[i] instanceof Short)) return false;
+            } else if (args[i] != null && !params[i].isInstance(args[i])) return false;
+        }
+        return true;
+    }
+
+    /**
+     * Creates an instance of the given class {@code clazz}, calling the restricted constructor with arguments {@code initargs}.
+     *
+     * @param clazz the class to instantiate
+     * @param initargs arguments to pass to the constructor
+     * @return the new instance
+     * @see Constructor#newInstance(Object...)
+     */
+    public static Object newInstance(@NonNull Class<?> clazz, Object... initargs) throws NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException {
+        Method stub = Helper.InvokeStub.class.getDeclaredMethod("invoke", Object[].class);
+        Constructor<?> ctor = Helper.InvokeStub.class.getDeclaredConstructor(Object[].class);
+        ctor.setAccessible(true);
+        long methods = unsafe.getLong(clazz, methodsOffset);
+        if (methods == 0) throw new NoSuchMethodException("Cannot find matching constructor");
+        int numMethods = unsafe.getInt(methods);
+        if (BuildConfig.DEBUG) Log.d(TAG, clazz + " has " + numMethods + " methods");
+        for (int i = 0; i < numMethods; i++) {
+            long method = methods + i * artMethodSize + artMethodBias;
+            unsafe.putLong(stub, methodOffset, method);
+            if (BuildConfig.DEBUG) Log.v(TAG, "got " + clazz.getTypeName() + "." + stub.getName() +
+                    "(" + Arrays.stream(stub.getParameterTypes()).map(Type::getTypeName).collect(Collectors.joining()) + ")");
+            if ("<init>".equals(stub.getName())) {
+                unsafe.putLong(ctor, methodOffset, method);
+                unsafe.putObject(ctor, classOffset, clazz);
+                Class<?>[] params = ctor.getParameterTypes();
+                if (checkArgsForInvokeMethod(params, initargs))
+                    return ctor.newInstance(initargs);
+            }
+        }
+        throw new NoSuchMethodException("Cannot find matching constructor");
+    }
+
+    /**
+     * Invokes a restricted method named {@code methodName} of the given class {@code clazz} on the object {@code thiz} with arguments {@code args}.
+     *
+     * @param clazz the class to call the method on (required because this method cannot call inherited methods)
+     * @param thiz this object, which can be {@code null} if the target method is static
+     * @param methodName the method name
+     * @param args arguments to call the method with
+     * @return the return value of the method
+     * @see Method#invoke(Object, Object...)
+     */
+    public static Object invoke(@NonNull Class<?> clazz, @Nullable Object thiz, @NonNull String methodName, Object... args) throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
+        if (thiz != null && !clazz.isInstance(thiz)) {
+            throw new IllegalArgumentException("this object is not an instance of the given class");
+        }
+        Method stub = Helper.InvokeStub.class.getDeclaredMethod("invoke", Object[].class);
+        stub.setAccessible(true);
+        long methods = unsafe.getLong(clazz, methodsOffset);
+        if (methods == 0) throw new NoSuchMethodException("Cannot find matching method");
+        int numMethods = unsafe.getInt(methods);
+        if (BuildConfig.DEBUG) Log.d(TAG, clazz + " has " + numMethods + " methods");
+        for (int i = 0; i < numMethods; i++) {
+            long method = methods + i * artMethodSize + artMethodBias;
+            unsafe.putLong(stub, methodOffset, method);
+            if (BuildConfig.DEBUG) Log.v(TAG, "got " + clazz.getTypeName() + "." + stub.getName() +
+                    "(" + Arrays.stream(stub.getParameterTypes()).map(Type::getTypeName).collect(Collectors.joining()) + ")");
+            if (methodName.equals(stub.getName())) {
+                Class<?>[] params = stub.getParameterTypes();
+                if (checkArgsForInvokeMethod(params, args))
+                    return stub.invoke(thiz, args);
+            }
+        }
+        throw new NoSuchMethodException("Cannot find matching method");
+    }
+
+    /**
+     * Gets the declared methods of the given class without hidden API restrictions.
+     *
+     * @param clazz the class to fetch declared methods from (including constructors, whose name is {@code <init>})
+     * @return list of declared methods of {@code clazz}
+     */
+    @NonNull
+    public static List<Executable> getDeclaredMethods(@NonNull Class<?> clazz) {
+        ArrayList<Executable> list = new ArrayList<>();
+        if (clazz.isPrimitive() || clazz.isArray()) return list;
+        MethodHandle mh;
+        try {
+            Method mA = Helper.NeverCall.class.getDeclaredMethod("a");
+            mA.setAccessible(true);
+            mh = MethodHandles.lookup().unreflect(mA);
+        } catch (NoSuchMethodException | IllegalAccessException e) {
+            return list;
+        }
+        long methods = unsafe.getLong(clazz, methodsOffset);
+        if (methods == 0) return list;
+        int numMethods = unsafe.getInt(methods);
+        if (BuildConfig.DEBUG) Log.d(TAG, clazz + " has " + numMethods + " methods");
+        for (int i = 0; i < numMethods; i++) {
+            long method = methods + i * artMethodSize + artMethodBias;
+            // Point the method handle at the i-th ArtMethod, then let
+            // revealDirect() crack it into a reflective Member.
+            unsafe.putLong(mh, artOffset, method);
+            unsafe.putObject(mh, infoOffset, null);
+            try {
+                MethodHandles.lookup().revealDirect(mh);
+            } catch (Throwable ignored) {
+            }
+            MethodHandleInfo info = (MethodHandleInfo) unsafe.getObject(mh, infoOffset);
+            Executable member = (Executable) unsafe.getObject(info, memberOffset);
+            if (BuildConfig.DEBUG)
+                Log.v(TAG, "got " + clazz.getTypeName() + "." + member.getName() +
+                        "(" + Arrays.stream(member.getParameterTypes()).map(Type::getTypeName).collect(Collectors.joining()) + ")");
+            list.add(member);
+        }
+        return list;
+    }
+
+    /**
+     * Gets a restricted method named {@code methodName} of the given class {@code clazz} with argument types {@code parameterTypes}.
+     *
+     * @param clazz the class in which the expected method is declared
+     * @param methodName the expected method's name
+     * @param parameterTypes argument types of the expected method
+     * @return the found method
+     * @throws NoSuchMethodException when no method matches the given parameters
+     * @see Class#getDeclaredMethod(String, Class[])
+     */
+    @NonNull
+    public static Method getDeclaredMethod(@NonNull Class<?> clazz, @NonNull String methodName, @NonNull Class<?>... parameterTypes) throws NoSuchMethodException {
+        List<Executable> methods = getDeclaredMethods(clazz);
+        allMethods:
+        for (Executable method : methods) {
+            if (!method.getName().equals(methodName)) continue;
+            if (!(method instanceof Method)) continue;
+            Class<?>[] expectedTypes = method.getParameterTypes();
+            if (expectedTypes.length != parameterTypes.length) continue;
+            for (int i = 0; i < parameterTypes.length; ++i) {
+                if (parameterTypes[i] != expectedTypes[i]) continue allMethods;
+            }
+            return (Method) method;
+        }
+        throw new NoSuchMethodException("Cannot find matching method");
+    }
+
+    /**
+     * Gets a restricted constructor of the given class {@code clazz} with argument types {@code parameterTypes}.
+     *
+     * @param clazz the class in which the expected constructor is declared
+     * @param parameterTypes argument types of the expected constructor
+     * @return the found constructor
+     * @throws NoSuchMethodException when no constructor matches the given parameters
+     * @see Class#getDeclaredConstructor(Class[])
+     */
+    @NonNull
+    public static Constructor<?> getDeclaredConstructor(@NonNull Class<?> clazz, @NonNull Class<?>... parameterTypes) throws NoSuchMethodException {
+        List<Executable> methods = getDeclaredMethods(clazz);
+        allMethods:
+        for (Executable method : methods) {
+            if (!(method instanceof Constructor)) continue;
+            Class<?>[] expectedTypes = method.getParameterTypes();
+            if (expectedTypes.length != parameterTypes.length) continue;
+            for (int i = 0; i < parameterTypes.length; ++i) {
+                if (parameterTypes[i] != expectedTypes[i]) continue allMethods;
+            }
+            return (Constructor<?>) method;
+        }
+        throw new NoSuchMethodException("Cannot find matching constructor");
+    }
+
+
+    /**
+     * Gets the declared non-static fields of the given class without hidden API restrictions.
+     *
+     * @param clazz the class to fetch declared fields from
+     * @return list of declared non-static fields of {@code clazz}
+     */
+    @NonNull
+    public static List<Field> getInstanceFields(@NonNull Class<?> clazz) {
+        ArrayList<Field> list = new ArrayList<>();
+        if (clazz.isPrimitive() || clazz.isArray()) return list;
+        MethodHandle mh;
+        try {
+            Field fI = Helper.NeverCall.class.getDeclaredField("i");
+            fI.setAccessible(true);
+            mh = MethodHandles.lookup().unreflectGetter(fI);
+        } catch (IllegalAccessException | NoSuchFieldException e) {
+            return list;
+        }
+        long fields = unsafe.getLong(clazz, iFieldOffset);
+        if (fields == 0) return list;
+        int numFields = unsafe.getInt(fields);
+        if (BuildConfig.DEBUG) Log.d(TAG, clazz + " has " + numFields + " instance fields");
+        for (int i = 0; i < numFields; i++) {
+            long field = fields + i * artFieldSize + artFieldBias;
+            unsafe.putLong(mh, artOffset, field);
+            unsafe.putObject(mh, infoOffset, null);
+            try {
+                MethodHandles.lookup().revealDirect(mh);
+            } catch (Throwable ignored) {
+            }
+            MethodHandleInfo info = (MethodHandleInfo) unsafe.getObject(mh, infoOffset);
+            Field member = (Field) unsafe.getObject(info, memberOffset);
+            if (BuildConfig.DEBUG)
+                Log.v(TAG, "got " + member.getType() + " " + clazz.getTypeName() + "." + member.getName());
+            list.add(member);
+        }
+        return list;
+    }
+
+    /**
+     * Gets the declared static fields of the given class without hidden API restrictions.
+     *
+     * @param clazz the class to fetch declared fields from
+     * @return list of declared static fields of {@code clazz}
+     */
+    @NonNull
+    public static List<Field> getStaticFields(@NonNull Class<?> clazz) {
+        ArrayList<Field> list = new ArrayList<>();
+        if (clazz.isPrimitive() || clazz.isArray()) return list;
+        MethodHandle mh;
+        try {
+            Field fS = Helper.NeverCall.class.getDeclaredField("s");
+            fS.setAccessible(true);
+            mh = MethodHandles.lookup().unreflectGetter(fS);
+        } catch (IllegalAccessException | NoSuchFieldException e) {
+            return list;
+        }
+        long fields = unsafe.getLong(clazz, sFieldOffset);
+        if (fields == 0) return list;
+        int numFields = unsafe.getInt(fields);
+        if (BuildConfig.DEBUG) Log.d(TAG, clazz + " has " + numFields + " static fields");
+        for (int i = 0; i < numFields; i++) {
+            long field = fields + i * artFieldSize + artFieldBias;
+            unsafe.putLong(mh, artOffset, field);
+            unsafe.putObject(mh, infoOffset, null);
+            try {
+                MethodHandles.lookup().revealDirect(mh);
+            } catch (Throwable ignored) {
+            }
+            MethodHandleInfo info = (MethodHandleInfo) unsafe.getObject(mh, infoOffset);
+            Field member = (Field) unsafe.getObject(info, memberOffset);
+            if (BuildConfig.DEBUG)
+                Log.v(TAG, "got " + member.getType() + " " + clazz.getTypeName() + "." + member.getName());
+            list.add(member);
+        }
+        return list;
+    }
+
+    /**
+     * Sets the list of exemptions from hidden API access enforcement.
+     *
+     * @param signaturePrefixes A list of class signature prefixes. Each item in the list is a prefix match on the type
+     *                          signature of a blacklisted API. All matching APIs are treated as if they were on
+     *                          the whitelist: access permitted, and no logging.
+     * @return whether the operation is successful
+     */
+    public static boolean setHiddenApiExemptions(@NonNull String... signaturePrefixes) {
+        try {
+            Object runtime = invoke(VMRuntime.class, null, "getRuntime");
+            invoke(VMRuntime.class, runtime, "setHiddenApiExemptions", (Object) signaturePrefixes);
+            return true;
+        } catch (Throwable e) {
+            Log.w(TAG, "setHiddenApiExemptions", e);
+            return false;
+        }
+    }
+
+    /**
+     * Adds to the list of exemptions from hidden API access enforcement.
+     *
+     * @param signaturePrefixes A list of class signature prefixes. Each item in the list is a prefix match on the type
+     *                          signature of a blacklisted API. All matching APIs are treated as if they were on
+     *                          the whitelist: access permitted, and no logging.
+     * @return whether the operation is successful
+     */
+    public static boolean addHiddenApiExemptions(String... signaturePrefixes) {
+        HiddenApiBypass.signaturePrefixes.addAll(Arrays.asList(signaturePrefixes));
+        String[] strings = new String[HiddenApiBypass.signaturePrefixes.size()];
+        HiddenApiBypass.signaturePrefixes.toArray(strings);
+        return setHiddenApiExemptions(strings);
+    }
+
+    /**
+     * Clears the list of exemptions from hidden API access enforcement.
+     * The Android runtime caches access flags, so if a hidden API has already been accessed
+     * unrestrictedly, running this method will not restore the restriction on it.
+ * + * @return whether the operation is successful + */ + public static boolean clearHiddenApiExemptions() { + HiddenApiBypass.signaturePrefixes.clear(); + return setHiddenApiExemptions(); + } +} diff --git a/android/incremental_install/third_party/AndroidHiddenApiBypass/local_modifications/org/lsposed/hiddenapibypass/library/BuildConfig.java b/android/incremental_install/third_party/AndroidHiddenApiBypass/local_modifications/org/lsposed/hiddenapibypass/library/BuildConfig.java new file mode 100644 index 000000000000..9788a8edcbff --- /dev/null +++ b/android/incremental_install/third_party/AndroidHiddenApiBypass/local_modifications/org/lsposed/hiddenapibypass/library/BuildConfig.java @@ -0,0 +1,9 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +package org.lsposed.hiddenapibypass.library; + +/** When building with Gradle, this file would be generated. */ +public class BuildConfig { + public static final boolean DEBUG = false; +} diff --git a/android/incremental_install/third_party/AndroidHiddenApiBypass/stub/src/main/java/dalvik/system/VMRuntime.java b/android/incremental_install/third_party/AndroidHiddenApiBypass/stub/src/main/java/dalvik/system/VMRuntime.java new file mode 100644 index 000000000000..87db1ece1b61 --- /dev/null +++ b/android/incremental_install/third_party/AndroidHiddenApiBypass/stub/src/main/java/dalvik/system/VMRuntime.java @@ -0,0 +1,9 @@ +package dalvik.system; + +@SuppressWarnings("unused") +public class VMRuntime { + public static VMRuntime getRuntime() { + throw new IllegalArgumentException("stub"); + } + public native void setHiddenApiExemptions(String[] signaturePrefixes); +} diff --git a/android/incremental_install/write_installer_json.py b/android/incremental_install/write_installer_json.py new file mode 100755 index 000000000000..4825a80e1afc --- /dev/null +++ b/android/incremental_install/write_installer_json.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python3 + +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Writes a .json file with the per-apk details for an incremental install.""" + +import argparse +import json +import os +import sys + +sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, 'gyp')) + +from util import build_utils +import action_helpers # build_utils adds //build to sys.path. + + +def _ParseArgs(args): + args = build_utils.ExpandFileArgs(args) + parser = argparse.ArgumentParser() + parser.add_argument('--output-path', + help='Output path for .json file.', + required=True) + parser.add_argument('--apk-path', + help='Path to .apk relative to output directory.', + required=True) + parser.add_argument('--split', + action='append', + dest='split_globs', + default=[], + help='A glob matching the apk splits. ' + 'Can be specified multiple times.') + parser.add_argument( + '--native-libs', + action='append', + help='GN-list of paths to native libraries relative to ' + 'output directory. 
Can be repeated.') + parser.add_argument( + '--dex-files', help='GN-list of dex paths relative to output directory.') + parser.add_argument('--show-proguard-warning', + action='store_true', + default=False, + help='Print a warning about proguard being disabled') + + options = parser.parse_args(args) + options.dex_files = action_helpers.parse_gn_list(options.dex_files) + options.native_libs = action_helpers.parse_gn_list(options.native_libs) + return options + + +def main(args): + options = _ParseArgs(args) + + data = { + 'apk_path': options.apk_path, + 'native_libs': options.native_libs, + 'dex_files': options.dex_files, + 'show_proguard_warning': options.show_proguard_warning, + 'split_globs': options.split_globs, + } + + with action_helpers.atomic_output(options.output_path, mode='w+') as f: + json.dump(data, f, indent=2, sort_keys=True) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/android/incremental_install/write_installer_json.pydeps b/android/incremental_install/write_installer_json.pydeps new file mode 100644 index 000000000000..519281fe6fc2 --- /dev/null +++ b/android/incremental_install/write_installer_json.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/incremental_install --output build/android/incremental_install/write_installer_json.pydeps build/android/incremental_install/write_installer_json.py +../../action_helpers.py +../../gn_helpers.py +../gyp/util/__init__.py +../gyp/util/build_utils.py +write_installer_json.py diff --git a/android/java/src/org/chromium/build/annotations/AlwaysInline.java b/android/java/src/org/chromium/build/annotations/AlwaysInline.java new file mode 100644 index 000000000000..e79bfe77abdd --- /dev/null +++ b/android/java/src/org/chromium/build/annotations/AlwaysInline.java @@ -0,0 +1,17 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.build.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Tells R8 to always inline the annotated method/constructor. + */ +@Target({ElementType.CONSTRUCTOR, ElementType.METHOD}) +@Retention(RetentionPolicy.CLASS) +public @interface AlwaysInline {} diff --git a/android/java/src/org/chromium/build/annotations/CheckDiscard.java b/android/java/src/org/chromium/build/annotations/CheckDiscard.java new file mode 100644 index 000000000000..897067e2ee96 --- /dev/null +++ b/android/java/src/org/chromium/build/annotations/CheckDiscard.java @@ -0,0 +1,24 @@ +// Copyright 2019 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.build.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Causes build to assert that annotated classes / methods / fields are + * optimized away in release builds (without dcheck_always_on). + */ +@Target({ElementType.CONSTRUCTOR, ElementType.FIELD, ElementType.METHOD, ElementType.TYPE}) +@Retention(RetentionPolicy.CLASS) +public @interface CheckDiscard { + /** + * Describes why the element should be discarded. + * @return reason for discarding (crbug links are preferred unless reason is trivial). 
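+     *         (e.g. {@code @CheckDiscard("Only referenced from tests; crbug.com/<bug-id>")},
+     *         where the bug id shown here is an illustrative placeholder)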
+ */ + String value(); +} diff --git a/android/java/src/org/chromium/build/annotations/DoNotClassMerge.java b/android/java/src/org/chromium/build/annotations/DoNotClassMerge.java new file mode 100644 index 000000000000..94c9fa3a690c --- /dev/null +++ b/android/java/src/org/chromium/build/annotations/DoNotClassMerge.java @@ -0,0 +1,20 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.build.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * The annotated class should never be horizontally or vertically merged. + * + * The annotated classes are guaranteed not to be horizontally or vertically + * merged by Proguard. Other optimizations may still apply. + */ +@Target({ElementType.TYPE}) +@Retention(RetentionPolicy.CLASS) +public @interface DoNotClassMerge {} diff --git a/android/java/src/org/chromium/build/annotations/DoNotInline.java b/android/java/src/org/chromium/build/annotations/DoNotInline.java new file mode 100644 index 000000000000..4dd193332da2 --- /dev/null +++ b/android/java/src/org/chromium/build/annotations/DoNotInline.java @@ -0,0 +1,20 @@ +// Copyright 2018 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.build.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * The annotated method or class should never be inlined. + * + * The annotated method (or methods on the annotated class) are guaranteed not to be inlined by + * Proguard. Other optimizations may still apply. + */ +@Target({ElementType.CONSTRUCTOR, ElementType.FIELD, ElementType.METHOD, ElementType.TYPE}) +@Retention(RetentionPolicy.CLASS) +public @interface DoNotInline {} diff --git a/android/java/src/org/chromium/build/annotations/DoNotStripLogs.java b/android/java/src/org/chromium/build/annotations/DoNotStripLogs.java new file mode 100644 index 000000000000..be96d9a590e2 --- /dev/null +++ b/android/java/src/org/chromium/build/annotations/DoNotStripLogs.java @@ -0,0 +1,17 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.build.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * The annotated method or class will have -maximumremovedandroidloglevel 0 applied to it. + */ +@Target({ElementType.CONSTRUCTOR, ElementType.METHOD, ElementType.TYPE}) +@Retention(RetentionPolicy.CLASS) +public @interface DoNotStripLogs {} diff --git a/android/java/src/org/chromium/build/annotations/IdentifierNameString.java b/android/java/src/org/chromium/build/annotations/IdentifierNameString.java new file mode 100644 index 000000000000..ca8b2df67655 --- /dev/null +++ b/android/java/src/org/chromium/build/annotations/IdentifierNameString.java @@ -0,0 +1,35 @@ +// Copyright 2020 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+
+package org.chromium.build.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Annotation used to mark fields that may contain Strings referring to fully qualified class names
+ * and methods whose arguments may be fully qualified class names. These class names may then be
+ * obfuscated by R8. A couple of caveats when using this:
+ * - This only obfuscates the string, it does not actually check that the class exists.
+ * - If a field has this annotation, it must be non-final, otherwise javac will inline the constant
+ *   and R8 won't obfuscate it.
+ * - Any field/method must be assigned/called with a String literal or a variable R8 can easily
+ *   trace to a String literal.
+ *
+ * <pre>
+ * Usage example:
+ * {@code
+ * @IdentifierNameString
+ * public static final String LOGGING_TAG = "com.google.android.apps.foo.FooActivity";
+ *
+ * // In this example, both className and message are treated as identifier name strings, but will
+ * // only be obfuscated if the string points to a real class.
+ * @IdentifierNameString
+ * public void doSomeLogging(String className, String message) { ... }
+ * }
+ * </pre>
+ */
+@Target({ElementType.FIELD, ElementType.METHOD})
+@Retention(RetentionPolicy.CLASS)
+public @interface IdentifierNameString {}
diff --git a/android/java/src/org/chromium/build/annotations/MainDex.java b/android/java/src/org/chromium/build/annotations/MainDex.java
new file mode 100644
index 000000000000..5eedb0b778ce
--- /dev/null
+++ b/android/java/src/org/chromium/build/annotations/MainDex.java
@@ -0,0 +1,23 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Classes with native methods (those containing @NativeMethods interfaces) that are used within
+ * renderer processes must be annotated with @MainDex in order for their native methods to work.
+ *
+ * Applies only for Chrome/ChromeModern (not needed for Monochrome+).
+ *
+ * For Cronet builds, which use a default_min_sdk_version of less than 21, this annotation also
+ * causes classes to appear in the main dex file (for "Legacy multidex").
+ */
+@Target(ElementType.TYPE)
+@Retention(RetentionPolicy.RUNTIME)
+public @interface MainDex {}
diff --git a/android/java/src/org/chromium/build/annotations/MockedInTests.java b/android/java/src/org/chromium/build/annotations/MockedInTests.java
new file mode 100644
index 000000000000..6b486f7becd5
--- /dev/null
+++ b/android/java/src/org/chromium/build/annotations/MockedInTests.java
@@ -0,0 +1,17 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Target;
+
+/**
+ * See b/147584922. Proguard and Mockito don't play nicely together, and proguard rules make it
+ * impossible to keep the base class/interface for a mocked class without providing additional
+ * explicit information, like this annotation. This annotation should only need to be used on a
+ * class/interface that is extended/implemented by another class/interface that is then mocked.
+ */
+@Target(ElementType.TYPE)
+public @interface MockedInTests {}
diff --git a/android/java/src/org/chromium/build/annotations/UsedByReflection.java b/android/java/src/org/chromium/build/annotations/UsedByReflection.java
new file mode 100644
index 000000000000..f28f38348e71
--- /dev/null
+++ b/android/java/src/org/chromium/build/annotations/UsedByReflection.java
@@ -0,0 +1,22 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Target;
+
+/**
+ * Annotation used for marking methods and fields that are called by reflection.
+ * Useful for keeping components that would otherwise be removed by Proguard.
+ * Use the value parameter to mention a file that calls this method.
+ *
+ * Note that adding this annotation to a method is not enough to guarantee that
+ * it is kept - either its class must be referenced elsewhere in the program, or
+ * the class must be annotated with this as well.
+ */
+@Target({ElementType.METHOD, ElementType.FIELD, ElementType.TYPE, ElementType.CONSTRUCTOR})
+public @interface UsedByReflection {
+    String value();
+}
diff --git a/android/java/templates/BuildConfig.template b/android/java/templates/BuildConfig.template
new file mode 100644
index 000000000000..cfecb6fefdcb
--- /dev/null
+++ b/android/java/templates/BuildConfig.template
@@ -0,0 +1,95 @@
+// Copyright 2015 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build;
+
+#define Q(x) #x
+#define QUOTE(x) Q(x)
+
+#if defined(USE_FINAL)
+#define MAYBE_FINAL final
+#define MAYBE_ZERO = 0
+#define MAYBE_FALSE = false
+#else
+#define MAYBE_FINAL
+#define MAYBE_ZERO
+#define MAYBE_FALSE
+#endif
+
+/**
+ * Build configuration. Generated on a per-target basis.
+ */
+public class BuildConfig {
+
+#if defined(ENABLE_MULTIDEX)
+    public static MAYBE_FINAL boolean IS_MULTIDEX_ENABLED = true;
+#else
+    public static MAYBE_FINAL boolean IS_MULTIDEX_ENABLED MAYBE_FALSE;
+#endif
+
+#if defined(_ENABLE_ASSERTS)
+    public static MAYBE_FINAL boolean ENABLE_ASSERTS = true;
+#else
+    public static MAYBE_FINAL boolean ENABLE_ASSERTS MAYBE_FALSE;
+#endif
+
+#if defined(_IS_UBSAN)
+    public static MAYBE_FINAL boolean IS_UBSAN = true;
+#else
+    public static MAYBE_FINAL boolean IS_UBSAN MAYBE_FALSE;
+#endif
+
+#if defined(_IS_CHROME_BRANDED)
+    public static MAYBE_FINAL boolean IS_CHROME_BRANDED = true;
+#else
+    public static MAYBE_FINAL boolean IS_CHROME_BRANDED MAYBE_FALSE;
+#endif
+
+    // The ID of the android string resource that stores the product version.
+    // This layer of indirection is necessary to make the resource dependency
+    // optional for android_apk targets/base_java (ex. for cronet).
+#if defined(_RESOURCES_VERSION_VARIABLE)
+    public static MAYBE_FINAL int R_STRING_PRODUCT_VERSION = _RESOURCES_VERSION_VARIABLE;
+#else
+    // Default value, do not use.
+    public static MAYBE_FINAL int R_STRING_PRODUCT_VERSION MAYBE_ZERO;
+#endif
+
+    // Minimum SDK Version supported by this apk.
+    // Be cautious when using this value, as it can happen that older apks get
+    // installed on newer Android version (e.g. when a device goes through a
+    // system upgrade). It is also convenient for developing to have all
+    // features available through a single APK.
+    // However, it's pretty safe to assume that a feature specific to KitKat
+    // will never be needed in an APK with MIN_SDK_VERSION = Oreo.
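+    // For example (illustrative), a guard such as
+    //   if (BuildConfig.MIN_SDK_VERSION >= VERSION_CODES.N) { ... }
+    // lets R8 strip branches that can never run on the oldest supported device.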
+#if defined(_MIN_SDK_VERSION) + public static MAYBE_FINAL int MIN_SDK_VERSION = _MIN_SDK_VERSION; +#else + public static MAYBE_FINAL int MIN_SDK_VERSION = 1; +#endif + +#if defined(_BUNDLES_SUPPORTED) + public static MAYBE_FINAL boolean BUNDLES_SUPPORTED = true; +#else + public static MAYBE_FINAL boolean BUNDLES_SUPPORTED MAYBE_FALSE; +#endif + +#if defined(_IS_INCREMENTAL_INSTALL) + public static MAYBE_FINAL boolean IS_INCREMENTAL_INSTALL = true; +#else + public static MAYBE_FINAL boolean IS_INCREMENTAL_INSTALL MAYBE_FALSE; +#endif + +#if defined(_ISOLATED_SPLITS_ENABLED) + public static MAYBE_FINAL boolean ISOLATED_SPLITS_ENABLED = true; +#else + public static MAYBE_FINAL boolean ISOLATED_SPLITS_ENABLED MAYBE_FALSE; +#endif + +#if defined(_IS_FOR_TEST) + public static MAYBE_FINAL boolean IS_FOR_TEST = true; +#else + public static MAYBE_FINAL boolean IS_FOR_TEST MAYBE_FALSE; +#endif +} diff --git a/android/java/templates/ProductConfig.template b/android/java/templates/ProductConfig.template new file mode 100644 index 000000000000..d6e1236bef54 --- /dev/null +++ b/android/java/templates/ProductConfig.template @@ -0,0 +1,31 @@ +// Copyright 2019 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package PACKAGE; + +#if defined(USE_FINAL) +#define MAYBE_FINAL final +#define MAYBE_USE_CHROMIUM_LINKER = USE_CHROMIUM_LINKER_VALUE +#define MAYBE_IS_BUNDLE = IS_BUNDLE_VALUE +#else +#define MAYBE_FINAL +#define MAYBE_USE_CHROMIUM_LINKER +#define MAYBE_IS_BUNDLE +#endif + +/** + * Product configuration. Generated on a per-target basis. + */ +public class ProductConfig { + // Sorted list of locales that have an uncompressed .pak within assets. + // Stored as an array because AssetManager.list() is slow. +#if defined(LOCALE_LIST) + public static final String[] LOCALES = LOCALE_LIST; +#else + public static final String[] LOCALES = {}; +#endif + + public static MAYBE_FINAL boolean USE_CHROMIUM_LINKER MAYBE_USE_CHROMIUM_LINKER; + public static MAYBE_FINAL boolean IS_BUNDLE MAYBE_IS_BUNDLE; +} diff --git a/android/java/test/DefaultLocaleLintTest.java b/android/java/test/DefaultLocaleLintTest.java new file mode 100644 index 000000000000..76f9ea53681d --- /dev/null +++ b/android/java/test/DefaultLocaleLintTest.java @@ -0,0 +1,17 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package test; + +import android.app.Application; + +/** + * Class which fails 'DefaultLocale' lint check. + */ +public class LintTest extends Application { + public String testTriggerDefaultLocaleCheck(int any) { + // String format with an integer requires a Locale since it may be formatted differently. + return String.format("Test %d", any); + } +} diff --git a/android/java/test/NewApiLintTest.java b/android/java/test/NewApiLintTest.java new file mode 100644 index 000000000000..66d576a436c7 --- /dev/null +++ b/android/java/test/NewApiLintTest.java @@ -0,0 +1,17 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package test; + +import android.app.Application; + +/** + * Class which fails 'NewAPI' lint check. + */ +public class NewApiTest extends Application { + public String testTriggerNewApiCheck() { + // This was added in API level 30. 
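+        // (Context#getAttributionTag() exists only on API level 30 and above, so
+        // lint's NewApi check fires when the target's minSdkVersion is lower.)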
+ return getApplicationContext().getAttributionTag(); + } +} diff --git a/android/java/test/NoSignatureChangeIncrementalJavacTestHelper.template b/android/java/test/NoSignatureChangeIncrementalJavacTestHelper.template new file mode 100644 index 000000000000..b51a67dc539a --- /dev/null +++ b/android/java/test/NoSignatureChangeIncrementalJavacTestHelper.template @@ -0,0 +1,18 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package test; + +public class NoSignatureChangeIncrementalJavacTestHelper { + private NoSignatureChangeIncrementalJavacTestHelper2 mHelper2 = + new NoSignatureChangeIncrementalJavacTestHelper2(); + + public String foo() { + return "{{foo_return_value}}"; + } + + public String bar() { + return mHelper2.bar(); + } +} diff --git a/android/java/test/NoSignatureChangeIncrementalJavacTestHelper2.java b/android/java/test/NoSignatureChangeIncrementalJavacTestHelper2.java new file mode 100644 index 000000000000..9694f3f1cdc5 --- /dev/null +++ b/android/java/test/NoSignatureChangeIncrementalJavacTestHelper2.java @@ -0,0 +1,11 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package test; + +public class NoSignatureChangeIncrementalJavacTestHelper2 { + public String bar() { + return "bar"; + } +} diff --git a/android/java/test/missing_symbol/B.java b/android/java/test/missing_symbol/B.java new file mode 100644 index 000000000000..639a744b33ca --- /dev/null +++ b/android/java/test/missing_symbol/B.java @@ -0,0 +1,9 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package test.missing_symbol; + +public class B { + public void foo() {} +} diff --git a/android/java/test/missing_symbol/D.template b/android/java/test/missing_symbol/D.template new file mode 100644 index 000000000000..3f7eef3d1eab --- /dev/null +++ b/android/java/test/missing_symbol/D.template @@ -0,0 +1,9 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package test.missing_symbol; + +public class D { + public void foo() {} +} diff --git a/android/java/test/missing_symbol/Importer.template b/android/java/test/missing_symbol/Importer.template new file mode 100644 index 000000000000..a1fd881316b4 --- /dev/null +++ b/android/java/test/missing_symbol/Importer.template @@ -0,0 +1,13 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package _IMPORTER_PACKAGE; + +import _IMPORTEE_PACKAGE._IMPORTEE_CLASS_NAME; + +public class Importer { + public Importer() { + new _IMPORTEE_CLASS_NAME().foo(); + } +} diff --git a/android/java/test/missing_symbol/ImportsSubB.java b/android/java/test/missing_symbol/ImportsSubB.java new file mode 100644 index 000000000000..2422b4add60a --- /dev/null +++ b/android/java/test/missing_symbol/ImportsSubB.java @@ -0,0 +1,13 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+
+package test.missing_symbol;
+
+import test.missing_symbol.sub.SubB;
+
+public class ImportsSubB {
+    public ImportsSubB() {
+        new SubB().foo();
+    }
+}
diff --git a/android/java/test/missing_symbol/c.jar b/android/java/test/missing_symbol/c.jar
new file mode 100644
index 0000000000000000000000000000000000000000..5f30be80a773500fa5c009ffd5fcc8b2bc26acff
GIT binary patch
literal 393
zcmWIWW@Zs#U|`^2kes3+d1pz9dEOlb_U|KakpCkRY^c
z=ZPZyiIHlTGv?o&6mWE&|BESd`h1s8M7O6Go-k}UCO>6iFgG*i)q*DuEm)i-M{wOn(*m!^#i;andF#pg^2_(WFTNcBZ$TuLadMwLJOS$Z&o&t
ORz@I91kyr`ARYi{lZCDT

literal 0
HcmV?d00001

diff --git a/android/java/test/missing_symbol/sub/BInMethodSignature.java b/android/java/test/missing_symbol/sub/BInMethodSignature.java
new file mode 100644
index 000000000000..36b6ba253fa3
--- /dev/null
+++ b/android/java/test/missing_symbol/sub/BInMethodSignature.java
@@ -0,0 +1,13 @@
+// Copyright 2021 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package test.missing_symbol.sub;
+
+import test.missing_symbol.B;
+
+public class BInMethodSignature {
+    public B foo() {
+        return new B();
+    }
+}
diff --git a/android/java/test/missing_symbol/sub/SubB.java b/android/java/test/missing_symbol/sub/SubB.java
new file mode 100644
index 000000000000..1e583786417c
--- /dev/null
+++ b/android/java/test/missing_symbol/sub/SubB.java
@@ -0,0 +1,9 @@
+// Copyright 2021 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package test.missing_symbol.sub;
+
+public class SubB {
+    public void foo() {}
+}
diff --git a/android/junit/AndroidManifest_mergetest.xml b/android/junit/AndroidManifest_mergetest.xml
new file mode 100644
index 000000000000..2541b8d7c6de
--- /dev/null
+++ b/android/junit/AndroidManifest_mergetest.xml
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright 2022 The Chromium Authors
+     Use of this source code is governed by a BSD-style license that can be
+     found in the LICENSE file. -->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android">
+    <application>
+        <meta-data android:name="test-metadata"
+                   android:value="Hello World" />
+    </application>
+</manifest>
diff --git a/android/junit/res/values/strings.xml b/android/junit/res/values/strings.xml
new file mode 100644
index 000000000000..9b9c0787b8f8
--- /dev/null
+++ b/android/junit/res/values/strings.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright 2022 The Chromium Authors
+     Use of this source code is governed by a BSD-style license that can be
+     found in the LICENSE file. -->
+<resources>
+    <string name="test_string">Hello World</string>
+</resources>
diff --git a/android/junit/src/org/chromium/build/AndroidAssetsTest.java b/android/junit/src/org/chromium/build/AndroidAssetsTest.java
new file mode 100644
index 000000000000..8ff149e90965
--- /dev/null
+++ b/android/junit/src/org/chromium/build/AndroidAssetsTest.java
@@ -0,0 +1,58 @@
+// Copyright 2022 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.build;
+
+import android.content.Context;
+import android.content.pm.ApplicationInfo;
+import android.content.pm.PackageManager;
+import android.content.pm.PackageManager.NameNotFoundException;
+
+import org.junit.Assert;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.robolectric.RobolectricTestRunner;
+import org.robolectric.RuntimeEnvironment;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+/**
+ * Checks that Robolectric tests can use android assets.
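+ * The asset read back is this test's own source file (see TEST_ASSET_NAME), so the
+ * test verifies both that assets are packaged and that their contents are intact.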
+ */ +@RunWith(RobolectricTestRunner.class) +public class AndroidAssetsTest { + private static final String TEST_ASSET_NAME = "AndroidAssetsTest.java"; + + public String readTestAsset() throws IOException { + try (InputStream stream = + RuntimeEnvironment.getApplication().getAssets().open(TEST_ASSET_NAME)) { + byte[] buffer = new byte[stream.available()]; + stream.read(buffer); + return new String(buffer); + } + } + + @Test + public void testAssetsExist() throws IOException { + String myselfAsAssetData = readTestAsset(); + Assert.assertTrue("asset not correct. It had length=" + myselfAsAssetData.length(), + myselfAsAssetData.contains("String myselfAsAssetData = ")); + } + + @Test + public void testResourcesExist() { + String actual = RuntimeEnvironment.getApplication().getString(R.string.test_string); + Assert.assertEquals("Hello World", actual); + } + + @Test + public void testManifestMerged() throws NameNotFoundException { + Context context = RuntimeEnvironment.getApplication(); + ApplicationInfo info = context.getPackageManager().getApplicationInfo( + context.getPackageName(), PackageManager.GET_META_DATA); + String actual = info.metaData.getString("test-metadata"); + Assert.assertEquals("Hello World", actual); + } +} diff --git a/android/junit/src/org/chromium/build/IncrementalJavacTest.java b/android/junit/src/org/chromium/build/IncrementalJavacTest.java new file mode 100644 index 000000000000..b15b7df26f4b --- /dev/null +++ b/android/junit/src/org/chromium/build/IncrementalJavacTest.java @@ -0,0 +1,33 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.build; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.robolectric.RobolectricTestRunner; +import org.robolectric.annotation.Config; + +import test.NoSignatureChangeIncrementalJavacTestHelper; + +/** + * Checks that build picked up changes to + * {@link NoSignatureChangeIncrementalJavacTestHelper#foo()}. + */ +@RunWith(RobolectricTestRunner.class) +@Config(manifest = Config.NONE) +public final class IncrementalJavacTest { + @Test + public void testNoSignatureChange() { + NoSignatureChangeIncrementalJavacTestHelper helper = + new NoSignatureChangeIncrementalJavacTestHelper(); + // #foo() should return updated value. + assertEquals("foo2", helper.foo()); + + // #bar() should not crash. + assertEquals("bar", helper.bar()); + } +} diff --git a/android/lighttpd_server.py b/android/lighttpd_server.py new file mode 100755 index 000000000000..9950253a6b8c --- /dev/null +++ b/android/lighttpd_server.py @@ -0,0 +1,263 @@ +#!/usr/bin/env python3 +# +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Provides a convenient wrapper for spawning a test lighttpd instance. + +Usage: + lighttpd_server PATH_TO_DOC_ROOT +""" + + +import codecs +import contextlib +import os +import random +import shutil +import socket +import subprocess +import sys +import tempfile +import time + +from six.moves import http_client +from six.moves import input # pylint: disable=redefined-builtin + +from pylib import constants +from pylib import pexpect + + +class LighttpdServer: + """Wraps lighttpd server, providing robust startup. + + Args: + document_root: Path to root of this server's hosted files. + port: TCP port on the _host_ machine that the server will listen on. 
If
+      omitted it will attempt to use 9000, or if unavailable it will find
+      a free port from 8001 - 8999.
+    lighttpd_path, lighttpd_module_path: Optional paths to lighttpd binaries.
+    base_config_path: If supplied this file will replace the built-in default
+      lighttpd config file.
+    extra_config_contents: If specified, this string will be appended to the
+      base config (default built-in, or from base_config_path).
+    config_path, error_log, access_log: Optional paths where the class should
+      place temporary files for this session.
+  """
+
+  def __init__(self, document_root, port=None,
+               lighttpd_path=None, lighttpd_module_path=None,
+               base_config_path=None, extra_config_contents=None,
+               config_path=None, error_log=None, access_log=None):
+    self.temp_dir = tempfile.mkdtemp(prefix='lighttpd_for_chrome_android')
+    self.document_root = os.path.abspath(document_root)
+    self.fixed_port = port
+    self.port = port or constants.LIGHTTPD_DEFAULT_PORT
+    self.server_tag = 'LightTPD ' + str(random.randint(111111, 999999))
+    self.lighttpd_path = lighttpd_path or '/usr/sbin/lighttpd'
+    self.lighttpd_module_path = lighttpd_module_path or '/usr/lib/lighttpd'
+    self.base_config_path = base_config_path
+    self.extra_config_contents = extra_config_contents
+    self.config_path = config_path or self._Mktmp('config')
+    self.error_log = error_log or self._Mktmp('error_log')
+    self.access_log = access_log or self._Mktmp('access_log')
+    self.pid_file = self._Mktmp('pid_file')
+    self.process = None
+
+  def _Mktmp(self, name):
+    return os.path.join(self.temp_dir, name)
+
+  @staticmethod
+  def _GetRandomPort():
+    # The port range for the test server is defined in constants.py.
+    return random.randint(constants.LIGHTTPD_RANDOM_PORT_FIRST,
+                          constants.LIGHTTPD_RANDOM_PORT_LAST)
+
+  def StartupHttpServer(self):
+    """Starts up a http server with specified document root and port."""
+    # If we want a specific port, make sure no one else is listening on it.
+    if self.fixed_port:
+      self._KillProcessListeningOnPort(self.fixed_port)
+    while True:
+      if self.base_config_path:
+        # Read the config.
+        with codecs.open(self.base_config_path, 'r', 'utf-8') as f:
+          config_contents = f.read()
+      else:
+        config_contents = self._GetDefaultBaseConfig()
+      if self.extra_config_contents:
+        config_contents += self.extra_config_contents
+      # Write out the config, filling in placeholders from the members of
+      # |self|.
+      with codecs.open(self.config_path, 'w', 'utf-8') as f:
+        f.write(config_contents % self.__dict__)
+      if (not os.path.exists(self.lighttpd_path) or
+          not os.access(self.lighttpd_path, os.X_OK)):
sudo apt-get install lighttpd)' + % self.lighttpd_path) + # pylint: disable=no-member + self.process = pexpect.spawn(self.lighttpd_path, + ['-D', '-f', self.config_path, + '-m', self.lighttpd_module_path], + cwd=self.temp_dir) + client_error, server_error = self._TestServerConnection() + if not client_error: + assert int(open(self.pid_file, 'r').read()) == self.process.pid + break + self.process.close() + + if self.fixed_port or 'in use' not in server_error: + print('Client error:', client_error) + print('Server error:', server_error) + return False + self.port = self._GetRandomPort() + return True + + def ShutdownHttpServer(self): + """Shuts down our lighttpd processes.""" + if self.process: + self.process.terminate() + shutil.rmtree(self.temp_dir, ignore_errors=True) + + def _TestServerConnection(self): + # Wait for server to start + server_msg = '' + for timeout in range(1, 5): + client_error = None + try: + with contextlib.closing( + http_client.HTTPConnection('127.0.0.1', self.port, + timeout=timeout)) as http: + http.set_debuglevel(timeout > 3) + http.request('HEAD', '/') + r = http.getresponse() + r.read() + if (r.status == 200 and r.reason == 'OK' and + r.getheader('Server') == self.server_tag): + return (None, server_msg) + client_error = ('Bad response: %s %s version %s\n ' % + (r.status, r.reason, r.version) + + '\n '.join([': '.join(h) for h in r.getheaders()])) + except (http_client.HTTPException, socket.error) as client_error: + pass # Probably too quick connecting: try again + # Check for server startup error messages + # pylint: disable=no-member + ix = self.process.expect([pexpect.TIMEOUT, pexpect.EOF, '.+'], + timeout=timeout) + if ix == 2: # stdout spew from the server + server_msg += self.process.match.group(0) # pylint: disable=no-member + elif ix == 1: # EOF -- server has quit so giveup. + client_error = client_error or 'Server exited' + break + return (client_error or 'Timeout', server_msg) + + @staticmethod + def _KillProcessListeningOnPort(port): + """Checks if there is a process listening on port number |port| and + terminates it if found. + + Args: + port: Port number to check. + """ + if subprocess.call(['fuser', '-kv', '%d/tcp' % port]) == 0: + # Give the process some time to terminate and check that it is gone. + time.sleep(2) + assert subprocess.call(['fuser', '-v', '%d/tcp' % port]) != 0, \ + 'Unable to kill process listening on port %d.' % port + + @staticmethod + def _GetDefaultBaseConfig(): + return """server.tag = "%(server_tag)s" +server.modules = ( "mod_access", + "mod_accesslog", + "mod_alias", + "mod_cgi", + "mod_rewrite" ) + +# default document root required +#server.document-root = "." 
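+# (Left commented out on purpose: the real document-root for the test server
+# is set from the class's document_root member in the autogenerated
+# test-specific section at the end of this template.)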
+ +# files to check for if .../ is requested +index-file.names = ( "index.php", "index.pl", "index.cgi", + "index.html", "index.htm", "default.htm" ) +# mimetype mapping +mimetype.assign = ( + ".gif" => "image/gif", + ".jpg" => "image/jpeg", + ".jpeg" => "image/jpeg", + ".png" => "image/png", + ".svg" => "image/svg+xml", + ".css" => "text/css", + ".html" => "text/html", + ".htm" => "text/html", + ".xhtml" => "application/xhtml+xml", + ".xhtmlmp" => "application/vnd.wap.xhtml+xml", + ".js" => "application/x-javascript", + ".log" => "text/plain", + ".conf" => "text/plain", + ".text" => "text/plain", + ".txt" => "text/plain", + ".dtd" => "text/xml", + ".xml" => "text/xml", + ".manifest" => "text/cache-manifest", + ) + +# Use the "Content-Type" extended attribute to obtain mime type if possible +mimetype.use-xattr = "enable" + +## +# which extensions should not be handle via static-file transfer +# +# .php, .pl, .fcgi are most often handled by mod_fastcgi or mod_cgi +static-file.exclude-extensions = ( ".php", ".pl", ".cgi" ) + +server.bind = "127.0.0.1" +server.port = %(port)s + +## virtual directory listings +dir-listing.activate = "enable" +#dir-listing.encoding = "iso-8859-2" +#dir-listing.external-css = "style/oldstyle.css" + +## enable debugging +#debug.log-request-header = "enable" +#debug.log-response-header = "enable" +#debug.log-request-handling = "enable" +#debug.log-file-not-found = "enable" + +#### SSL engine +#ssl.engine = "enable" +#ssl.pemfile = "server.pem" + +# Autogenerated test-specific config follows. + +cgi.assign = ( ".cgi" => "/usr/bin/env", + ".pl" => "/usr/bin/env", + ".asis" => "/bin/cat", + ".php" => "/usr/bin/php-cgi" ) + +server.errorlog = "%(error_log)s" +accesslog.filename = "%(access_log)s" +server.upload-dirs = ( "/tmp" ) +server.pid-file = "%(pid_file)s" +server.document-root = "%(document_root)s" + +""" + + +def main(argv): + server = LighttpdServer(*argv[1:]) + try: + if server.StartupHttpServer(): + input('Server running at http://127.0.0.1:%s -' + ' press Enter to exit it.' % server.port) + else: + print('Server exit code:', server.process.exitstatus) + finally: + server.ShutdownHttpServer() + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/android/list_class_verification_failures.py b/android/list_class_verification_failures.py new file mode 100755 index 000000000000..9c94e308aa61 --- /dev/null +++ b/android/list_class_verification_failures.py @@ -0,0 +1,279 @@ +#!/usr/bin/env vpython3 +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""A helper script to list class verification errors. + +This is a wrapper around the device's oatdump executable, parsing desired output +and accommodating API-level-specific details, such as file paths. +""" + + + +import argparse +import dataclasses # pylint: disable=wrong-import-order +import logging +import os +import re + +import devil_chromium +from devil.android import device_errors +from devil.android import device_temp_file +from devil.android import device_utils +from devil.android.ndk import abis +from devil.android.sdk import version_codes +from devil.android.tools import script_common +from devil.utils import logging_common +from py_utils import tempfile_ext + +STATUSES = [ + 'NotReady', + 'RetryVerificationAtRuntime', + 'Verified', + 'Initialized', + 'SuperclassValidated', +] + + +def DetermineDeviceToUse(devices): + """Like DeviceUtils.HealthyDevices(), but only allow a single device. 
+ + Args: + devices: A (possibly empty) list of serial numbers, such as from the + --device flag. + Returns: + A single device_utils.DeviceUtils instance. + Raises: + device_errors.NoDevicesError: Raised when no non-denylisted devices exist. + device_errors.MultipleDevicesError: Raise when multiple devices exist, but + |devices| does not distinguish which to use. + """ + if not devices: + # If the user did not specify which device, we let HealthyDevices raise + # MultipleDevicesError. + devices = None + usable_devices = device_utils.DeviceUtils.HealthyDevices(device_arg=devices) + # If the user specified more than one device, we still only want to support a + # single device, so we explicitly raise MultipleDevicesError. + if len(usable_devices) > 1: + raise device_errors.MultipleDevicesError(usable_devices) + return usable_devices[0] + + +class DeviceOSError(Exception): + """Raised when a file is missing from the device, or something similar.""" + + +class UnsupportedDeviceError(Exception): + """Raised when the device is not supported by this script.""" + + +def _GetFormattedArch(device): + abi = device.product_cpu_abi + # Some architectures don't map 1:1 with the folder names. + return {abis.ARM_64: 'arm64', abis.ARM: 'arm'}.get(abi, abi) + + +def FindOdexFiles(device, package_name): + """Gets the full paths to the dex files on the device.""" + sdk_level = device.build_version_sdk + paths_to_apk = device.GetApplicationPaths(package_name) + if not paths_to_apk: + raise DeviceOSError( + 'Could not find data directory for {}. Is it installed?'.format( + package_name)) + + ret = [] + for path_to_apk in paths_to_apk: + if version_codes.LOLLIPOP <= sdk_level <= version_codes.LOLLIPOP_MR1: + # Of the form "com.example.foo-\d", where \d is a digit (usually 1 or 2). + package_with_suffix = os.path.basename(os.path.dirname(path_to_apk)) + arch = _GetFormattedArch(device) + dalvik_prefix = '/data/dalvik-cache/{arch}'.format(arch=arch) + odex_file = '{prefix}/data@app@{package}@base.apk@classes.dex'.format( + prefix=dalvik_prefix, package=package_with_suffix) + elif sdk_level >= version_codes.MARSHMALLOW: + arch = _GetFormattedArch(device) + odex_file = '{data_dir}/oat/{arch}/base.odex'.format( + data_dir=os.path.dirname(path_to_apk), arch=arch) + else: + raise UnsupportedDeviceError( + 'Unsupported API level: {}'.format(sdk_level)) + + odex_file_exists = device.FileExists(odex_file) + if odex_file_exists: + ret.append(odex_file) + elif sdk_level >= version_codes.PIE: + raise DeviceOSError( + 'Unable to find odex file: you must run dex2oat on debuggable apps ' + 'on >= P after installation.') + else: + raise DeviceOSError('Unable to find odex file ' + odex_file) + return ret + + +def _AdbOatDump(device, odex_file, out_file): + """Runs oatdump on the device.""" + # Get the path to the odex file. 
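+  # oatdump runs on the device and can take minutes on large APKs, so its
+  # report is written to an on-device temp file first and then pulled to
+  # |out_file| on the host; hence the generous timeouts below.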
+ with device_temp_file.DeviceTempFile(device.adb) as device_file: + device.RunShellCommand( + ['oatdump', '--oat-file=' + odex_file, '--output=' + device_file.name], + timeout=420, + shell=True, + check_return=True) + device.PullFile(device_file.name, out_file, timeout=220) + + +@dataclasses.dataclass(order=True, frozen=True) +class JavaClass: + """This represents a Java Class and its ART Class Verification status.""" + name: str + verification_status: str + + +def _ParseMappingFile(proguard_map_file): + """Creates a map of obfuscated names to deobfuscated names.""" + mappings = {} + with open(proguard_map_file, 'r') as f: + pattern = re.compile(r'^(\S+) -> (\S+):') + for line in f: + m = pattern.match(line) + if m is not None: + deobfuscated_name = m.group(1) + obfuscated_name = m.group(2) + mappings[obfuscated_name] = deobfuscated_name + return mappings + + +def _DeobfuscateJavaClassName(dex_code_name, proguard_mappings): + return proguard_mappings.get(dex_code_name, dex_code_name) + + +def FormatJavaClassName(dex_code_name, proguard_mappings): + obfuscated_name = dex_code_name.replace('/', '.') + if proguard_mappings is not None: + return _DeobfuscateJavaClassName(obfuscated_name, proguard_mappings) + return obfuscated_name + + +def ParseOatdump(oatdump_output, proguard_mappings): + """Lists all Java classes in the dex along with verification status.""" + java_classes = [] + pattern = re.compile(r'\d+: L([^;]+).*\(type_idx=[^(]+\((\w+)\).*') + for line in oatdump_output: + m = pattern.match(line) + if m is not None: + name = FormatJavaClassName(m.group(1), proguard_mappings) + # Some platform levels prefix this with "Status" while other levels do + # not. Strip this for consistency. + verification_status = m.group(2).replace('Status', '') + java_classes.append(JavaClass(name, verification_status)) + return java_classes + + +def _PrintVerificationResults(target_status, java_classes, show_summary): + """Prints results for user output.""" + # Sort to keep output consistent between runs. + java_classes.sort(key=lambda c: c.name) + d = {} + for status in STATUSES: + d[status] = 0 + + for java_class in java_classes: + if java_class.verification_status == target_status: + print(java_class.name) + if java_class.verification_status not in d: + raise RuntimeError('Unexpected status: {0}'.format( + java_class.verification_status)) + d[java_class.verification_status] += 1 + + if show_summary: + for status in d: + count = d[status] + print('Total {status} classes: {num}'.format( + status=status, num=count)) + print('Total number of classes: {num}'.format( + num=len(java_classes))) + + +def RealMain(mapping, device_arg, package, status, hide_summary, workdir): + if mapping is None: + logging.warning('Skipping deobfuscation because no map file was provided.') + proguard_mappings = None + else: + proguard_mappings = _ParseMappingFile(mapping) + device = DetermineDeviceToUse(device_arg) + host_tempfile = os.path.join(workdir, 'out.dump') + device.EnableRoot() + odex_files = FindOdexFiles(device, package) + java_classes = set() + for odex_file in odex_files: + _AdbOatDump(device, odex_file, host_tempfile) + with open(host_tempfile, 'r') as f: + java_classes.update(ParseOatdump(f, proguard_mappings)) + _PrintVerificationResults(status, sorted(java_classes), not hide_summary) + + +def main(): + parser = argparse.ArgumentParser(description=""" +List Java classes in an APK which fail ART class verification. 
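+
+Example:
+  build/android/list_class_verification_failures.py --package com.android.chrome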
+""") + parser.add_argument( + '--package', + '-P', + type=str, + default=None, + required=True, + help='Specify the full application package name') + parser.add_argument( + '--mapping', + '-m', + type=os.path.realpath, + default=None, + help='Mapping file for the desired APK to deobfuscate class names') + parser.add_argument( + '--hide-summary', + default=False, + action='store_true', + help='Do not output the total number of classes in each Status.') + parser.add_argument( + '--status', + type=str, + default='RetryVerificationAtRuntime', + choices=STATUSES, + help='Which category of classes to list at the end of the script') + parser.add_argument( + '--workdir', + '-w', + type=os.path.realpath, + default=None, + help=('Work directory for oatdump output (default = temporary ' + 'directory). If specified, this will not be cleaned up at the end ' + 'of the script (useful if you want to inspect oatdump output ' + 'manually)')) + + script_common.AddEnvironmentArguments(parser) + script_common.AddDeviceArguments(parser) + logging_common.AddLoggingArguments(parser) + + args = parser.parse_args() + devil_chromium.Initialize(adb_path=args.adb_path) + logging_common.InitializeLogging(args) + + if args.workdir: + if not os.path.isdir(args.workdir): + raise RuntimeError('Specified working directory does not exist') + RealMain(args.mapping, args.devices, args.package, args.status, + args.hide_summary, args.workdir) + # Assume the user wants the workdir to persist (useful for debugging). + logging.warning('Not cleaning up explicitly-specified workdir: %s', + args.workdir) + else: + with tempfile_ext.NamedTemporaryDirectory() as workdir: + RealMain(args.mapping, args.devices, args.package, args.status, + args.hide_summary, workdir) + + +if __name__ == '__main__': + main() diff --git a/android/list_class_verification_failures_test.py b/android/list_class_verification_failures_test.py new file mode 100755 index 000000000000..149943664233 --- /dev/null +++ b/android/list_class_verification_failures_test.py @@ -0,0 +1,232 @@ +#!/usr/bin/env vpython3 +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import unittest + +import list_class_verification_failures as list_verification + +import devil_chromium # pylint: disable=unused-import +from devil.android import device_errors +from devil.android import device_utils +from devil.android.ndk import abis +from devil.android.sdk import version_codes + +import mock # pylint: disable=import-error + + +def _CreateOdexLine(java_class_name, type_idx, verification_status): + """Create a rough approximation of a line of oatdump output.""" + return ('{type_idx}: L{java_class}; (offset=0xac) (type_idx={type_idx}) ' + '({verification}) ' + '(OatClassNoneCompiled)'.format(type_idx=type_idx, + java_class=java_class_name, + verification=verification_status)) + + +def _ClassForName(name, classes): + return next(c for c in classes if c.name == name) + + +class _DetermineDeviceToUseTest(unittest.TestCase): + + def testDetermineDeviceToUse_emptyListWithOneAttachedDevice(self): + fake_attached_devices = ['123'] + user_specified_devices = [] + device_utils.DeviceUtils.HealthyDevices = mock.MagicMock( + return_value=fake_attached_devices) + result = list_verification.DetermineDeviceToUse(user_specified_devices) + self.assertEqual(result, fake_attached_devices[0]) + # pylint: disable=no-member + device_utils.DeviceUtils.HealthyDevices.assert_called_with(device_arg=None) + # pylint: enable=no-member + + def testDetermineDeviceToUse_emptyListWithNoAttachedDevices(self): + user_specified_devices = [] + device_utils.DeviceUtils.HealthyDevices = mock.MagicMock( + side_effect=device_errors.NoDevicesError()) + with self.assertRaises(device_errors.NoDevicesError) as _: + list_verification.DetermineDeviceToUse(user_specified_devices) + # pylint: disable=no-member + device_utils.DeviceUtils.HealthyDevices.assert_called_with(device_arg=None) + # pylint: enable=no-member + + def testDetermineDeviceToUse_oneElementListWithOneAttachedDevice(self): + user_specified_devices = ['123'] + fake_attached_devices = ['123'] + device_utils.DeviceUtils.HealthyDevices = mock.MagicMock( + return_value=fake_attached_devices) + result = list_verification.DetermineDeviceToUse(user_specified_devices) + self.assertEqual(result, fake_attached_devices[0]) + # pylint: disable=no-member + device_utils.DeviceUtils.HealthyDevices.assert_called_with( + device_arg=user_specified_devices) + # pylint: enable=no-member + + +class _ListClassVerificationFailuresTest(unittest.TestCase): + + def testPathToDexForPlatformVersion_noPaths(self): + sdk_int = version_codes.LOLLIPOP + paths_to_apk = [] + package_name = 'package.name' + arch = abis.ARM_64 + + device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch) + device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk) + + with self.assertRaises(list_verification.DeviceOSError) as cm: + list_verification.FindOdexFiles(device, package_name) + message = str(cm.exception) + self.assertIn('Could not find data directory', message) + + def testPathToDexForPlatformVersion_multiplePaths(self): + sdk_int = version_codes.LOLLIPOP + paths_to_apk = ['/first/path', '/second/path'] + package_name = 'package.name' + arch = abis.ARM_64 + + device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch) + device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk) + + odex_files = list_verification.FindOdexFiles(device, package_name) + self.assertEqual(odex_files, [ + '/data/dalvik-cache/arm64/data@app@first@base.apk@classes.dex', + '/data/dalvik-cache/arm64/data@app@second@base.apk@classes.dex' + ]) + + def 
testPathToDexForPlatformVersion_dalvikApiLevel(self): + sdk_int = version_codes.KITKAT + paths_to_apk = ['/some/path'] + package_name = 'package.name' + arch = abis.ARM_64 + + device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch) + device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk) + + with self.assertRaises(list_verification.UnsupportedDeviceError) as _: + list_verification.FindOdexFiles(device, package_name) + + def testPathToDexForPlatformVersion_lollipopArm(self): + sdk_int = version_codes.LOLLIPOP + package_name = 'package.name' + paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)] + arch = 'arm' + + device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch) + device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk) + device.FileExists = mock.MagicMock(return_value=True) + + odex_files = list_verification.FindOdexFiles(device, package_name) + self.assertEqual( + odex_files, + ['/data/dalvik-cache/arm/data@app@package.name-1@base.apk@classes.dex']) + + def testPathToDexForPlatformVersion_marshmallowArm(self): + sdk_int = version_codes.MARSHMALLOW + package_name = 'package.name' + paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)] + arch = 'arm' + + device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch) + device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk) + device.FileExists = mock.MagicMock(return_value=True) + + odex_files = list_verification.FindOdexFiles(device, package_name) + self.assertEqual(odex_files, + ['/some/path/package.name-1/oat/arm/base.odex']) + + def testPathToDexForPlatformVersion_marshmallowArm64(self): + sdk_int = version_codes.MARSHMALLOW + package_name = 'package.name' + paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)] + arch = abis.ARM_64 + + device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch) + device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk) + device.FileExists = mock.MagicMock(return_value=True) + + odex_files = list_verification.FindOdexFiles(device, package_name) + self.assertEqual(odex_files, + ['/some/path/package.name-1/oat/arm64/base.odex']) + + def testPathToDexForPlatformVersion_pieNoOdexFile(self): + sdk_int = version_codes.PIE + package_name = 'package.name' + paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)] + arch = abis.ARM_64 + + device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch) + device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk) + device.FileExists = mock.MagicMock(return_value=False) + + with self.assertRaises(list_verification.DeviceOSError) as cm: + list_verification.FindOdexFiles(device, package_name) + message = str(cm.exception) + self.assertIn('you must run dex2oat on debuggable apps on >= P', message) + + def testPathToDexForPlatformVersion_lowerApiLevelNoOdexFile(self): + sdk_int = version_codes.MARSHMALLOW + package_name = 'package.name' + paths_to_apk = ['/some/path/{}-1/base.apk'.format(package_name)] + arch = abis.ARM_64 + + device = mock.Mock(build_version_sdk=sdk_int, product_cpu_abi=arch) + device.GetApplicationPaths = mock.MagicMock(return_value=paths_to_apk) + device.FileExists = mock.MagicMock(return_value=False) + + with self.assertRaises(list_verification.DeviceOSError) as _: + list_verification.FindOdexFiles(device, package_name) + + def testListClasses_noProguardMap(self): + oatdump_output = [ + _CreateOdexLine('a.b.JavaClass1', 6, 'StatusVerified'), + _CreateOdexLine('a.b.JavaClass2', 7,
'StatusRetryVerificationAtRuntime'), + ] + + classes = list_verification.ParseOatdump(oatdump_output, None) + self.assertEqual(2, len(classes)) + java_class_1 = _ClassForName('a.b.JavaClass1', classes) + java_class_2 = _ClassForName('a.b.JavaClass2', classes) + self.assertEqual(java_class_1.verification_status, 'Verified') + self.assertEqual(java_class_2.verification_status, + 'RetryVerificationAtRuntime') + + def testListClasses_proguardMap(self): + oatdump_output = [ + _CreateOdexLine('a.b.ObfuscatedJavaClass1', 6, 'StatusVerified'), + _CreateOdexLine('a.b.ObfuscatedJavaClass2', 7, + 'StatusRetryVerificationAtRuntime'), + ] + + mapping = { + 'a.b.ObfuscatedJavaClass1': 'a.b.JavaClass1', + 'a.b.ObfuscatedJavaClass2': 'a.b.JavaClass2', + } + classes = list_verification.ParseOatdump(oatdump_output, mapping) + self.assertEqual(2, len(classes)) + java_class_1 = _ClassForName('a.b.JavaClass1', classes) + java_class_2 = _ClassForName('a.b.JavaClass2', classes) + self.assertEqual(java_class_1.verification_status, 'Verified') + self.assertEqual(java_class_2.verification_status, + 'RetryVerificationAtRuntime') + + def testListClasses_noStatusPrefix(self): + oatdump_output = [ + _CreateOdexLine('a.b.JavaClass1', 6, 'Verified'), + _CreateOdexLine('a.b.JavaClass2', 7, 'RetryVerificationAtRuntime'), + ] + + classes = list_verification.ParseOatdump(oatdump_output, None) + self.assertEqual(2, len(classes)) + java_class_1 = _ClassForName('a.b.JavaClass1', classes) + java_class_2 = _ClassForName('a.b.JavaClass2', classes) + self.assertEqual(java_class_1.verification_status, 'Verified') + self.assertEqual(java_class_2.verification_status, + 'RetryVerificationAtRuntime') + +if __name__ == '__main__': + # Suppress logging messages. + unittest.main(buffer=True) diff --git a/android/list_java_targets.py b/android/list_java_targets.py new file mode 100755 index 000000000000..b135b0fcab10 --- /dev/null +++ b/android/list_java_targets.py @@ -0,0 +1,297 @@ +#!/usr/bin/env python3 +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Lint as: python3 +"""Prints out available java targets. + +Examples: +# List GN target for bundles: +build/android/list_java_targets.py -C out/Default --type android_app_bundle \ +--gn-labels + +# List all android targets with types: +build/android/list_java_targets.py -C out/Default --print-types + +# Build all apk targets: +build/android/list_java_targets.py -C out/Default --type android_apk | xargs \ +autoninja -C out/Default + +# Show how many of each target type exist: +build/android/list_java_targets.py -C out/Default --stats + +""" + +import argparse +import collections +import json +import logging +import os +import shutil +import subprocess +import sys + +_SRC_ROOT = os.path.normpath(os.path.join(os.path.dirname(__file__), '..', + '..')) +sys.path.append(os.path.join(_SRC_ROOT, 'build', 'android')) +from pylib import constants + +_VALID_TYPES = ( + 'android_apk', + 'android_app_bundle', + 'android_app_bundle_module', + 'android_assets', + 'android_resources', + 'dist_aar', + 'dist_jar', + 'group', + 'java_annotation_processor', + 'java_binary', + 'java_library', + 'robolectric_binary', + 'system_java_library', +) + + +def _resolve_ninja(): + # Prefer the version on PATH, but fallback to known version if PATH doesn't + # have one (e.g. on bots). 
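+  # (shutil.which() returns None when 'ninja' is not on PATH.)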
+ if shutil.which('ninja') is None: + return os.path.join(_SRC_ROOT, 'third_party', 'ninja', 'ninja') + return 'ninja' + + +def _resolve_autoninja(): + # Prefer the version on PATH, but fallback to known version if PATH doesn't + # have one (e.g. on bots). + if shutil.which('autoninja') is None: + return os.path.join(_SRC_ROOT, 'third_party', 'depot_tools', 'autoninja') + return 'autoninja' + + +def _run_ninja(output_dir, args, j_value=None, quiet=False): + if j_value: + cmd = [_resolve_ninja(), '-j', j_value] + else: + cmd = [_resolve_autoninja()] + cmd += [ + '-C', + output_dir, + ] + cmd.extend(args) + logging.info('Running: %r', cmd) + if quiet: + subprocess.run(cmd, check=True, capture_output=True) + else: + subprocess.run(cmd, check=True, stdout=sys.stderr) + + +def _query_for_build_config_targets(output_dir): + # Query ninja rather than GN since it's faster. + # Use ninja rather than autoninja to avoid extra output if user has set the + # NINJA_SUMMARIZE_BUILD environment variable. + cmd = [_resolve_ninja(), '-C', output_dir, '-t', 'targets'] + logging.info('Running: %r', cmd) + ninja_output = subprocess.run(cmd, + check=True, + capture_output=True, + encoding='ascii').stdout + ret = [] + SUFFIX = '__build_config_crbug_908819' + SUFFIX_LEN = len(SUFFIX) + for line in ninja_output.splitlines(): + ninja_target = line.rsplit(':', 1)[0] + # Ignore root aliases by ensuring a : exists. + if ':' in ninja_target and ninja_target.endswith(SUFFIX): + ret.append(f'//{ninja_target[:-SUFFIX_LEN]}') + return ret + + +def _query_json(*, json_dict: dict, query: str, path: str): + """Traverses through the json dictionary according to the query. + + If at any point a key does not exist, return the empty string, but raise an + error if a key exists but is the wrong type. + + This is roughly equivalent to returning + json_dict[queries[0]]?[queries[1]]?...[queries[N]]? where the ? means that if + the key doesn't exist, the empty string is returned. + + Example: + Given json_dict = {'a': {'b': 'c'}} + - If queries = ['a', 'b'] + Return: 'c' + - If queries = ['a', 'd'] + Return '' + - If queries = ['x'] + Return '' + - If queries = ['a', 'b', 'x'] + Raise an error since json_dict['a']['b'] is the string 'c' instead of an + expected dict that can be indexed into. + + Returns the final result after exhausting all the queries. + """ + queries = query.split('.') + value = json_dict + try: + for key in queries: + value = value.get(key) + if value is None: + return '' + except AttributeError as e: + raise Exception( + f'Failed when attempting to get {queries} from {path}') from e + return value + + +class _TargetEntry: + + def __init__(self, gn_target): + assert gn_target.startswith('//'), f'{gn_target} does not start with //' + assert ':' in gn_target, f'Non-root {gn_target} required' + self.gn_target = gn_target + self._build_config = None + + @property + def ninja_target(self): + return self.gn_target[2:] + + @property + def ninja_build_config_target(self): + return self.ninja_target + '__build_config_crbug_908819' + + @property + def build_config_path(self): + """Returns the filepath of the project's .build_config.json.""" + ninja_target = self.ninja_target + # Support targets at the root level. e.g. 
//:foo + if ninja_target[0] == ':': + ninja_target = ninja_target[1:] + subpath = ninja_target.replace(':', os.path.sep) + '.build_config.json' + return os.path.join(constants.GetOutDirectory(), 'gen', subpath) + + def build_config(self): + """Reads and returns the project's .build_config.json JSON.""" + if not self._build_config: + with open(self.build_config_path) as jsonfile: + self._build_config = json.load(jsonfile) + return self._build_config + + def get_type(self): + """Returns the target type from its .build_config.json.""" + return self.build_config()['deps_info']['type'] + + def proguard_enabled(self): + """Returns whether proguard runs for this target.""" + # Modules set proguard_enabled, but the proguarding happens only once at the + # bundle level. + if self.get_type() == 'android_app_bundle_module': + return False + return self.build_config()['deps_info'].get('proguard_enabled', False) + + +def main(): + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) + parser.add_argument('-C', + '--output-directory', + help='If outdir is not provided, will attempt to guess.') + parser.add_argument('--gn-labels', + action='store_true', + help='Print GN labels rather than ninja targets') + parser.add_argument( + '--nested', + action='store_true', + help='Do not convert nested targets to their top-level equivalents. ' + 'E.g. Without this, foo_test__apk -> foo_test') + parser.add_argument('--print-types', + action='store_true', + help='Print type of each target') + parser.add_argument( + '--print-build-config-paths', + action='store_true', + help='Print path to the .build_config.json of each target') + parser.add_argument('--build', + action='store_true', + help='Build all .build_config.json files.') + parser.add_argument('--type', + action='append', + help='Restrict to targets of given type', + choices=_VALID_TYPES) + parser.add_argument('--stats', + action='store_true', + help='Print counts of each target type.') + parser.add_argument('--proguard-enabled', + action='store_true', + help='Restrict to targets that have proguard enabled.') + parser.add_argument('--query', + help='A dot separated string specifying a query for a ' + 'build config json value of each target. Example: Use ' + '--query deps_info.unprocessed_jar_path to show a list ' + 'of all targets that have a non-empty deps_info dict and ' + 'non-empty "unprocessed_jar_path" value in that dict.') + parser.add_argument('-j', help='Use -j with ninja instead of autoninja.') + parser.add_argument('-v', '--verbose', default=0, action='count') + parser.add_argument('-q', '--quiet', default=0, action='count') + args = parser.parse_args() + + args.build |= bool(args.type or args.proguard_enabled or args.print_types + or args.stats or args.query) + + logging.basicConfig(level=logging.WARNING + 10 * (args.quiet - args.verbose), + format='%(levelname).1s %(relativeCreated)6d %(message)s') + + if args.output_directory: + constants.SetOutputDirectory(args.output_directory) + constants.CheckOutputDirectory() + output_dir = constants.GetOutDirectory() + + # Query ninja for all __build_config_crbug_908819 targets. 
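+  # Each such alias corresponds to one GN Java target; stripping the
+  # __build_config_crbug_908819 suffix (see _query_for_build_config_targets
+  # above) recovers its GN label.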
+ targets = _query_for_build_config_targets(output_dir) + entries = [_TargetEntry(t) for t in targets] + + if args.build: + logging.warning('Building %d .build_config.json files...', len(entries)) + _run_ninja(output_dir, [e.ninja_build_config_target for e in entries], + j_value=args.j, + quiet=args.quiet) + + if args.type: + entries = [e for e in entries if e.get_type() in args.type] + + if args.proguard_enabled: + entries = [e for e in entries if e.proguard_enabled()] + + if args.stats: + counts = collections.Counter(e.get_type() for e in entries) + for entry_type, count in sorted(counts.items()): + print(f'{entry_type}: {count}') + else: + for e in entries: + if args.gn_labels: + to_print = e.gn_target + else: + to_print = e.ninja_target + + # Convert to top-level target + if not args.nested: + to_print = to_print.replace('__test_apk', '').replace('__apk', '') + + if args.print_types: + to_print = f'{to_print}: {e.get_type()}' + elif args.print_build_config_paths: + to_print = f'{to_print}: {e.build_config_path}' + elif args.query: + value = _query_json(json_dict=e.build_config(), + query=args.query, + path=e.build_config_path) + if not value: + continue + to_print = f'{to_print}: {value}' + + print(to_print) + + +if __name__ == '__main__': + main() diff --git a/android/main_dex_classes.flags b/android/main_dex_classes.flags new file mode 100644 index 000000000000..7e0475634e57 --- /dev/null +++ b/android/main_dex_classes.flags @@ -0,0 +1,47 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Proguard flags for what should be kept in the main dex. Only used +# during main dex list determination, not during actual proguarding. + +-keep @org.chromium.build.annotations.MainDex class * { + *; +} + +-keepclasseswithmembers class * { + @org.chromium.build.annotations.MainDex ; +} + +# Assume all IDL-generated classes should be kept. They can't reference other +# non-framework classes, so fairly low-risk. +-keepclasseswithmembers class * { + public static ** asInterface(android.os.IBinder); +} + +# Required when code coverage is enabled. +-keep class com.vladium.** { + *; +} + +# Renderers / GPU process don't load secondary dex. +-keep public class * extends org.chromium.base.process_launcher.ChildProcessService { + *; +} + +# The following are based on $SDK_BUILD_TOOLS/mainDexClasses.rules +# Ours differ in that: +# 1. It omits -keeps for application / instrumentation / backupagents (these are +# redundant since they are added by aapt's main dex list rules output). +# 2. Omits keep for Application.attachBaseContext(), which is overly broad. +# 3. Omits keep for all annotations, which is also overly broad (and pulls in +# any class that has an @IntDef). + +######## START mainDexClasses.rules ######## + +# Keep old fashion tests in the main dex or they'll be silently ignored by InstrumentationTestRunner +-keep public class * extends android.test.InstrumentationTestCase { + (); +} + +######## END mainDexClasses.rules ######## diff --git a/android/method_count.py b/android/method_count.py new file mode 100755 index 000000000000..8556b22c801a --- /dev/null +++ b/android/method_count.py @@ -0,0 +1,117 @@ +#! /usr/bin/env python3 +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ + +import argparse +import os +import re +import zipfile + +from pylib.dex import dex_parser + + +class DexStatsCollector: + """Tracks count of method/field/string/type as well as unique methods.""" + + def __init__(self): + # Signatures of all methods from all seen dex files. + self._unique_methods = set() + # Map of label -> { metric -> count }. + self._counts_by_label = {} + + def _CollectFromDexfile(self, label, dexfile): + assert label not in self._counts_by_label, 'exists: ' + label + self._counts_by_label[label] = { + 'fields': dexfile.header.field_ids_size, + 'methods': dexfile.header.method_ids_size, + 'strings': dexfile.header.string_ids_size, + 'types': dexfile.header.type_ids_size, + } + self._unique_methods.update(dexfile.IterMethodSignatureParts()) + + def CollectFromZip(self, label, path): + """Add dex stats from an .apk/.jar/.aab/.zip.""" + with zipfile.ZipFile(path, 'r') as z: + for subpath in z.namelist(): + if not re.match(r'.*classes\d*\.dex$', subpath): + continue + dexfile = dex_parser.DexFile(bytearray(z.read(subpath))) + self._CollectFromDexfile('{}!{}'.format(label, subpath), dexfile) + + def CollectFromDex(self, label, path): + """Add dex stats from a .dex file.""" + with open(path, 'rb') as f: + dexfile = dex_parser.DexFile(bytearray(f.read())) + self._CollectFromDexfile(label, dexfile) + + def MergeFrom(self, parent_label, other): + """Add dex stats from another DexStatsCollector.""" + # pylint: disable=protected-access + for label, other_counts in other._counts_by_label.items(): + new_label = '{}-{}'.format(parent_label, label) + self._counts_by_label[new_label] = other_counts.copy() + self._unique_methods.update(other._unique_methods) + # pylint: enable=protected-access + + def GetUniqueMethodCount(self): + """Returns total number of unique methods across encountered dex files.""" + return len(self._unique_methods) + + def GetCountsByLabel(self): + """Returns dict of label -> {metric -> count}.""" + return self._counts_by_label + + def GetTotalCounts(self): + """Returns dict of {metric -> count}, where |count| is sum(metric).""" + ret = {} + for metric in ('fields', 'methods', 'strings', 'types'): + ret[metric] = sum(x[metric] for x in self._counts_by_label.values()) + return ret + + def GetDexCacheSize(self, pre_oreo): + """Returns number of bytes of dirty RAM is consumed from all dex files.""" + # Dex Cache was optimized in Android Oreo: + # https://source.android.com/devices/tech/dalvik/improvements#dex-cache-removal + if pre_oreo: + total = sum(self.GetTotalCounts().values()) + else: + total = sum(c['methods'] for c in self._counts_by_label.values()) + return total * 4 # 4 bytes per entry. 
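For concreteness, here is the DexCache arithmetic implemented by GetDexCacheSize() above as a worked example; the counts are hypothetical, not taken from any real APK:

    # Hypothetical per-dex counts, for illustration only.
    counts = {'fields': 5000, 'methods': 30000, 'strings': 40000, 'types': 3000}

    # Pre-Oreo: every id-table entry costs 4 bytes of dirty DexCache RAM.
    pre_oreo_bytes = sum(counts.values()) * 4  # (5000+30000+40000+3000) * 4 = 312000

    # Oreo+: only the method table stays resident per dex file.
    oreo_bytes = counts['methods'] * 4  # 30000 * 4 = 120000

    print(pre_oreo_bytes, oreo_bytes)  # 312000 120000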
+ + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('paths', nargs='+') + args = parser.parse_args() + + collector = DexStatsCollector() + for path in args.paths: + if os.path.splitext(path)[1] in ('.zip', '.apk', '.jar', '.aab'): + collector.CollectFromZip(path, path) + else: + collector.CollectFromDex(path, path) + + counts_by_label = collector.GetCountsByLabel() + for label, counts in sorted(counts_by_label.items()): + print('{}:'.format(label)) + for metric, count in sorted(counts.items()): + print(' {}:'.format(metric), count) + print() + + if len(counts_by_label) > 1: + print('Totals:') + for metric, count in sorted(collector.GetTotalCounts().items()): + print(' {}:'.format(metric), count) + print() + + print('Unique Methods:', collector.GetUniqueMethodCount()) + print('DexCache (Pre-Oreo):', collector.GetDexCacheSize(pre_oreo=True), + 'bytes of dirty memory') + print('DexCache (Oreo+):', collector.GetDexCacheSize(pre_oreo=False), + 'bytes of dirty memory') + + +if __name__ == '__main__': + main() diff --git a/android/native_flags/BUILD.gn b/android/native_flags/BUILD.gn new file mode 100644 index 000000000000..317103026163 --- /dev/null +++ b/android/native_flags/BUILD.gn @@ -0,0 +1,37 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +if (current_toolchain == default_toolchain) { + import("//build/toolchain/toolchain.gni") + + # A toolchain that will capture compiler and linker arguments to a file. + toolchain("flagcapture") { + tool("cxx") { + cxx = rebase_path("argcapture.py", root_build_dir) + command = "$cxx {{output}} {{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}}" + outputs = [ "{{root_out_dir}}/{{label_name}}.txt" ] + } + tool("solink") { + solink = rebase_path("argcapture.py", root_build_dir) + command = "$solink {{output}} {{ldflags}}" + outputs = [ "{{root_out_dir}}/{{label_name}}.txt" ] + } + tool("alink") { + command = "this-should-never-run" + outputs = [ "this-will-never-exist" ] + } + tool("stamp") { + command = stamp_command + description = stamp_description + } + } +} else if (current_toolchain == "//build/android/native_flags:flagcapture") { + # This will record flags from all default configs of the default toolchain. + source_set("default_ccflags") { + sources = [ "empty.cc" ] + } + shared_library("default_ldflags") { + no_default_deps = true + } +} diff --git a/android/native_flags/argcapture.py b/android/native_flags/argcapture.py new file mode 100755 index 000000000000..b590fff207ef --- /dev/null +++ b/android/native_flags/argcapture.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python3 +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Writes arguments to a file.""" + +import sys + + +def main(): + with open(sys.argv[1], 'w') as f: + f.write('\n'.join(sys.argv[2:])) + f.write('\n') + + +if __name__ == '__main__': + main() diff --git a/android/native_flags/empty.cc b/android/native_flags/empty.cc new file mode 100644 index 000000000000..29dfc78a94ac --- /dev/null +++ b/android/native_flags/empty.cc @@ -0,0 +1,5 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file just needs to exist to appease GN. 
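The flagcapture toolchain above never runs a real compiler or linker: argcapture.py just records the argv it was handed, one argument per line, into a .txt file under the toolchain's output directory. A minimal sketch of reading those captures back; the out/Default path and the exact subdirectory the captures land in are assumptions about your local build layout, not something this patch prescribes:

    from pathlib import Path

    out_dir = Path('out/Default')  # Assumption: your GN output directory.
    if out_dir.is_dir():
        for capture in out_dir.rglob('default_*flags.txt'):
            # argcapture.py writes one argument per line.
            flags = capture.read_text().splitlines()
            print(capture.name, len(flags), 'captured arguments')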
diff --git a/android/print_cipd_version.py b/android/print_cipd_version.py new file mode 100755 index 000000000000..581295dcdbac --- /dev/null +++ b/android/print_cipd_version.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python3 +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import pathlib +import re +import subprocess + +_DIR_SOURCE_ROOT = str(pathlib.Path(__file__).absolute().parents[2]) + + +def main(): + parser = argparse.ArgumentParser() + # Hide args set by wrappers so that using --help with the wrappers does not + # show them. + parser.add_argument('--subdir', required=True, help=argparse.SUPPRESS) + parser.add_argument('--cipd-package', required=True, help=argparse.SUPPRESS) + parser.add_argument('--git-log-url', help=argparse.SUPPRESS) + parser.add_argument('--cipd-instance', help='Uses value from DEPS by default') + args = parser.parse_args() + + if not args.cipd_instance: + args.cipd_instance = subprocess.check_output( + ['gclient', 'getdep', '-r', f'src/{args.subdir}:{args.cipd_package}'], + cwd=_DIR_SOURCE_ROOT, + text=True) + + cmd = ['cipd', 'describe', args.cipd_package, '-version', args.cipd_instance] + print(' '.join(cmd)) + output = subprocess.check_output(cmd, text=True) + print(output, end='') + if args.git_log_url: + git_hashes = re.findall(r'version:.*?@(\w+)', output) + if not git_hashes: + print('Could not find git hash from output.') + else: + # Multiple version tags exist when multiple versions have the same sha1. + last_version = git_hashes[-1] + print() + print('Recent commits:', args.git_log_url.format(last_version)) + + +if __name__ == '__main__': + main() diff --git a/android/provision_devices.py b/android/provision_devices.py new file mode 100755 index 000000000000..428d9b3d5083 --- /dev/null +++ b/android/provision_devices.py @@ -0,0 +1,562 @@ +#!/usr/bin/env vpython3 +# +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Provisions Android devices with settings required for bots. + +Usage: + ./provision_devices.py [-d ] +""" + +import argparse +import datetime +import json +import logging +import os +import posixpath +import re +import subprocess +import sys +import time + +# Import _strptime before threaded code. datetime.datetime.strptime is +# threadsafe except for the initial import of the _strptime module. +# See crbug.com/584730 and https://bugs.python.org/issue7980. +import _strptime # pylint: disable=unused-import + +import devil_chromium +from devil.android import battery_utils +from devil.android import device_denylist +from devil.android import device_errors +from devil.android import device_temp_file +from devil.android import device_utils +from devil.android.sdk import keyevent +from devil.android.sdk import version_codes +from devil.constants import exit_codes +from devil.utils import run_tests_helper +from devil.utils import timeout_retry +from pylib import constants +from pylib import device_settings +from pylib.constants import host_paths + +_SYSTEM_WEBVIEW_PATHS = ['/system/app/webview', '/system/app/WebViewGoogle'] +_CHROME_PACKAGE_REGEX = re.compile('.*chrom.*') +_TOMBSTONE_REGEX = re.compile('tombstone.*') + + +class _DEFAULT_TIMEOUTS: + # L can take a while to reboot after a wipe. 
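+  # Values are in seconds.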
+ LOLLIPOP = 600 + PRE_LOLLIPOP = 180 + + HELP_TEXT = '{}s on L, {}s on pre-L'.format(LOLLIPOP, PRE_LOLLIPOP) + + +class _PHASES: + WIPE = 'wipe' + PROPERTIES = 'properties' + FINISH = 'finish' + + ALL = [WIPE, PROPERTIES, FINISH] + + +def ProvisionDevices(args): + denylist = (device_denylist.Denylist(args.denylist_file) + if args.denylist_file else None) + devices = [ + d for d in device_utils.DeviceUtils.HealthyDevices(denylist) + if not args.emulators or d.is_emulator + ] + if args.device: + devices = [d for d in devices if d == args.device] + if not devices: + raise device_errors.DeviceUnreachableError(args.device) + parallel_devices = device_utils.DeviceUtils.parallel(devices) + if args.emulators: + parallel_devices.pMap(SetProperties, args) + else: + parallel_devices.pMap(ProvisionDevice, denylist, args) + if args.auto_reconnect: + _LaunchHostHeartbeat() + denylisted_devices = denylist.Read() if denylist else [] + if args.output_device_denylist: + with open(args.output_device_denylist, 'w') as f: + json.dump(denylisted_devices, f) + if all(d in denylisted_devices for d in devices): + raise device_errors.NoDevicesError + return 0 + + +def ProvisionDevice(device, denylist, options): + def should_run_phase(phase_name): + return not options.phases or phase_name in options.phases + + def run_phase(phase_func, reboot_timeout, reboot=True): + try: + device.WaitUntilFullyBooted(timeout=reboot_timeout, retries=0) + except device_errors.CommandTimeoutError: + logging.error('Device did not finish booting. Will try to reboot.') + device.Reboot(timeout=reboot_timeout) + phase_func(device, options) + if reboot: + device.Reboot(False, retries=0) + device.adb.WaitForDevice() + + try: + if options.reboot_timeout: + reboot_timeout = options.reboot_timeout + elif device.build_version_sdk >= version_codes.LOLLIPOP: + reboot_timeout = _DEFAULT_TIMEOUTS.LOLLIPOP + else: + reboot_timeout = _DEFAULT_TIMEOUTS.PRE_LOLLIPOP + + if should_run_phase(_PHASES.WIPE): + if (options.chrome_specific_wipe or device.IsUserBuild() or + device.build_version_sdk >= version_codes.MARSHMALLOW): + run_phase(WipeChromeData, reboot_timeout) + else: + run_phase(WipeDevice, reboot_timeout) + + if should_run_phase(_PHASES.PROPERTIES): + run_phase(SetProperties, reboot_timeout) + + if should_run_phase(_PHASES.FINISH): + run_phase(FinishProvisioning, reboot_timeout, reboot=False) + + if options.chrome_specific_wipe: + package = "com.google.android.gms" + version_name = device.GetApplicationVersion(package) + logging.info("Version name for %s is %s", package, version_name) + + CheckExternalStorage(device) + + except device_errors.CommandTimeoutError: + logging.exception('Timed out waiting for device %s. Adding to denylist.', + str(device)) + if denylist: + denylist.Extend([str(device)], reason='provision_timeout') + + except (device_errors.CommandFailedError, + device_errors.DeviceUnreachableError): + logging.exception('Failed to provision device %s. Adding to denylist.', + str(device)) + if denylist: + denylist.Extend([str(device)], reason='provision_failure') + + +def CheckExternalStorage(device): + """Checks that storage is writable and if not makes it writable. + + Arguments: + device: The device to check. + """ + try: + with device_temp_file.DeviceTempFile( + device.adb, suffix='.sh', dir=device.GetExternalStoragePath()) as f: + device.WriteFile(f.name, 'test') + except device_errors.CommandFailedError: + logging.info('External storage not writable. 
Remounting / as RW') + device.RunShellCommand(['mount', '-o', 'remount,rw', '/'], + check_return=True, as_root=True) + device.EnableRoot() + with device_temp_file.DeviceTempFile( + device.adb, suffix='.sh', dir=device.GetExternalStoragePath()) as f: + device.WriteFile(f.name, 'test') + +def WipeChromeData(device, options): + """Wipes chrome specific data from device + + (1) uninstall any app whose name matches *chrom*, except + com.android.chrome, which is the chrome stable package. Doing so also + removes the corresponding dirs under /data/data/ and /data/app/ + (2) remove any dir under /data/app-lib/ whose name matches *chrom* + (3) remove any files under /data/tombstones/ whose name matches "tombstone*" + (4) remove /data/local.prop if there is any + (5) remove /data/local/chrome-command-line if there is any + (6) remove anything under /data/local/.config/ if the dir exists + (this is telemetry related) + (7) remove anything under /data/local/tmp/ + + Arguments: + device: the device to wipe + """ + if options.skip_wipe: + return + + try: + if device.IsUserBuild(): + _UninstallIfMatch(device, _CHROME_PACKAGE_REGEX, + constants.PACKAGE_INFO['chrome_stable'].package) + device.RunShellCommand('rm -rf %s/*' % device.GetExternalStoragePath(), + check_return=True) + device.RunShellCommand('rm -rf /data/local/tmp/*', check_return=True) + else: + device.EnableRoot() + _UninstallIfMatch(device, _CHROME_PACKAGE_REGEX, + constants.PACKAGE_INFO['chrome_stable'].package) + _WipeUnderDirIfMatch(device, '/data/app-lib/', _CHROME_PACKAGE_REGEX) + _WipeUnderDirIfMatch(device, '/data/tombstones/', _TOMBSTONE_REGEX) + + _WipeFileOrDir(device, '/data/local.prop') + _WipeFileOrDir(device, '/data/local/chrome-command-line') + _WipeFileOrDir(device, '/data/local/.config/') + _WipeFileOrDir(device, '/data/local/tmp/') + device.RunShellCommand('rm -rf %s/*' % device.GetExternalStoragePath(), + check_return=True) + except device_errors.CommandFailedError: + logging.exception('Possible failure while wiping the device. ' + 'Attempting to continue.') + + +def WipeDevice(device, options): + """Wipes data from device, keeping only the adb_keys for authorization. + + After wiping data on a device that has been authorized, adb can still + communicate with the device, but after reboot the device will need to be + re-authorized because the adb keys file is stored in /data/misc/adb/. + Thus, adb_keys file is rewritten so the device does not need to be + re-authorized. + + Arguments: + device: the device to wipe + """ + if options.skip_wipe: + return + + try: + device.EnableRoot() + device_authorized = device.FileExists(constants.ADB_KEYS_FILE) + if device_authorized: + adb_keys = device.ReadFile(constants.ADB_KEYS_FILE, + as_root=True).splitlines() + device.RunShellCommand(['wipe', 'data'], + as_root=True, check_return=True) + device.adb.WaitForDevice() + + if device_authorized: + adb_keys_set = set(adb_keys) + for adb_key_file in options.adb_key_files or []: + try: + with open(adb_key_file, 'r') as f: + adb_public_keys = f.readlines() + adb_keys_set.update(adb_public_keys) + except IOError: + logging.warning('Unable to find adb keys file %s.', adb_key_file) + _WriteAdbKeysFile(device, '\n'.join(adb_keys_set)) + except device_errors.CommandFailedError: + logging.exception('Possible failure while wiping the device. 
' + 'Attempting to continue.') + + +def _WriteAdbKeysFile(device, adb_keys_string): + dir_path = posixpath.dirname(constants.ADB_KEYS_FILE) + device.RunShellCommand(['mkdir', '-p', dir_path], + as_root=True, check_return=True) + device.RunShellCommand(['restorecon', dir_path], + as_root=True, check_return=True) + device.WriteFile(constants.ADB_KEYS_FILE, adb_keys_string, as_root=True) + device.RunShellCommand(['restorecon', constants.ADB_KEYS_FILE], + as_root=True, check_return=True) + + +def SetProperties(device, options): + try: + device.EnableRoot() + except device_errors.CommandFailedError as e: + logging.warning(str(e)) + + if not device.IsUserBuild(): + _ConfigureLocalProperties(device, options.enable_java_debug) + else: + logging.warning('Cannot configure properties in user builds.') + device_settings.ConfigureContentSettings( + device, device_settings.DETERMINISTIC_DEVICE_SETTINGS) + if options.disable_location: + device_settings.ConfigureContentSettings( + device, device_settings.DISABLE_LOCATION_SETTINGS) + else: + device_settings.ConfigureContentSettings( + device, device_settings.ENABLE_LOCATION_SETTINGS) + + if options.disable_mock_location: + device_settings.ConfigureContentSettings( + device, device_settings.DISABLE_MOCK_LOCATION_SETTINGS) + else: + device_settings.ConfigureContentSettings( + device, device_settings.ENABLE_MOCK_LOCATION_SETTINGS) + + device_settings.SetLockScreenSettings(device) + if options.disable_network: + device_settings.ConfigureContentSettings( + device, device_settings.NETWORK_DISABLED_SETTINGS) + if device.build_version_sdk >= version_codes.MARSHMALLOW: + # Ensure that NFC is also switched off. + device.RunShellCommand(['svc', 'nfc', 'disable'], + as_root=True, check_return=True) + + if options.disable_system_chrome: + # The system chrome version on the device interferes with some tests. + device.RunShellCommand(['pm', 'disable', 'com.android.chrome'], + check_return=True) + + if options.remove_system_webview: + if any(device.PathExists(p) for p in _SYSTEM_WEBVIEW_PATHS): + logging.info('System WebView exists and needs to be removed') + if device.HasRoot(): + # Disabled Marshmallow's Verity security feature + if device.build_version_sdk >= version_codes.MARSHMALLOW: + device.adb.DisableVerity() + device.Reboot() + device.WaitUntilFullyBooted() + device.EnableRoot() + + # This is required, e.g., to replace the system webview on a device. + device.adb.Remount() + device.RunShellCommand(['stop'], check_return=True) + device.RunShellCommand(['rm', '-rf'] + _SYSTEM_WEBVIEW_PATHS, + check_return=True) + device.RunShellCommand(['start'], check_return=True) + else: + logging.warning('Cannot remove system webview from a non-rooted device') + else: + logging.info('System WebView already removed') + + # Some device types can momentarily disappear after setting properties. + device.adb.WaitForDevice() + + +def _ConfigureLocalProperties(device, java_debug=True): + """Set standard readonly testing device properties prior to reboot.""" + local_props = [ + 'persist.sys.usb.config=adb', + 'ro.monkey=1', + 'ro.test_harness=1', + 'ro.audio.silent=1', + 'ro.setupwizard.mode=DISABLED', + ] + if java_debug: + local_props.append( + '%s=all' % device_utils.DeviceUtils.JAVA_ASSERT_PROPERTY) + local_props.append('debug.checkjni=1') + try: + device.WriteFile( + device.LOCAL_PROPERTIES_PATH, + '\n'.join(local_props), as_root=True) + # Android will not respect the local props file if it is world writable. 
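+    # 644 (rw-r--r--) keeps the file writable by root only.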
+ device.RunShellCommand( + ['chmod', '644', device.LOCAL_PROPERTIES_PATH], + as_root=True, check_return=True) + except device_errors.CommandFailedError: + logging.exception('Failed to configure local properties.') + + +def FinishProvisioning(device, options): + # The lockscreen can't be disabled on user builds, so send a keyevent + # to unlock it. + if device.IsUserBuild(): + device.SendKeyEvent(keyevent.KEYCODE_MENU) + + if options.min_battery_level is not None: + battery = battery_utils.BatteryUtils(device) + try: + battery.ChargeDeviceToLevel(options.min_battery_level) + except device_errors.DeviceChargingError: + device.Reboot() + battery.ChargeDeviceToLevel(options.min_battery_level) + + if options.max_battery_temp is not None: + try: + battery = battery_utils.BatteryUtils(device) + battery.LetBatteryCoolToTemperature(options.max_battery_temp) + except device_errors.CommandFailedError: + logging.exception('Unable to let battery cool to specified temperature.') + + def _set_and_verify_date(): + if device.build_version_sdk >= version_codes.MARSHMALLOW: + date_format = '%m%d%H%M%Y.%S' + set_date_command = ['date', '-u'] + get_date_command = ['date', '-u'] + else: + date_format = '%Y%m%d.%H%M%S' + set_date_command = ['date', '-s'] + get_date_command = ['date'] + + # TODO(jbudorick): This is wrong on pre-M devices -- get/set are + # dealing in local time, but we're setting based on GMT. + strgmtime = time.strftime(date_format, time.gmtime()) + set_date_command.append(strgmtime) + device.RunShellCommand(set_date_command, as_root=True, check_return=True) + + get_date_command.append('+"%Y%m%d.%H%M%S"') + device_time = device.RunShellCommand( + get_date_command, as_root=True, single_line=True).replace('"', '') + device_time = datetime.datetime.strptime(device_time, "%Y%m%d.%H%M%S") + correct_time = datetime.datetime.strptime(strgmtime, date_format) + tdelta = abs(correct_time - device_time).seconds + if tdelta <= 1: + logging.info('Date/time successfully set on %s', device) + return True + logging.error('Date mismatch. Device: %s Correct: %s', + device_time.isoformat(), correct_time.isoformat()) + return False + + # Sometimes the date is not set correctly on the devices. Retry on failure. + if device.IsUserBuild(): + # TODO(bpastene): Figure out how to set the date & time on user builds. 
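+    # (Setting the clock requires root, which user builds do not provide.)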
pass + else: + if not timeout_retry.WaitFor( + _set_and_verify_date, wait_period=1, max_tries=2): + raise device_errors.CommandFailedError( + 'Failed to set date & time.', device_serial=str(device)) + + props = device.RunShellCommand('getprop', check_return=True) + for prop in props: + logging.info(' %s', prop) + if options.auto_reconnect: + _PushAndLaunchAdbReboot(device, options.target) + + +def _UninstallIfMatch(device, pattern, app_to_keep): + installed_packages = device.RunShellCommand(['pm', 'list', 'packages']) + installed_system_packages = [ + pkg.split(':')[1] for pkg in device.RunShellCommand(['pm', 'list', + 'packages', '-s'])] + for package_output in installed_packages: + package = package_output.split(':')[1] + if pattern.match(package) and package != app_to_keep: + if not device.IsUserBuild() or package not in installed_system_packages: + device.Uninstall(package) + + +def _WipeUnderDirIfMatch(device, path, pattern): + for filename in device.ListDirectory(path): + if pattern.match(filename): + _WipeFileOrDir(device, posixpath.join(path, filename)) + + +def _WipeFileOrDir(device, path): + if device.PathExists(path): + device.RunShellCommand(['rm', '-rf', path], check_return=True) + + +def _PushAndLaunchAdbReboot(device, target): + """Pushes and launches the adb_reboot binary on the device. + + Arguments: + device: The DeviceUtils instance for the device to which the adb_reboot + binary should be pushed. + target: The build target (e.g. Debug or Release) which helps in + locating the adb_reboot binary. + """ + logging.info('Will push and launch adb_reboot on %s', str(device)) + # Kill adb_reboot if it is already running. + device.KillAll('adb_reboot', blocking=True, timeout=2, quiet=True) + # Push adb_reboot. + logging.info(' Pushing adb_reboot ...') + adb_reboot = os.path.join(host_paths.DIR_SOURCE_ROOT, + 'out/%s/adb_reboot' % target) + device.PushChangedFiles([(adb_reboot, '/data/local/tmp/')]) + # Launch adb_reboot. + logging.info(' Launching adb_reboot ...') + device.RunShellCommand( + ['/data/local/tmp/adb_reboot'], + check_return=True) + + +def _LaunchHostHeartbeat(): + # Kill any existing host_heartbeat. + KillHostHeartbeat() + # Launch a new host_heartbeat. + logging.info('Spawning host heartbeat...') + subprocess.Popen([os.path.join(host_paths.DIR_SOURCE_ROOT, + 'build/android/host_heartbeat.py')]) + +def KillHostHeartbeat(): + # Decode the ps output so the regexes below operate on str, not bytes. + ps = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE, + universal_newlines=True) + stdout, _ = ps.communicate() + matches = re.findall(r'\n.*host_heartbeat.*', stdout) + for match in matches: + logging.info('An instance of host_heartbeat is running... will kill it') + pid = re.findall(r'(\S+)', match)[1] + subprocess.call(['kill', str(pid)]) + +def main(): + # Recommended options on perf bots: + # --disable-network + # TODO(tonyg): We eventually want network on. However, currently radios + # can cause perfbots to drain faster than they charge. + # --min-battery-level 95 + # Some perf bots run benchmarks with USB charging disabled which leads + # to gradual draining of the battery. We must wait for a full charge + # before starting a run in order to keep the devices online.
+ + parser = argparse.ArgumentParser( + description='Provision Android devices with settings required for bots.') + parser.add_argument('-d', '--device', metavar='SERIAL', + help='the serial number of the device to be provisioned' + ' (the default is to provision all devices attached)') + parser.add_argument('--adb-path', + help='Absolute path to the adb binary to use.') + parser.add_argument('--denylist-file', help='Device denylist JSON file.') + parser.add_argument('--phase', action='append', choices=_PHASES.ALL, + dest='phases', + help='Phases of provisioning to run. ' + '(If omitted, all phases will be run.)') + parser.add_argument('--skip-wipe', action='store_true', default=False, + help="don't wipe device data during provisioning") + parser.add_argument('--reboot-timeout', metavar='SECS', type=int, + help='when wiping the device, max number of seconds to' + ' wait after each reboot ' + '(default: %s)' % _DEFAULT_TIMEOUTS.HELP_TEXT) + parser.add_argument('--min-battery-level', type=int, metavar='NUM', + help='wait for the device to reach this minimum battery' + ' level before trying to continue') + parser.add_argument('--disable-location', action='store_true', + help='disable Google location services on devices') + parser.add_argument('--disable-mock-location', action='store_true', + default=False, help='Set ALLOW_MOCK_LOCATION to false') + parser.add_argument('--disable-network', action='store_true', + help='disable network access on devices') + parser.add_argument('--disable-java-debug', action='store_false', + dest='enable_java_debug', default=True, + help='disable Java property asserts and JNI checking') + parser.add_argument('--disable-system-chrome', action='store_true', + help='Disable the system chrome from devices.') + parser.add_argument('--remove-system-webview', action='store_true', + help='Remove the system webview from devices.') + parser.add_argument('-t', '--target', default='Debug', + help='the build target (default: %(default)s)') + parser.add_argument('-r', '--auto-reconnect', action='store_true', + help='push binary which will reboot the device on adb' + ' disconnections') + parser.add_argument('--adb-key-files', type=str, nargs='+', + help='list of adb keys to push to device') + parser.add_argument('-v', '--verbose', action='count', default=1, + help='Log more information.') + parser.add_argument('--max-battery-temp', type=int, metavar='NUM', + help='Wait for the battery to have this temp or lower.') + parser.add_argument('--output-device-denylist', + help='Json file to output the device denylist.') + parser.add_argument('--chrome-specific-wipe', action='store_true', + help='only wipe chrome specific data during provisioning') + parser.add_argument('--emulators', action='store_true', + help='provision only emulators and ignore usb devices') + args = parser.parse_args() + constants.SetBuildType(args.target) + + run_tests_helper.SetLogLevel(args.verbose) + + devil_chromium.Initialize(adb_path=args.adb_path) + + try: + return ProvisionDevices(args) + except (device_errors.DeviceUnreachableError, device_errors.NoDevicesError): + logging.exception('Unable to provision local devices.') + return exit_codes.INFRA + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/android/pylib/__init__.py b/android/pylib/__init__.py new file mode 100644 index 000000000000..2e6d65f82b40 --- /dev/null +++ b/android/pylib/__init__.py @@ -0,0 +1,45 @@ +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the 
LICENSE file. + + +import os +import sys + + +_SRC_PATH = os.path.abspath( + os.path.join(os.path.dirname(__file__), '..', '..', '..')) + +_THIRD_PARTY_PATH = os.path.join(_SRC_PATH, 'third_party') + +_CATAPULT_PATH = os.path.join(_THIRD_PARTY_PATH, 'catapult') + +_DEVIL_PATH = os.path.join(_CATAPULT_PATH, 'devil') + +_PYTRACE_PATH = os.path.join(_CATAPULT_PATH, 'common', 'py_trace_event') + +_PY_UTILS_PATH = os.path.join(_CATAPULT_PATH, 'common', 'py_utils') + +_SIX_PATH = os.path.join(_THIRD_PARTY_PATH, 'six', 'src') + +_TRACE2HTML_PATH = os.path.join(_CATAPULT_PATH, 'tracing') + +_BUILD_UTIL_PATH = os.path.join(_SRC_PATH, 'build', 'util') + +if _DEVIL_PATH not in sys.path: + sys.path.append(_DEVIL_PATH) + +if _PYTRACE_PATH not in sys.path: + sys.path.append(_PYTRACE_PATH) + +if _PY_UTILS_PATH not in sys.path: + sys.path.append(_PY_UTILS_PATH) + +if _TRACE2HTML_PATH not in sys.path: + sys.path.append(_TRACE2HTML_PATH) + +if _SIX_PATH not in sys.path: + sys.path.append(_SIX_PATH) + +if _BUILD_UTIL_PATH not in sys.path: + sys.path.insert(0, _BUILD_UTIL_PATH) diff --git a/android/pylib/android/__init__.py b/android/pylib/android/__init__.py new file mode 100644 index 000000000000..68130d5941d9 --- /dev/null +++ b/android/pylib/android/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. diff --git a/android/pylib/android/logcat_symbolizer.py b/android/pylib/android/logcat_symbolizer.py new file mode 100644 index 000000000000..84d812ca6525 --- /dev/null +++ b/android/pylib/android/logcat_symbolizer.py @@ -0,0 +1,99 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +import re + +from devil.android import logcat_monitor + +BACKTRACE_LINE_RE = re.compile(r'#\d+') +THREADTIME_RE = re.compile( + logcat_monitor.LogcatMonitor.THREADTIME_RE_FORMAT % ( + r' *\S* *', r' *\S* *', r' *\S* *', r' *\S* *', r'.*')) + +def SymbolizeLogcat(logcat, dest, symbolizer, abi): + """Symbolize stack trace in the logcat. + + Symbolize the logcat and write the symbolized logcat to a new file. + + Args: + logcat: Path to logcat file. + dest: Path to where to write the symbolized logcat. + symbolizer: The stack symbolizer to symbolize stack trace in logcat. + abi: The device's product_cpu_abi. Symbolizer needs it to symbolize. + + A sample logcat that needs to be symbolized, after stripping the prefix, + such as '08-07 18:39:37.692 28649 28649 E Ion : ', would be: + Build fingerprint: 'google/shamu/shamu:7.1.1/NMF20B/3370:userdebug/dev-keys' + Revision: '0' + ABI: 'arm' + pid: 28936, tid: 28936, name: chromium.chrome >>> org.chromium.chrome <<< + signal 6 (SIGABRT), code -6 (SI_TKILL), fault addr -------- + Abort message: '[FATAL:debug_urls.cc(151)] Check failed: false. 
+ #00 0x63e16c41 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0006cc4 + #01 0x63f19be3 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0016fbe + #02 0x63f19737 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0016f73 + #03 0x63f18ddf /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0016edd + #04 0x63f18b79 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so+0x0016eb7 + #05 0xab53f319 /system/lib/libart.so+0x000a3319 + #06 + r0 00000000 r1 00007108 r2 00000006 r3 00000008 + r4 ae60258c r5 00000006 r6 ae602534 r7 0000010c + r8 bede5cd0 r9 00000030 sl 00000000 fp 9265a800 + ip 0000000b sp bede5c38 lr ac8e5537 pc ac8e7da0 cpsr 600f0010 + + backtrace: + #00 pc 00049da0 /system/lib/libc.so (tgkill+12) + #01 pc 00047533 /system/lib/libc.so (pthread_kill+34) + #02 pc 0001d635 /system/lib/libc.so (raise+10) + #03 pc 00019181 /system/lib/libc.so (__libc_android_abort+34) + #04 pc 00017048 /system/lib/libc.so (abort+4) + #05 pc 00948605 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so + #06 pc 002c9f73 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so + #07 pc 003ccbe1 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so + #08 pc 003cc735 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so + #09 pc 003cbddf /data/app/org.chromium.chrome-1/lib/arm/libchrome.so + #10 pc 003cbb77 /data/app/org.chromium.chrome-1/lib/arm/libchrome.so + """ + + with open(logcat) as logcat_file: + with open(dest, 'w') as dest_file: + # The current stack script prints only the symbolized stack and + # completely ignores logs other than the crash log used for + # symbolization, if one exists. Thus the code here extracts the + # crash log from the logcat and passes only the crash log to the + # script, so that the other information in the logcat, which the + # stack script would simply ignore, is not lost. + # TODO(crbug.com/755225): Rewrite the logic here. + outside_of_crash_log = True + in_lower_half_crash = False + data_to_symbolize = [] + + for line in logcat_file: + if outside_of_crash_log: + # Check whether it is the start of a crash log. + if 'Build fingerprint: ' in line: + outside_of_crash_log = False + # Only include necessary information for symbolization. + # The logic here that removes date, time, proc_id etc. + # should be in sync with _THREADTIME_RE_FORMAT in logcat_monitor. + data_to_symbolize.append( + re.search(THREADTIME_RE, line).group(7)) + else: + dest_file.write(line) + else: + # Once we have reached the end of the backtrace section, + # we will start symbolizing. + if in_lower_half_crash and not bool(BACKTRACE_LINE_RE.search(line)): + outside_of_crash_log = True + in_lower_half_crash = False + symbolized_lines = symbolizer.ExtractAndResolveNativeStackTraces( + data_to_symbolize, abi) + dest_file.write('\n'.join(symbolized_lines) + '\n' + line) + data_to_symbolize = [] + else: + if not in_lower_half_crash and 'backtrace:' in line: + in_lower_half_crash = True + data_to_symbolize.append( + re.search(THREADTIME_RE, line).group(7)) diff --git a/android/pylib/base/__init__.py b/android/pylib/base/__init__.py new file mode 100644 index 000000000000..5ffa28413724 --- /dev/null +++ b/android/pylib/base/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file.
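For orientation, this is roughly how SymbolizeLogcat() above is meant to be driven. The Symbolizer object and the file paths below are hypothetical stand-ins; the only requirement the function places on |symbolizer| is an ExtractAndResolveNativeStackTraces(lines, abi) method:

from pylib.android import logcat_symbolizer

symbolizer = MyStackSymbolizer()  # Hypothetical; see requirement above.
logcat_symbolizer.SymbolizeLogcat(
    '/tmp/device.logcat',             # Raw logcat pulled from the device.
    '/tmp/device.logcat.symbolized',  # Output with native stacks resolved.
    symbolizer,
    'armeabi-v7a')                    # The device's product_cpu_abi.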
diff --git a/android/pylib/base/base_test_result.py b/android/pylib/base/base_test_result.py new file mode 100644 index 000000000000..e5fbab54f0d3 --- /dev/null +++ b/android/pylib/base/base_test_result.py @@ -0,0 +1,300 @@ +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Module containing base test results classes.""" + + +import functools +import sys +import threading + +from lib.results import result_types # pylint: disable=import-error + +# This must match the source adding the suffix: bit.ly/3Zmwwyx +_MULTIPROCESS_SUFFIX = '__multiprocess_mode' + + +class ResultType: + """Class enumerating test result types. + + Wraps the results defined in //build/util/lib/results/. + """ + PASS = result_types.PASS + SKIP = result_types.SKIP + FAIL = result_types.FAIL + CRASH = result_types.CRASH + TIMEOUT = result_types.TIMEOUT + UNKNOWN = result_types.UNKNOWN + NOTRUN = result_types.NOTRUN + + @staticmethod + def GetTypes(): + """Get a list of all test result types.""" + return [ResultType.PASS, ResultType.SKIP, ResultType.FAIL, + ResultType.CRASH, ResultType.TIMEOUT, ResultType.UNKNOWN, + ResultType.NOTRUN] + + +@functools.total_ordering +class BaseTestResult: + """Base class for a single test result.""" + + def __init__(self, name, test_type, duration=0, log='', failure_reason=None): + """Construct a BaseTestResult. + + Args: + name: Name of the test which defines uniqueness. + test_type: Type of the test result as defined in ResultType. + duration: Time it took for the test to run in milliseconds. + log: An optional string listing any errors. + """ + assert name + assert test_type in ResultType.GetTypes() + self._name = name + self._test_type = test_type + self._duration = duration + self._log = log + self._failure_reason = failure_reason + self._links = {} + self._webview_multiprocess_mode = name.endswith(_MULTIPROCESS_SUFFIX) + + def __str__(self): + return self._name + + def __repr__(self): + return self._name + + def __eq__(self, other): + return self.GetName() == other.GetName() + + def __lt__(self, other): + return self.GetName() < other.GetName() + + def __hash__(self): + return hash(self._name) + + def SetName(self, name): + """Set the test name. + + Because we're putting this into a set, this should only be used if moving + this test result into another set. + """ + self._name = name + + def GetName(self): + """Get the test name.""" + return self._name + + def GetNameForResultSink(self): + """Get the test name to be reported to resultsink.""" + raw_name = self.GetName() + if self._webview_multiprocess_mode: + assert raw_name.endswith( + _MULTIPROCESS_SUFFIX + ), 'multiprocess mode test raw name should have the corresponding suffix' + return raw_name[:-len(_MULTIPROCESS_SUFFIX)] + return raw_name + + def SetType(self, test_type): + """Set the test result type.""" + assert test_type in ResultType.GetTypes() + self._test_type = test_type + + def GetType(self): + """Get the test result type.""" + return self._test_type + + def GetDuration(self): + """Get the test duration.""" + return self._duration + + def SetLog(self, log): + """Set the test log.""" + self._log = log + + def GetLog(self): + """Get the test log.""" + return self._log + + def SetFailureReason(self, failure_reason): + """Set the reason the test failed. + + This should be the first failure the test encounters and exclude any stack + trace.
+ """ + self._failure_reason = failure_reason + + def GetFailureReason(self): + """Get the reason the test failed. + + Returns None if the test did not fail or if the reason the test failed is + unknown. + """ + return self._failure_reason + + def SetLink(self, name, link_url): + """Set link with test result data.""" + self._links[name] = link_url + + def GetLinks(self): + """Get dict containing links to test result data.""" + return self._links + + def GetVariantForResultSink(self): + """Get the variant dict to be reported to result sink.""" + if self._webview_multiprocess_mode: + return {'webview_multiprocess_mode': 'Yes'} + return None + + +class TestRunResults: + """Set of results for a test run.""" + + def __init__(self): + self._links = {} + self._results = set() + self._results_lock = threading.RLock() + + def SetLink(self, name, link_url): + """Add link with test run results data.""" + self._links[name] = link_url + + def GetLinks(self): + """Get dict containing links to test run result data.""" + return self._links + + def GetLogs(self): + """Get the string representation of all test logs.""" + with self._results_lock: + s = [] + for test_type in ResultType.GetTypes(): + if test_type != ResultType.PASS: + for t in sorted(self._GetType(test_type)): + log = t.GetLog() + if log: + s.append('[%s] %s:' % (test_type, t)) + s.append(log) + if sys.version_info.major == 2: + decoded = [u.decode(encoding='utf-8', errors='ignore') for u in s] + return '\n'.join(decoded) + return '\n'.join(s) + + def GetGtestForm(self): + """Get the gtest string representation of this object.""" + with self._results_lock: + s = [] + plural = lambda n, s, p: '%d %s' % (n, p if n != 1 else s) + tests = lambda n: plural(n, 'test', 'tests') + + s.append('[==========] %s ran.' % (tests(len(self.GetAll())))) + s.append('[ PASSED ] %s.' % (tests(len(self.GetPass())))) + + skipped = self.GetSkip() + if skipped: + s.append('[ SKIPPED ] Skipped %s, listed below:' % tests(len(skipped))) + for t in sorted(skipped): + s.append('[ SKIPPED ] %s' % str(t)) + + all_failures = self.GetFail().union(self.GetCrash(), self.GetTimeout(), + self.GetUnknown()) + if all_failures: + s.append('[ FAILED ] %s, listed below:' % tests(len(all_failures))) + for t in sorted(self.GetFail()): + s.append('[ FAILED ] %s' % str(t)) + for t in sorted(self.GetCrash()): + s.append('[ FAILED ] %s (CRASHED)' % str(t)) + for t in sorted(self.GetTimeout()): + s.append('[ FAILED ] %s (TIMEOUT)' % str(t)) + for t in sorted(self.GetUnknown()): + s.append('[ FAILED ] %s (UNKNOWN)' % str(t)) + s.append('') + s.append(plural(len(all_failures), 'FAILED TEST', 'FAILED TESTS')) + return '\n'.join(s) + + def GetShortForm(self): + """Get the short string representation of this object.""" + with self._results_lock: + s = [] + s.append('ALL: %d' % len(self._results)) + for test_type in ResultType.GetTypes(): + s.append('%s: %d' % (test_type, len(self._GetType(test_type)))) + return ''.join([x.ljust(15) for x in s]) + + def __str__(self): + return self.GetGtestForm() + + def AddResult(self, result): + """Add |result| to the set. + + Args: + result: An instance of BaseTestResult. + """ + assert isinstance(result, BaseTestResult) + with self._results_lock: + self._results.discard(result) + self._results.add(result) + + def AddResults(self, results): + """Add |results| to the set. + + Args: + results: An iterable of BaseTestResult objects. 
+ """ + with self._results_lock: + for t in results: + self.AddResult(t) + + def AddTestRunResults(self, results): + """Add the set of test results from |results|. + + Args: + results: An instance of TestRunResults. + """ + assert isinstance(results, TestRunResults), ( + 'Expected TestRunResult object: %s' % type(results)) + with self._results_lock: + # pylint: disable=W0212 + self._results.update(results._results) + + def GetAll(self): + """Get the set of all test results.""" + with self._results_lock: + return self._results.copy() + + def _GetType(self, test_type): + """Get the set of test results with the given test type.""" + with self._results_lock: + return set(t for t in self._results if t.GetType() == test_type) + + def GetPass(self): + """Get the set of all passed test results.""" + return self._GetType(ResultType.PASS) + + def GetSkip(self): + """Get the set of all skipped test results.""" + return self._GetType(ResultType.SKIP) + + def GetFail(self): + """Get the set of all failed test results.""" + return self._GetType(ResultType.FAIL) + + def GetCrash(self): + """Get the set of all crashed test results.""" + return self._GetType(ResultType.CRASH) + + def GetTimeout(self): + """Get the set of all timed out test results.""" + return self._GetType(ResultType.TIMEOUT) + + def GetUnknown(self): + """Get the set of all unknown test results.""" + return self._GetType(ResultType.UNKNOWN) + + def GetNotPass(self): + """Get the set of all non-passed test results.""" + return self.GetAll() - self.GetPass() + + def DidRunPass(self): + """Return whether the test run was successful.""" + return not self.GetNotPass() - self.GetSkip() diff --git a/android/pylib/base/base_test_result_unittest.py b/android/pylib/base/base_test_result_unittest.py new file mode 100644 index 000000000000..955a59f3a9fe --- /dev/null +++ b/android/pylib/base/base_test_result_unittest.py @@ -0,0 +1,83 @@ +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""Unittests for TestRunResults.""" + + +import unittest + +from pylib.base.base_test_result import BaseTestResult +from pylib.base.base_test_result import TestRunResults +from pylib.base.base_test_result import ResultType + + +class TestTestRunResults(unittest.TestCase): + def setUp(self): + self.p1 = BaseTestResult('p1', ResultType.PASS, log='pass1') + other_p1 = BaseTestResult('p1', ResultType.PASS) + self.p2 = BaseTestResult('p2', ResultType.PASS) + self.f1 = BaseTestResult('f1', ResultType.FAIL, log='failure1') + self.c1 = BaseTestResult('c1', ResultType.CRASH, log='crash1') + self.u1 = BaseTestResult('u1', ResultType.UNKNOWN) + self.tr = TestRunResults() + self.tr.AddResult(self.p1) + self.tr.AddResult(other_p1) + self.tr.AddResult(self.p2) + self.tr.AddResults(set([self.f1, self.c1, self.u1])) + + def testGetAll(self): + self.assertFalse( + self.tr.GetAll().symmetric_difference( + [self.p1, self.p2, self.f1, self.c1, self.u1])) + + def testGetPass(self): + self.assertFalse(self.tr.GetPass().symmetric_difference( + [self.p1, self.p2])) + + def testGetNotPass(self): + self.assertFalse(self.tr.GetNotPass().symmetric_difference( + [self.f1, self.c1, self.u1])) + + def testGetAddTestRunResults(self): + tr2 = TestRunResults() + other_p1 = BaseTestResult('p1', ResultType.PASS) + f2 = BaseTestResult('f2', ResultType.FAIL) + tr2.AddResult(other_p1) + tr2.AddResult(f2) + tr2.AddTestRunResults(self.tr) + self.assertFalse( + tr2.GetAll().symmetric_difference( + [self.p1, self.p2, self.f1, self.c1, self.u1, f2])) + + def testGetLogs(self): + log_print = ('[FAIL] f1:\n' + 'failure1\n' + '[CRASH] c1:\n' + 'crash1') + self.assertEqual(self.tr.GetLogs(), log_print) + + def testGetShortForm(self): + short_print = ('ALL: 5 PASS: 2 FAIL: 1 ' + 'CRASH: 1 TIMEOUT: 0 UNKNOWN: 1 ') + self.assertEqual(self.tr.GetShortForm(), short_print) + + def testGetGtestForm(self): + gtest_print = ('[==========] 5 tests ran.\n' + '[ PASSED ] 2 tests.\n' + '[ FAILED ] 3 tests, listed below:\n' + '[ FAILED ] f1\n' + '[ FAILED ] c1 (CRASHED)\n' + '[ FAILED ] u1 (UNKNOWN)\n' + '\n' + '3 FAILED TESTS') + self.assertEqual(gtest_print, self.tr.GetGtestForm()) + + def testRunPassed(self): + self.assertFalse(self.tr.DidRunPass()) + tr2 = TestRunResults() + self.assertTrue(tr2.DidRunPass()) + + +if __name__ == '__main__': + unittest.main() diff --git a/android/pylib/base/environment.py b/android/pylib/base/environment.py new file mode 100644 index 000000000000..0c4326a0b395 --- /dev/null +++ b/android/pylib/base/environment.py @@ -0,0 +1,52 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +# TODO(1262303): After Telemetry is supported by python3 we can remove +# object inheritance from this script. +# pylint: disable=useless-object-inheritance +class Environment(object): + """An environment in which tests can be run. + + This is expected to handle all logic that is applicable to an entire specific + environment but is independent of the test type. + + Examples include: + - The local device environment, for running tests on devices attached to + the local machine. + - The local machine environment, for running tests directly on the local + machine. + """ + + def __init__(self, output_manager): + """Environment constructor. + + Args: + output_manager: Instance of |output_manager.OutputManager| used to + save test output. 
+ """ + self._output_manager = output_manager + + # Some subclasses have different teardown behavior on receiving SIGTERM. + self._received_sigterm = False + + def SetUp(self): + raise NotImplementedError + + def TearDown(self): + raise NotImplementedError + + def __enter__(self): + self.SetUp() + return self + + def __exit__(self, _exc_type, _exc_val, _exc_tb): + self.TearDown() + + @property + def output_manager(self): + return self._output_manager + + def ReceivedSigterm(self): + self._received_sigterm = True diff --git a/android/pylib/base/environment_factory.py b/android/pylib/base/environment_factory.py new file mode 100644 index 000000000000..377e0f7081d2 --- /dev/null +++ b/android/pylib/base/environment_factory.py @@ -0,0 +1,35 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +from pylib import constants +from pylib.local.device import local_device_environment +from pylib.local.machine import local_machine_environment + +try: + # local_emulator_environment depends on //tools. + # If a client pulls in the //build subtree but not the //tools + # one, fail at emulator environment creation time. + from pylib.local.emulator import local_emulator_environment +except ImportError: + local_emulator_environment = None + + +def CreateEnvironment(args, output_manager, error_func): + + if args.environment == 'local': + if args.command not in constants.LOCAL_MACHINE_TESTS: + if args.avd_config: + if not local_emulator_environment: + error_func('emulator environment requested but not available.') + raise RuntimeError('error_func must call exit inside.') + return local_emulator_environment.LocalEmulatorEnvironment( + args, output_manager, error_func) + return local_device_environment.LocalDeviceEnvironment( + args, output_manager, error_func) + return local_machine_environment.LocalMachineEnvironment( + args, output_manager, error_func) + + error_func('Unable to create %s environment.' % args.environment) + raise RuntimeError('error_func must call exit inside.') diff --git a/android/pylib/base/mock_environment.py b/android/pylib/base/mock_environment.py new file mode 100644 index 000000000000..c537f05b3e3b --- /dev/null +++ b/android/pylib/base/mock_environment.py @@ -0,0 +1,11 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +from pylib.base import environment + +import mock # pylint: disable=import-error + + +MockEnvironment = mock.MagicMock(environment.Environment) diff --git a/android/pylib/base/mock_test_instance.py b/android/pylib/base/mock_test_instance.py new file mode 100644 index 000000000000..547a84b569f0 --- /dev/null +++ b/android/pylib/base/mock_test_instance.py @@ -0,0 +1,11 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +from pylib.base import test_instance + +import mock # pylint: disable=import-error + + +MockTestInstance = mock.MagicMock(test_instance.TestInstance) diff --git a/android/pylib/base/output_manager.py b/android/pylib/base/output_manager.py new file mode 100644 index 000000000000..f562be85f55c --- /dev/null +++ b/android/pylib/base/output_manager.py @@ -0,0 +1,179 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ + +import contextlib +import logging +import os +import tempfile + +from devil.utils import reraiser_thread + + +class Datatype: + HTML = 'text/html' + JSON = 'application/json' + PNG = 'image/png' + TEXT = 'text/plain' + + +class OutputManager: + + def __init__(self): + """OutputManager Constructor. + + This class provides a simple interface to save test output. Subclasses + of this will allow users to save test results in the cloud or locally. + """ + self._allow_upload = False + self._thread_group = None + + @contextlib.contextmanager + def ArchivedTempfile( + self, out_filename, out_subdir, datatype=Datatype.TEXT): + """Archives file contents asynchronously and then deletes the file. + + Args: + out_filename: Name for saved file. + out_subdir: Directory to save |out_filename| to. + datatype: Datatype of file. + + Returns: + An ArchivedFile instance. This file will be uploaded asynchronously when + the context manager exits. AFTER the context manager exits, you can get + the link to where the file will be stored using the Link() API. You can + use typical file APIs to write and flush the ArchivedFile. You can also + use file.name to get the local filepath to where the underlying file + exists. If you do this, you are responsible for flushing the file before + exiting the context manager. + """ + if not self._allow_upload: + raise Exception('Must run |SetUp| before attempting to upload!') + + f = self.CreateArchivedFile(out_filename, out_subdir, datatype) + try: + yield f + finally: + self.ArchiveArchivedFile(f, delete=True) + + def CreateArchivedFile(self, out_filename, out_subdir, + datatype=Datatype.TEXT): + """Returns an instance of ArchivedFile.""" + return self._CreateArchivedFile(out_filename, out_subdir, datatype) + + def _CreateArchivedFile(self, out_filename, out_subdir, datatype): + raise NotImplementedError + + def ArchiveArchivedFile(self, archived_file, delete=False): + """Archive an ArchivedFile instance and optionally delete it.""" + if not isinstance(archived_file, ArchivedFile): + raise Exception('Expecting an instance of ArchivedFile, got %s.'
% + type(archived_file)) + archived_file.PrepareArchive() + + def archive(): + try: + archived_file.Archive() + finally: + if delete: + archived_file.Delete() + + thread = reraiser_thread.ReraiserThread(func=archive) + thread.start() + self._thread_group.Add(thread) + + def SetUp(self): + self._allow_upload = True + self._thread_group = reraiser_thread.ReraiserThreadGroup() + + def TearDown(self): + self._allow_upload = False + logging.info('Finishing archiving output.') + self._thread_group.JoinAll() + + def __enter__(self): + self.SetUp() + return self + + def __exit__(self, _exc_type, _exc_val, _exc_tb): + self.TearDown() + + +class ArchivedFile: + + def __init__(self, out_filename, out_subdir, datatype): + self._out_filename = out_filename + self._out_subdir = out_subdir + self._datatype = datatype + + mode = 'w+' + if datatype == Datatype.PNG: + mode = 'w+b' + self._f = tempfile.NamedTemporaryFile(mode=mode, delete=False) + self._ready_to_archive = False + + @property + def name(self): + return self._f.name + + def fileno(self, *args, **kwargs): + if self._ready_to_archive: + raise Exception('Cannot retrieve the integer file descriptor ' + 'after archiving has begun!') + return self._f.fileno(*args, **kwargs) + + def write(self, *args, **kwargs): + if self._ready_to_archive: + raise Exception('Cannot write to file after archiving has begun!') + self._f.write(*args, **kwargs) + + def flush(self, *args, **kwargs): + if self._ready_to_archive: + raise Exception('Cannot flush file after archiving has begun!') + self._f.flush(*args, **kwargs) + + def Link(self): + """Returns location of archived file.""" + if not self._ready_to_archive: + raise Exception('Cannot get link to archived file before archiving ' + 'has begun') + return self._Link() + + def _Link(self): + """Note for when overriding this function. + + This function will certainly be called before the file + has finished being archived. Therefore, this needs to be able to know the + exact location of the archived file before it is finished being archived. + """ + raise NotImplementedError + + def PrepareArchive(self): + """Meant to be called synchronously to prepare file for async archiving.""" + self.flush() + self._ready_to_archive = True + self._PrepareArchive() + + def _PrepareArchive(self): + """Note for when overriding this function. + + This function is needed for things such as computing the location of + content addressed files. This is called after the file is written but + before archiving has begun. + """ + + def Archive(self): + """Archives file.""" + if not self._ready_to_archive: + raise Exception('File is not ready to archive. Be sure you are not ' + 'writing to the file and PrepareArchive has been called') + self._Archive() + + def _Archive(self): + raise NotImplementedError + + def Delete(self): + """Deletes the backing file.""" + self._f.close() + os.remove(self.name) diff --git a/android/pylib/base/output_manager_factory.py b/android/pylib/base/output_manager_factory.py new file mode 100644 index 000000000000..378a89a2e05d --- /dev/null +++ b/android/pylib/base/output_manager_factory.py @@ -0,0 +1,17 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
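To make the OutputManager/ArchivedFile contract above concrete, here is a hypothetical minimal implementation (it assumes devil is importable, as output_manager itself requires); only the _CreateArchivedFile(), _Link(), and _Archive() hooks are strictly needed:

from pylib.base import output_manager

class LoggingArchivedFile(output_manager.ArchivedFile):
  """Hypothetical ArchivedFile that 'archives' by printing its path."""

  def _Link(self):
    return 'file://' + self.name

  def _Archive(self):
    print('would upload %s as %s/%s' %
          (self.name, self._out_subdir, self._out_filename))

class LoggingOutputManager(output_manager.OutputManager):

  def _CreateArchivedFile(self, out_filename, out_subdir, datatype):
    return LoggingArchivedFile(out_filename, out_subdir, datatype)

with LoggingOutputManager() as om:
  with om.ArchivedTempfile('stdout.txt', 'logs') as f:
    f.write('test output\n')
  print(f.Link())  # Valid once the inner context manager has exited.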
+ + +from pylib import constants +from pylib.output import local_output_manager +from pylib.output import remote_output_manager +from pylib.utils import local_utils + + +def CreateOutputManager(args): + if args.local_output or not local_utils.IsOnSwarming(): + return local_output_manager.LocalOutputManager( + output_dir=constants.GetOutDirectory()) + return remote_output_manager.RemoteOutputManager( + bucket=args.gs_results_bucket) diff --git a/android/pylib/base/output_manager_test_case.py b/android/pylib/base/output_manager_test_case.py new file mode 100644 index 000000000000..7349fd171517 --- /dev/null +++ b/android/pylib/base/output_manager_test_case.py @@ -0,0 +1,15 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +import os.path +import unittest + + +class OutputManagerTestCase(unittest.TestCase): + + def assertUsableTempFile(self, archived_tempfile): + self.assertTrue(bool(archived_tempfile.name)) + self.assertTrue(os.path.exists(archived_tempfile.name)) + self.assertTrue(os.path.isfile(archived_tempfile.name)) diff --git a/android/pylib/base/test_collection.py b/android/pylib/base/test_collection.py new file mode 100644 index 000000000000..3b9fec047e53 --- /dev/null +++ b/android/pylib/base/test_collection.py @@ -0,0 +1,82 @@ +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +import threading + + +class TestCollection: + """A threadsafe collection of tests. + + Args: + tests: List of tests to put in the collection. + """ + + def __init__(self, tests=None): + if not tests: + tests = [] + self._lock = threading.Lock() + self._tests = [] + self._tests_in_progress = 0 + # Used to signal that an item is available or all items have been handled. + self._item_available_or_all_done = threading.Event() + for t in tests: + self.add(t) + + def _pop(self): + """Pop a test from the collection. + + Waits until a test is available or all tests have been handled. + + Returns: + A test or None if all tests have been handled. + """ + while True: + # Wait for a test to be available or all tests to have been handled. + self._item_available_or_all_done.wait() + with self._lock: + # Check which of the two conditions triggered the signal. + if self._tests_in_progress == 0: + return None + try: + return self._tests.pop(0) + except IndexError: + # Another thread beat us to the available test, wait again. + self._item_available_or_all_done.clear() + + def add(self, test): + """Add a test to the collection. + + Args: + test: A test to add. + """ + with self._lock: + self._tests.append(test) + self._item_available_or_all_done.set() + self._tests_in_progress += 1 + + def test_completed(self): + """Indicate that a test has been fully handled.""" + with self._lock: + self._tests_in_progress -= 1 + if self._tests_in_progress == 0: + # All tests have been handled, signal all waiting threads. 
+ self._item_available_or_all_done.set() + + def __iter__(self): + """Iterate through tests in the collection until all have been handled.""" + while True: + r = self._pop() + if r is None: + break + yield r + + def __len__(self): + """Return the number of tests currently in the collection.""" + return len(self._tests) + + def test_names(self): + """Return a list of the names of the tests currently in the collection.""" + with self._lock: + return list(t.test for t in self._tests) diff --git a/android/pylib/base/test_exception.py b/android/pylib/base/test_exception.py new file mode 100644 index 000000000000..6dd31cdf8bc8 --- /dev/null +++ b/android/pylib/base/test_exception.py @@ -0,0 +1,7 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +class TestException(Exception): + """Base class for exceptions thrown by the test runner.""" diff --git a/android/pylib/base/test_instance.py b/android/pylib/base/test_instance.py new file mode 100644 index 000000000000..9a4e922ea66b --- /dev/null +++ b/android/pylib/base/test_instance.py @@ -0,0 +1,40 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +class TestInstance: + """A type of test. + + This is expected to handle all logic that is test-type specific but + independent of the environment or device. + + Examples include: + - gtests + - instrumentation tests + """ + + def __init__(self): + pass + + def TestType(self): + raise NotImplementedError + + # pylint: disable=no-self-use + def GetPreferredAbis(self): + return None + + # pylint: enable=no-self-use + + def SetUp(self): + raise NotImplementedError + + def TearDown(self): + raise NotImplementedError + + def __enter__(self): + self.SetUp() + return self + + def __exit__(self, _exc_type, _exc_val, _exc_tb): + self.TearDown() diff --git a/android/pylib/base/test_instance_factory.py b/android/pylib/base/test_instance_factory.py new file mode 100644 index 000000000000..3b129742271a --- /dev/null +++ b/android/pylib/base/test_instance_factory.py @@ -0,0 +1,27 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +from pylib.gtest import gtest_test_instance +from pylib.instrumentation import instrumentation_test_instance +from pylib.junit import junit_test_instance +from pylib.monkey import monkey_test_instance +from pylib.utils import device_dependencies + + +def CreateTestInstance(args, error_func): + + if args.command == 'gtest': + return gtest_test_instance.GtestTestInstance( + args, device_dependencies.GetDataDependencies, error_func) + if args.command == 'instrumentation': + return instrumentation_test_instance.InstrumentationTestInstance( + args, device_dependencies.GetDataDependencies, error_func) + if args.command == 'junit': + return junit_test_instance.JunitTestInstance(args, error_func) + if args.command == 'monkey': + return monkey_test_instance.MonkeyTestInstance(args, error_func) + + error_func('Unable to create %s test instance.' 
% args.command) + raise RuntimeError('error_func must call exit inside.') diff --git a/android/pylib/base/test_run.py b/android/pylib/base/test_run.py new file mode 100644 index 000000000000..36aca96c6d88 --- /dev/null +++ b/android/pylib/base/test_run.py @@ -0,0 +1,55 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +class TestRun: + """An execution of a particular test on a particular device. + + This is expected to handle all logic that is specific to the combination of + environment and test type. + + Examples include: + - local gtests + - local instrumentation tests + """ + + def __init__(self, env, test_instance): + self._env = env + self._test_instance = test_instance + + # Some subclasses have different teardown behavior on receiving SIGTERM. + self._received_sigterm = False + + def TestPackage(self): + raise NotImplementedError + + def SetUp(self): + raise NotImplementedError + + def RunTests(self, results, raw_logs_fh=None): + """Runs Tests and populates |results|. + + Args: + results: An array that should be populated with + |base_test_result.TestRunResults| objects. + raw_logs_fh: An optional file handle to write raw logs to. + """ + raise NotImplementedError + + def GetTestsForListing(self): + """Returns a list of test names.""" + raise NotImplementedError + + def TearDown(self): + raise NotImplementedError + + def __enter__(self): + self.SetUp() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.TearDown() + + def ReceivedSigterm(self): + self._received_sigterm = True diff --git a/android/pylib/base/test_run_factory.py b/android/pylib/base/test_run_factory.py new file mode 100644 index 000000000000..5806a4fe934c --- /dev/null +++ b/android/pylib/base/test_run_factory.py @@ -0,0 +1,37 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
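TestRun completes the triad started by Environment and TestInstance: callers nest the three as context managers and hand the environment and test instance to a test-run factory. A sketch of the composition (the function name and argument plumbing here are illustrative, not the actual harness entry point):

from pylib.base import environment_factory
from pylib.base import test_instance_factory
from pylib.base import test_run_factory

def RunTestSession(args, output_manager, error_func):  # Hypothetical driver.
  with environment_factory.CreateEnvironment(
      args, output_manager, error_func) as env:
    with test_instance_factory.CreateTestInstance(args, error_func) as test:
      with test_run_factory.CreateTestRun(env, test, error_func) as test_run:
        results = []  # Populated with base_test_result.TestRunResults.
        test_run.RunTests(results)
        return results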
+ + +from pylib.gtest import gtest_test_instance +from pylib.instrumentation import instrumentation_test_instance +from pylib.junit import junit_test_instance +from pylib.monkey import monkey_test_instance +from pylib.local.device import local_device_environment +from pylib.local.device import local_device_gtest_run +from pylib.local.device import local_device_instrumentation_test_run +from pylib.local.device import local_device_monkey_test_run +from pylib.local.machine import local_machine_environment +from pylib.local.machine import local_machine_junit_test_run + + +def CreateTestRun(env, test_instance, error_func): + if isinstance(env, local_device_environment.LocalDeviceEnvironment): + if isinstance(test_instance, gtest_test_instance.GtestTestInstance): + return local_device_gtest_run.LocalDeviceGtestRun(env, test_instance) + if isinstance(test_instance, + instrumentation_test_instance.InstrumentationTestInstance): + return (local_device_instrumentation_test_run + .LocalDeviceInstrumentationTestRun(env, test_instance)) + if isinstance(test_instance, monkey_test_instance.MonkeyTestInstance): + return (local_device_monkey_test_run + .LocalDeviceMonkeyTestRun(env, test_instance)) + + if isinstance(env, local_machine_environment.LocalMachineEnvironment): + if isinstance(test_instance, junit_test_instance.JunitTestInstance): + return (local_machine_junit_test_run + .LocalMachineJunitTestRun(env, test_instance)) + + error_func('Unable to create test run for %s tests in %s environment' + % (str(test_instance), str(env))) + raise RuntimeError('error_func must call exit inside.') diff --git a/android/pylib/base/test_server.py b/android/pylib/base/test_server.py new file mode 100644 index 000000000000..d1fda4b7fb4b --- /dev/null +++ b/android/pylib/base/test_server.py @@ -0,0 +1,19 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +class TestServer: + """Base class for any server that needs to be set up for the tests.""" + + def __init__(self, *args, **kwargs): + pass + + def SetUp(self): + raise NotImplementedError + + def Reset(self): + raise NotImplementedError + + def TearDown(self): + raise NotImplementedError diff --git a/android/pylib/constants/__init__.py b/android/pylib/constants/__init__.py new file mode 100644 index 000000000000..cf57d9fe91bb --- /dev/null +++ b/android/pylib/constants/__init__.py @@ -0,0 +1,287 @@ +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Defines a set of constants shared by test runners and other scripts.""" + +# TODO(jbudorick): Split these constants into coherent modules. 
+ +# pylint: disable=W0212 + + +import glob +import logging +import os + +import devil.android.sdk.keyevent +from devil.android.constants import chrome +from devil.android.sdk import version_codes +from devil.constants import exit_codes + + +keyevent = devil.android.sdk.keyevent + + +DIR_SOURCE_ROOT = os.environ.get('CHECKOUT_SOURCE_ROOT', + os.path.abspath(os.path.join(os.path.dirname(__file__), + os.pardir, os.pardir, os.pardir, os.pardir))) +JAVA_HOME = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current') + +PACKAGE_INFO = dict(chrome.PACKAGE_INFO) +PACKAGE_INFO.update({ + 'legacy_browser': + chrome.PackageInfo('com.google.android.browser', + 'com.android.browser.BrowserActivity', None, None), + 'chromecast_shell': + chrome.PackageInfo('com.google.android.apps.mediashell', + 'com.google.android.apps.mediashell.MediaShellActivity', + 'castshell-command-line', None), + 'android_webview_shell': + chrome.PackageInfo('org.chromium.android_webview.shell', + 'org.chromium.android_webview.shell.AwShellActivity', + 'android-webview-command-line', None), + 'gtest': + chrome.PackageInfo('org.chromium.native_test', + 'org.chromium.native_test.NativeUnitTestActivity', + 'chrome-native-tests-command-line', None), + 'android_browsertests': + chrome.PackageInfo('org.chromium.android_browsertests_apk', + ('org.chromium.android_browsertests_apk' + + '.ChromeBrowserTestsActivity'), + 'chrome-native-tests-command-line', None), + 'components_browsertests': + chrome.PackageInfo('org.chromium.components_browsertests_apk', + ('org.chromium.components_browsertests_apk' + + '.ComponentsBrowserTestsActivity'), + 'chrome-native-tests-command-line', None), + 'content_browsertests': + chrome.PackageInfo( + 'org.chromium.content_browsertests_apk', + 'org.chromium.content_browsertests_apk.ContentBrowserTestsActivity', + 'chrome-native-tests-command-line', None), + 'chromedriver_webview_shell': + chrome.PackageInfo('org.chromium.chromedriver_webview_shell', + 'org.chromium.chromedriver_webview_shell.Main', None, + None), + 'android_webview_cts': + chrome.PackageInfo('com.android.webview', + 'com.android.cts.webkit.WebViewStartupCtsActivity', + 'webview-command-line', None), + 'android_google_webview_cts': + chrome.PackageInfo('com.google.android.webview', + 'com.android.cts.webkit.WebViewStartupCtsActivity', + 'webview-command-line', None), + 'android_google_webview_cts_debug': + chrome.PackageInfo('com.google.android.webview.debug', + 'com.android.cts.webkit.WebViewStartupCtsActivity', + 'webview-command-line', None), + 'android_webview_ui_test': + chrome.PackageInfo('org.chromium.webview_ui_test', + 'org.chromium.webview_ui_test.WebViewUiTestActivity', + 'webview-command-line', None), + 'weblayer_browsertests': + chrome.PackageInfo( + 'org.chromium.weblayer_browsertests_apk', + 'org.chromium.weblayer_browsertests_apk.WebLayerBrowserTestsActivity', + 'chrome-native-tests-command-line', None), +}) + + +# Ports arrangement for various test servers used in Chrome for Android. +# Lighttpd server will attempt to use 9000 as default port, if unavailable it +# will find a free port from 8001 - 8999. +LIGHTTPD_DEFAULT_PORT = 9000 +LIGHTTPD_RANDOM_PORT_FIRST = 8001 +LIGHTTPD_RANDOM_PORT_LAST = 8999 +TEST_SYNC_SERVER_PORT = 9031 +TEST_SEARCH_BY_IMAGE_SERVER_PORT = 9041 +TEST_POLICY_SERVER_PORT = 9051 + + +TEST_EXECUTABLE_DIR = '/data/local/tmp' +# Directories for common java libraries for SDK build. 
+# These constants are defined in build/android/ant/common.xml +SDK_BUILD_JAVALIB_DIR = 'lib.java' +SDK_BUILD_TEST_JAVALIB_DIR = 'test.lib.java' +SDK_BUILD_APKS_DIR = 'apks' + +ADB_KEYS_FILE = '/data/misc/adb/adb_keys' + +PERF_OUTPUT_DIR = os.path.join(DIR_SOURCE_ROOT, 'out', 'step_results') +# The directory on the device where perf test output gets saved to. +DEVICE_PERF_OUTPUT_DIR = ( + '/data/data/' + PACKAGE_INFO['chrome'].package + '/files') + +SCREENSHOTS_DIR = os.path.join(DIR_SOURCE_ROOT, 'out_screenshots') + +ANDROID_SDK_BUILD_TOOLS_VERSION = '33.0.0' +ANDROID_SDK_ROOT = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'android_sdk', + 'public') +ANDROID_SDK_TOOLS = os.path.join(ANDROID_SDK_ROOT, + 'build-tools', ANDROID_SDK_BUILD_TOOLS_VERSION) +ANDROID_NDK_ROOT = os.path.join(DIR_SOURCE_ROOT, + 'third_party', 'android_ndk') + +BAD_DEVICES_JSON = os.path.join(DIR_SOURCE_ROOT, + os.environ.get('CHROMIUM_OUT_DIR', 'out'), + 'bad_devices.json') + +UPSTREAM_FLAKINESS_SERVER = 'test-results.appspot.com' + +# TODO(jbudorick): Remove once unused. +DEVICE_LOCAL_PROPERTIES_PATH = '/data/local.prop' + +# Configure ubsan to print stack traces in the format understood by "stack" so +# that they will be symbolized, and disable signal handlers because they +# interfere with the breakpad and sandbox tests. +# This value is duplicated in +# base/android/java/src/org/chromium/base/library_loader/LibraryLoader.java +UBSAN_OPTIONS = ( + 'print_stacktrace=1 stack_trace_format=\'#%n pc %o %m\' ' + 'handle_segv=0 handle_sigbus=0 handle_sigfpe=0') + +# TODO(jbudorick): Rework this into testing/buildbot/ +PYTHON_UNIT_TEST_SUITES = { + 'pylib_py_unittests': { + 'path': + os.path.join(DIR_SOURCE_ROOT, 'build', 'android'), + 'test_modules': [ + 'devil.android.device_utils_test', + 'devil.android.md5sum_test', + 'devil.utils.cmd_helper_test', + 'pylib.results.json_results_test', + ] + }, + 'gyp_py_unittests': { + 'path': + os.path.join(DIR_SOURCE_ROOT, 'build', 'android', 'gyp'), + 'test_modules': [ + 'create_unwind_table_tests', + 'java_cpp_enum_tests', + 'java_cpp_strings_tests', + 'java_google_api_keys_tests', + 'extract_unwind_tables_tests', + ] + }, +} + +LOCAL_MACHINE_TESTS = ['junit', 'python'] +VALID_ENVIRONMENTS = ['local'] +VALID_TEST_TYPES = ['gtest', 'instrumentation', 'junit', 'linker', 'monkey', + 'perf', 'python'] +VALID_DEVICE_TYPES = ['Android', 'iOS'] + + +def SetBuildType(build_type): + """Set the BUILDTYPE environment variable. + + NOTE: Using this function is deprecated in favor of SetOutputDirectory(); + it is still maintained for a few scripts that typically call it + to implement their --release and --debug command-line options. + + When writing a new script, consider supporting an --output-dir or + --chromium-output-dir option instead, and calling SetOutputDirectory() + instead. + + NOTE: If CHROMIUM_OUTPUT_DIR is defined, or if SetOutputDirectory() was + called previously, this will be completely ignored. + """ + chromium_output_dir = os.environ.get('CHROMIUM_OUTPUT_DIR') + if chromium_output_dir: + logging.warning( + 'SetBuildType("%s") ignored since CHROMIUM_OUTPUT_DIR is already ' + 'defined as (%s)', build_type, chromium_output_dir) + os.environ['BUILDTYPE'] = build_type + + +def SetOutputDirectory(output_directory): + """Set the Chromium output directory. + + This must be called early by scripts that rely on GetOutDirectory() or + CheckOutputDirectory(), typically by providing an --output-dir or + --chromium-output-dir option. +
+ """ + os.environ['CHROMIUM_OUTPUT_DIR'] = os.path.abspath(output_directory) + + +# The message that is printed when the Chromium output directory cannot +# be found. Note that CHROMIUM_OUT_DIR and BUILDTYPE are not mentioned +# intentionally to encourage the use of CHROMIUM_OUTPUT_DIR instead. +_MISSING_OUTPUT_DIR_MESSAGE = '\ +The Chromium output directory could not be found. Please use an option such as \ +--output-directory to provide it (see --help for details). Otherwise, \ +define the CHROMIUM_OUTPUT_DIR environment variable.' + + +def GetOutDirectory(): + """Returns the Chromium build output directory. + + NOTE: This is determined in the following way: + - From a previous call to SetOutputDirectory() + - Otherwise, from the CHROMIUM_OUTPUT_DIR env variable, if it is defined. + - Otherwise, from the current Chromium source directory, and a previous + call to SetBuildType() or the BUILDTYPE env variable, in combination + with the optional CHROMIUM_OUT_DIR env variable. + """ + if 'CHROMIUM_OUTPUT_DIR' in os.environ: + return os.path.abspath(os.path.join( + DIR_SOURCE_ROOT, os.environ.get('CHROMIUM_OUTPUT_DIR'))) + + build_type = os.environ.get('BUILDTYPE') + if not build_type: + raise EnvironmentError(_MISSING_OUTPUT_DIR_MESSAGE) + + return os.path.abspath(os.path.join( + DIR_SOURCE_ROOT, os.environ.get('CHROMIUM_OUT_DIR', 'out'), + build_type)) + + +def CheckOutputDirectory(): + """Checks that the Chromium output directory is set, or can be found. + + If it is not already set, this will also perform a little auto-detection: + + - If the current directory contains a build.ninja file, use it as + the output directory. + + - If CHROME_HEADLESS is defined in the environment (e.g. on a bot), + look if there is a single output directory under DIR_SOURCE_ROOT/out/, + and if so, use it as the output directory. + + Raises: + Exception: If no output directory is detected. + """ + output_dir = os.environ.get('CHROMIUM_OUTPUT_DIR') + if output_dir: + return + + build_type = os.environ.get('BUILDTYPE') + if build_type and len(build_type) > 1: + return + + # If CWD is an output directory, then assume it's the desired one. + if os.path.exists('build.ninja'): + output_dir = os.getcwd() + SetOutputDirectory(output_dir) + return + + # When running on bots, see if the output directory is obvious. + # TODO(http://crbug.com/833808): Get rid of this by ensuring bots always set + # CHROMIUM_OUTPUT_DIR correctly. + if os.environ.get('CHROME_HEADLESS'): + dirs = glob.glob(os.path.join(DIR_SOURCE_ROOT, 'out', '*', 'build.ninja')) + if len(dirs) == 1: + SetOutputDirectory(dirs[0]) + return + + raise Exception( + 'Chromium output directory not set, and CHROME_HEADLESS detected. ' + + 'However, multiple out dirs exist: %r' % dirs) + + raise Exception(_MISSING_OUTPUT_DIR_MESSAGE) + + +# Exit codes +ERROR_EXIT_CODE = exit_codes.ERROR +INFRA_EXIT_CODE = exit_codes.INFRA +WARNING_EXIT_CODE = exit_codes.WARNING diff --git a/android/pylib/constants/host_paths.py b/android/pylib/constants/host_paths.py new file mode 100644 index 000000000000..4b712649e763 --- /dev/null +++ b/android/pylib/constants/host_paths.py @@ -0,0 +1,97 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ + +import contextlib +import os +import sys + +from pylib import constants + +DIR_SOURCE_ROOT = os.environ.get( + 'CHECKOUT_SOURCE_ROOT', + os.path.abspath(os.path.join(os.path.dirname(__file__), + os.pardir, os.pardir, os.pardir, os.pardir))) + +BUILD_COMMON_PATH = os.path.join( + DIR_SOURCE_ROOT, 'build', 'util', 'lib', 'common') + +# third-party libraries +ANDROID_PLATFORM_DEVELOPMENT_SCRIPTS_PATH = os.path.join( + DIR_SOURCE_ROOT, 'third_party', 'android_platform', 'development', + 'scripts') +BUILD_PATH = os.path.join(DIR_SOURCE_ROOT, 'build') +DEVIL_PATH = os.path.join( + DIR_SOURCE_ROOT, 'third_party', 'catapult', 'devil') +JAVA_PATH = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current', + 'bin') +TRACING_PATH = os.path.join( + DIR_SOURCE_ROOT, 'third_party', 'catapult', 'tracing') + +@contextlib.contextmanager +def SysPath(path, position=None): + if position is None: + sys.path.append(path) + else: + sys.path.insert(position, path) + try: + yield + finally: + if sys.path[-1] == path: + sys.path.pop() + else: + sys.path.remove(path) + + +# Map of CPU architecture name to (toolchain_name, binprefix) pairs. +# TODO(digit): Use the build_vars.json file generated by gn. +_TOOL_ARCH_MAP = { + 'arm': ('arm-linux-androideabi-4.9', 'arm-linux-androideabi'), + 'arm64': ('aarch64-linux-android-4.9', 'aarch64-linux-android'), + 'x86': ('x86-4.9', 'i686-linux-android'), + 'x86_64': ('x86_64-4.9', 'x86_64-linux-android'), + 'x64': ('x86_64-4.9', 'x86_64-linux-android'), + 'mips': ('mipsel-linux-android-4.9', 'mipsel-linux-android'), +} + +# Cache used to speed up the results of ToolPath() +# Maps (arch, tool_name) pairs to fully qualified program paths. +# Useful because ToolPath() is called repeatedly for demangling C++ symbols. +_cached_tool_paths = {} + + +def ToolPath(tool, cpu_arch): + """Return a fully qualified path to an arch-specific toolchain program. + + Args: + tool: Unprefixed toolchain program name (e.g. 'objdump') + cpu_arch: Target CPU architecture (e.g. 'arm64') + Returns: + Fully qualified path (e.g. '..../aarch64-linux-android-objdump') + Raises: + Exception if the toolchain could not be found. + """ + tool_path = _cached_tool_paths.get((tool, cpu_arch)) + if tool_path: + return tool_path + + toolchain_source, toolchain_prefix = _TOOL_ARCH_MAP.get( + cpu_arch, (None, None)) + if not toolchain_source: + raise Exception('Could not find toolchain for ' + cpu_arch) + + toolchain_subdir = ( + 'toolchains/%s/prebuilt/linux-x86_64/bin' % toolchain_source) + + tool_path = os.path.join(constants.ANDROID_NDK_ROOT, + toolchain_subdir, + toolchain_prefix + '-' + tool) + + _cached_tool_paths[(tool, cpu_arch)] = tool_path + return tool_path + + +def GetAaptPath(): + """Returns the path to the 'aapt' executable.""" + return os.path.join(constants.ANDROID_SDK_TOOLS, 'aapt') diff --git a/android/pylib/constants/host_paths_unittest.py b/android/pylib/constants/host_paths_unittest.py new file mode 100755 index 000000000000..3ce406f758dd --- /dev/null +++ b/android/pylib/constants/host_paths_unittest.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python3 +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +import logging +import os +import unittest + +import six +import pylib.constants as constants +import pylib.constants.host_paths as host_paths + +# This map corresponds to the binprefix of NDK prebuilt toolchains for various +# target CPU architectures. Note that 'x86_64' and 'x64' are the same.
+_EXPECTED_NDK_TOOL_SUBDIR_MAP = {
+    'arm': 'toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64/bin/' +
+        'arm-linux-androideabi-',
+    'arm64':
+        'toolchains/aarch64-linux-android-4.9/prebuilt/linux-x86_64/bin/' +
+        'aarch64-linux-android-',
+    'x86': 'toolchains/x86-4.9/prebuilt/linux-x86_64/bin/i686-linux-android-',
+    'x86_64':
+        'toolchains/x86_64-4.9/prebuilt/linux-x86_64/bin/x86_64-linux-android-',
+    'x64':
+        'toolchains/x86_64-4.9/prebuilt/linux-x86_64/bin/x86_64-linux-android-',
+    'mips':
+        'toolchains/mipsel-linux-android-4.9/prebuilt/linux-x86_64/bin/' +
+        'mipsel-linux-android-'
+}
+
+
+class HostPathsTest(unittest.TestCase):
+  def setUp(self):
+    logging.getLogger().setLevel(logging.ERROR)
+
+  def test_GetAaptPath(self):
+    _EXPECTED_AAPT_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'aapt')
+    self.assertEqual(host_paths.GetAaptPath(), _EXPECTED_AAPT_PATH)
+    self.assertEqual(host_paths.GetAaptPath(), _EXPECTED_AAPT_PATH)
+
+  def test_ToolPath(self):
+    for cpu_arch, binprefix in six.iteritems(_EXPECTED_NDK_TOOL_SUBDIR_MAP):
+      expected_binprefix = os.path.join(constants.ANDROID_NDK_ROOT, binprefix)
+      expected_path = expected_binprefix + 'foo'
+      self.assertEqual(host_paths.ToolPath('foo', cpu_arch), expected_path)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/android/pylib/content_settings.py b/android/pylib/content_settings.py
new file mode 100644
index 000000000000..ddd663f06b4e
--- /dev/null
+++ b/android/pylib/content_settings.py
@@ -0,0 +1,80 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class ContentSettings(dict):
+
+  """A dict interface to interact with device content settings.
+
+  Settings are key/value pairs as exposed by |adb shell content|.
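+
+  A minimal usage sketch (an editor's illustration, not part of the original
+  file):
+
+    settings = ContentSettings('settings/secure', device)
+    settings['mock_location'] = 1   # |content insert| or |content update|
+    del settings['mock_location']   # |content delete|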
+ """ + + def __init__(self, table, device): + super().__init__() + self._table = table + self._device = device + + @staticmethod + def _GetTypeBinding(value): + if isinstance(value, bool): + return 'b' + if isinstance(value, float): + return 'f' + if isinstance(value, int): + return 'i' + if isinstance(value, int): + return 'l' + if isinstance(value, str): + return 's' + raise ValueError('Unsupported type %s' % type(value)) + + def iteritems(self): + # Example row: + # 'Row: 0 _id=13, name=logging_id2, value=-1fccbaa546705b05' + for row in self._device.RunShellCommand( + 'content query --uri content://%s' % self._table, as_root=True): + fields = row.split(', ') + key = None + value = None + for field in fields: + k, _, v = field.partition('=') + if k == 'name': + key = v + elif k == 'value': + value = v + if not key: + continue + if not value: + value = '' + yield key, value + + def __getitem__(self, key): + return self._device.RunShellCommand( + 'content query --uri content://%s --where "name=\'%s\'" ' + '--projection value' % (self._table, key), as_root=True).strip() + + def __setitem__(self, key, value): + if key in self: + self._device.RunShellCommand( + 'content update --uri content://%s ' + '--bind value:%s:%s --where "name=\'%s\'"' % ( + self._table, + self._GetTypeBinding(value), value, key), + as_root=True) + else: + self._device.RunShellCommand( + 'content insert --uri content://%s ' + '--bind name:%s:%s --bind value:%s:%s' % ( + self._table, + self._GetTypeBinding(key), key, + self._GetTypeBinding(value), value), + as_root=True) + + def __delitem__(self, key): + self._device.RunShellCommand( + 'content delete --uri content://%s ' + '--bind name:%s:%s' % ( + self._table, + self._GetTypeBinding(key), key), + as_root=True) diff --git a/android/pylib/device/__init__.py b/android/pylib/device/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/android/pylib/device/commands/BUILD.gn b/android/pylib/device/commands/BUILD.gn new file mode 100644 index 000000000000..2f0273487ec1 --- /dev/null +++ b/android/pylib/device/commands/BUILD.gn @@ -0,0 +1,20 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/rules.gni") + +group("commands") { + data_deps = [ ":chromium_commands_java" ] +} + +android_library("unzip_java") { + jacoco_never_instrument = true + sources = [ "java/src/org/chromium/android/commands/unzip/Unzip.java" ] +} + +dist_dex("chromium_commands_java") { + deps = [ ":unzip_java" ] + output = "$root_build_dir/lib.java/chromium_commands.dex.jar" + data = [ output ] +} diff --git a/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java b/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java new file mode 100644 index 000000000000..b322e32c3863 --- /dev/null +++ b/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java @@ -0,0 +1,93 @@ +// Copyright 2014 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+
+package org.chromium.android.commands.unzip;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+
+/**
+ * Minimal implementation of the command-line unzip utility for Android.
+ */
+public class Unzip {
+
+    private static final String TAG = "Unzip";
+
+    public static void main(String[] args) {
+        try {
+            (new Unzip()).run(args);
+        } catch (RuntimeException e) {
+            e.printStackTrace();
+            System.exit(1);
+        }
+    }
+
+    private void showUsage(PrintStream s) {
+        s.println("Usage:");
+        s.println("unzip [zipfile]");
+    }
+
+    @SuppressWarnings("Finally")
+    private void unzip(String[] args) {
+        ZipInputStream zis = null;
+        try {
+            String zipfile = args[0];
+            zis = new ZipInputStream(new BufferedInputStream(new FileInputStream(zipfile)));
+            ZipEntry ze = null;
+
+            byte[] bytes = new byte[1024];
+            while ((ze = zis.getNextEntry()) != null) {
+                File outputFile = new File(ze.getName());
+                if (ze.isDirectory()) {
+                    if (!outputFile.exists() && !outputFile.mkdirs()) {
+                        throw new RuntimeException(
+                                "Failed to create directory: " + outputFile.toString());
+                    }
+                } else {
+                    File parentDir = outputFile.getParentFile();
+                    if (!parentDir.exists() && !parentDir.mkdirs()) {
+                        throw new RuntimeException(
+                                "Failed to create directory: " + parentDir.toString());
+                    }
+                    OutputStream out = new BufferedOutputStream(new FileOutputStream(outputFile));
+                    int actual_bytes = 0;
+                    int total_bytes = 0;
+                    while ((actual_bytes = zis.read(bytes)) != -1) {
+                        out.write(bytes, 0, actual_bytes);
+                        total_bytes += actual_bytes;
+                    }
+                    out.close();
+                }
+                zis.closeEntry();
+            }
+
+        } catch (IOException e) {
+            throw new RuntimeException("Error while unzipping", e);
+        } finally {
+            try {
+                if (zis != null) zis.close();
+            } catch (IOException e) {
+                throw new RuntimeException("Error while closing zip: " + e.toString());
+            }
+        }
+    }
+
+    public void run(String[] args) {
+        if (args.length != 1) {
+            showUsage(System.err);
+            throw new RuntimeException("Incorrect usage!");
+        }
+
+        unzip(args);
+    }
+}
+
diff --git a/android/pylib/device_settings.py b/android/pylib/device_settings.py
new file mode 100644
index 000000000000..2e1abe8b8541
--- /dev/null
+++ b/android/pylib/device_settings.py
@@ -0,0 +1,201 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import logging
+import six
+
+from pylib import content_settings
+
+_LOCK_SCREEN_SETTINGS_PATH = '/data/system/locksettings.db'
+_ALTERNATE_LOCK_SCREEN_SETTINGS_PATH = (
+    '/data/data/com.android.providers.settings/databases/settings.db')
+PASSWORD_QUALITY_UNSPECIFIED = '0'
+_COMPATIBLE_BUILD_TYPES = ['userdebug', 'eng']
+
+
+def ConfigureContentSettings(device, desired_settings):
+  """Configures device content settings from a list.
+
+  Many settings are documented at:
+    http://developer.android.com/reference/android/provider/Settings.Global.html
+    http://developer.android.com/reference/android/provider/Settings.Secure.html
+    http://developer.android.com/reference/android/provider/Settings.System.html
+
+  Many others are undocumented.
+
+  Args:
+    device: A DeviceUtils instance for the device to configure.
+    desired_settings: A list of (table, [(key, value), ...]) for all
+        settings to configure.
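+
+  Example (an editor's illustration, not part of the original file): the
+  module-level setting lists defined below can be passed directly, e.g.
+
+    ConfigureContentSettings(device, ENABLE_MOCK_LOCATION_SETTINGS)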
+ """ + for table, key_value in desired_settings: + settings = content_settings.ContentSettings(table, device) + for key, value in key_value: + settings[key] = value + logging.info('\n%s %s', table, (80 - len(table)) * '-') + for key, value in sorted(six.iteritems(settings)): + logging.info('\t%s: %s', key, value) + + +def SetLockScreenSettings(device): + """Sets lock screen settings on the device. + + On certain device/Android configurations we need to disable the lock screen in + a different database. Additionally, the password type must be set to + DevicePolicyManager.PASSWORD_QUALITY_UNSPECIFIED. + Lock screen settings are stored in sqlite on the device in: + /data/system/locksettings.db + + IMPORTANT: The first column is used as a primary key so that all rows with the + same value for that column are removed from the table prior to inserting the + new values. + + Args: + device: A DeviceUtils instance for the device to configure. + + Raises: + Exception if the setting was not properly set. + """ + if device.build_type not in _COMPATIBLE_BUILD_TYPES: + logging.warning('Unable to disable lockscreen on %s builds.', + device.build_type) + return + + def get_lock_settings(table): + return [(table, 'lockscreen.disabled', '1'), + (table, 'lockscreen.password_type', PASSWORD_QUALITY_UNSPECIFIED), + (table, 'lockscreen.password_type_alternate', + PASSWORD_QUALITY_UNSPECIFIED)] + + if device.FileExists(_LOCK_SCREEN_SETTINGS_PATH): + db = _LOCK_SCREEN_SETTINGS_PATH + locksettings = get_lock_settings('locksettings') + columns = ['name', 'user', 'value'] + generate_values = lambda k, v: [k, '0', v] + elif device.FileExists(_ALTERNATE_LOCK_SCREEN_SETTINGS_PATH): + db = _ALTERNATE_LOCK_SCREEN_SETTINGS_PATH + locksettings = get_lock_settings('secure') + get_lock_settings('system') + columns = ['name', 'value'] + generate_values = lambda k, v: [k, v] + else: + logging.warning('Unable to find database file to set lock screen settings.') + return + + for table, key, value in locksettings: + # Set the lockscreen setting for default user '0' + values = generate_values(key, value) + + cmd = """begin transaction; +delete from '%(table)s' where %(primary_key)s='%(primary_value)s'; +insert into '%(table)s' (%(columns)s) values (%(values)s); +commit transaction;""" % { + 'table': table, + 'primary_key': columns[0], + 'primary_value': values[0], + 'columns': ', '.join(columns), + 'values': ', '.join(["'%s'" % value for value in values]) + } + output_msg = device.RunShellCommand('sqlite3 %s "%s"' % (db, cmd), + as_root=True) + if output_msg: + logging.info(' '.join(output_msg)) + + +ENABLE_LOCATION_SETTINGS = [ + # Note that setting these in this order is required in order for all of + # them to take and stick through a reboot. + ('com.google.settings/partner', [ + ('use_location_for_services', 1), + ]), + ('settings/secure', [ + # Ensure Geolocation is enabled and allowed for tests. + ('location_providers_allowed', 'gps,network'), + ]), + ('com.google.settings/partner', [ + ('network_location_opt_in', 1), + ]) +] + +DISABLE_LOCATION_SETTINGS = [ + ('com.google.settings/partner', [ + ('use_location_for_services', 0), + ]), + ('settings/secure', [ + # Ensure Geolocation is disabled. 
+ ('location_providers_allowed', ''), + ]), +] + +ENABLE_MOCK_LOCATION_SETTINGS = [ + ('settings/secure', [ + ('mock_location', 1), + ]), +] + +DISABLE_MOCK_LOCATION_SETTINGS = [ + ('settings/secure', [ + ('mock_location', 0), + ]), +] + +DETERMINISTIC_DEVICE_SETTINGS = [ + ('settings/global', [ + ('assisted_gps_enabled', 0), + + # Disable "auto time" and "auto time zone" to avoid network-provided time + # to overwrite the device's datetime and timezone synchronized from host + # when running tests later. See b/6569849. + ('auto_time', 0), + ('auto_time_zone', 0), + + ('development_settings_enabled', 1), + + # Flag for allowing ActivityManagerService to send ACTION_APP_ERROR intents + # on application crashes and ANRs. If this is disabled, the crash/ANR dialog + # will never display the "Report" button. + # Type: int ( 0 = disallow, 1 = allow ) + ('send_action_app_error', 0), + + ('stay_on_while_plugged_in', 3), + + ('verifier_verify_adb_installs', 0), + ]), + ('settings/secure', [ + ('allowed_geolocation_origins', + 'http://www.google.co.uk http://www.google.com'), + + # Ensure that we never get random dialogs like "Unfortunately the process + # android.process.acore has stopped", which steal the focus, and make our + # automation fail (because the dialog steals the focus then mistakenly + # receives the injected user input events). + ('anr_show_background', 0), + + ('lockscreen.disabled', 1), + + ('screensaver_enabled', 0), + + ('skip_first_use_hints', 1), + ]), + ('settings/system', [ + # Don't want devices to accidentally rotate the screen as that could + # affect performance measurements. + ('accelerometer_rotation', 0), + + ('lockscreen.disabled', 1), + + # Turn down brightness and disable auto-adjust so that devices run cooler. + ('screen_brightness', 5), + ('screen_brightness_mode', 0), + + ('user_rotation', 0), + ]), +] + +NETWORK_DISABLED_SETTINGS = [ + ('settings/global', [ + ('airplane_mode_on', 1), + ('wifi_on', 0), + ]), +] diff --git a/android/pylib/dex/__init__.py b/android/pylib/dex/__init__.py new file mode 100644 index 000000000000..401c54b0d9c6 --- /dev/null +++ b/android/pylib/dex/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. diff --git a/android/pylib/dex/dex_parser.py b/android/pylib/dex/dex_parser.py new file mode 100755 index 000000000000..90029177e55d --- /dev/null +++ b/android/pylib/dex/dex_parser.py @@ -0,0 +1,532 @@ +#!/usr/bin/env python3 +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Utilities for optimistically parsing dex files. + +This file is not meant to provide a generic tool for analyzing dex files. +A DexFile class that exposes access to several memory items in the dex format +is provided, but it does not include error handling or validation. 
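+
+Example (an editor's illustration, not part of the original file; it assumes
+the DexFile class mentioned above takes the raw file contents and exposes the
+parsed header):
+
+  with open('classes.dex', 'rb') as f:
+    dex = DexFile(bytearray(f.read()))
+  print(dex.header.class_defs_size)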
+""" + +import argparse +import collections +import errno +import os +import re +import struct +import sys +import zipfile + +# https://source.android.com/devices/tech/dalvik/dex-format#header-item +_DEX_HEADER_FMT = ( + ('magic', '8s'), + ('checksum', 'I'), + ('signature', '20s'), + ('file_size', 'I'), + ('header_size', 'I'), + ('endian_tag', 'I'), + ('link_size', 'I'), + ('link_off', 'I'), + ('map_off', 'I'), + ('string_ids_size', 'I'), + ('string_ids_off', 'I'), + ('type_ids_size', 'I'), + ('type_ids_off', 'I'), + ('proto_ids_size', 'I'), + ('proto_ids_off', 'I'), + ('field_ids_size', 'I'), + ('field_ids_off', 'I'), + ('method_ids_size', 'I'), + ('method_ids_off', 'I'), + ('class_defs_size', 'I'), + ('class_defs_off', 'I'), + ('data_size', 'I'), + ('data_off', 'I'), +) + +DexHeader = collections.namedtuple('DexHeader', + ','.join(t[0] for t in _DEX_HEADER_FMT)) + +# Simple memory items. +_TypeIdItem = collections.namedtuple('TypeIdItem', 'descriptor_idx') +_ProtoIdItem = collections.namedtuple( + 'ProtoIdItem', 'shorty_idx,return_type_idx,parameters_off') +_MethodIdItem = collections.namedtuple('MethodIdItem', + 'type_idx,proto_idx,name_idx') +_TypeItem = collections.namedtuple('TypeItem', 'type_idx') +_StringDataItem = collections.namedtuple('StringItem', 'utf16_size,data') +_ClassDefItem = collections.namedtuple( + 'ClassDefItem', + 'class_idx,access_flags,superclass_idx,interfaces_off,source_file_idx,' + 'annotations_off,class_data_off,static_values_off') + + +class _MemoryItemList: + """Base class for repeated memory items.""" + + def __init__(self, + reader, + offset, + size, + factory, + alignment=None, + first_item_offset=None): + """Creates the item list using the specific item factory. + + Args: + reader: _DexReader used for decoding the memory item. + offset: Offset from start of the file to the item list, serving as the + key for some item types. + size: Number of memory items in the list. + factory: Function to extract each memory item from a _DexReader. + alignment: Optional integer specifying the alignment for the memory + section represented by this list. + first_item_offset: Optional, specifies a different offset to use for + extracting memory items (default is to use offset). 
+ """ + self.offset = offset + self.size = size + reader.Seek(first_item_offset or offset) + self._items = [factory(reader) for _ in range(size)] + + if alignment: + reader.AlignUpTo(alignment) + + def __iter__(self): + return iter(self._items) + + def __getitem__(self, key): + return self._items[key] + + def __len__(self): + return len(self._items) + + def __repr__(self): + item_type_part = '' + if self.size != 0: + item_type = type(self._items[0]) + item_type_part = ', item type={}'.format(item_type.__name__) + + return '{}(offset={:#x}, size={}{})'.format( + type(self).__name__, self.offset, self.size, item_type_part) + + +class _TypeIdItemList(_MemoryItemList): + def __init__(self, reader, offset, size): + factory = lambda x: _TypeIdItem(x.ReadUInt()) + super().__init__(reader, offset, size, factory) + + +class _ProtoIdItemList(_MemoryItemList): + def __init__(self, reader, offset, size): + factory = lambda x: _ProtoIdItem(x.ReadUInt(), x.ReadUInt(), x.ReadUInt()) + super().__init__(reader, offset, size, factory) + + +class _MethodIdItemList(_MemoryItemList): + def __init__(self, reader, offset, size): + factory = ( + lambda x: _MethodIdItem(x.ReadUShort(), x.ReadUShort(), x.ReadUInt())) + super().__init__(reader, offset, size, factory) + + +class _StringItemList(_MemoryItemList): + def __init__(self, reader, offset, size): + reader.Seek(offset) + string_item_offsets = iter([reader.ReadUInt() for _ in range(size)]) + + def factory(x): + data_offset = next(string_item_offsets) + string = x.ReadString(data_offset) + return _StringDataItem(len(string), string) + + super().__init__(reader, offset, size, factory) + + +class _TypeListItem(_MemoryItemList): + def __init__(self, reader): + offset = reader.Tell() + size = reader.ReadUInt() + factory = lambda x: _TypeItem(x.ReadUShort()) + # This is necessary because we need to extract the size of the type list + # (in other cases the list size is provided in the header). 
+    first_item_offset = reader.Tell()
+    super().__init__(reader,
+                     offset,
+                     size,
+                     factory,
+                     alignment=4,
+                     first_item_offset=first_item_offset)
+
+
+class _TypeListItemList(_MemoryItemList):
+  def __init__(self, reader, offset, size):
+    super().__init__(reader, offset, size, _TypeListItem)
+
+
+class _ClassDefItemList(_MemoryItemList):
+  def __init__(self, reader, offset, size):
+    reader.Seek(offset)
+
+    def factory(x):
+      return _ClassDefItem(*(x.ReadUInt()
+                             for _ in range(len(_ClassDefItem._fields))))
+
+    super().__init__(reader, offset, size, factory)
+
+
+class _DexMapItem:
+  def __init__(self, reader):
+    self.type = reader.ReadUShort()
+    reader.ReadUShort()
+    self.size = reader.ReadUInt()
+    self.offset = reader.ReadUInt()
+
+  def __repr__(self):
+    return '_DexMapItem(type={}, size={}, offset={:#x})'.format(
+        self.type, self.size, self.offset)
+
+
+class _DexMapList:
+  # Full list of type codes:
+  # https://source.android.com/devices/tech/dalvik/dex-format#type-codes
+  TYPE_TYPE_LIST = 0x1001
+
+  def __init__(self, reader, offset):
+    self._map = {}
+    reader.Seek(offset)
+    self._size = reader.ReadUInt()
+    for _ in range(self._size):
+      item = _DexMapItem(reader)
+      self._map[item.type] = item
+
+  def __getitem__(self, key):
+    return self._map[key]
+
+  def __contains__(self, key):
+    return key in self._map
+
+  def __repr__(self):
+    return '_DexMapList(size={}, items={})'.format(self._size, self._map)
+
+
+class _DexReader:
+  def __init__(self, data):
+    self._data = data
+    self._pos = 0
+
+  def Seek(self, offset):
+    self._pos = offset
+
+  def Tell(self):
+    return self._pos
+
+  def ReadUByte(self):
+    return self._ReadData('<B')

[... gap from extraction: the remainder of android/pylib/dex/dex_parser.py
(the rest of the _DexReader class, the DexFile class, and the script's
command-line entry point) and the beginning of
android/pylib/gtest/gtest_test_instance.py (diff header, license, imports,
module constants, and the start of GtestTestInstance.__init__) are missing
here ...]

+    if len(args.suite_name) > 1:
+      raise ValueError('Platform mode currently supports only 1 gtest suite')
+    self._coverage_dir = args.coverage_dir
+    self._exe_dist_dir = None
+    self._external_shard_index = args.test_launcher_shard_index
+    self._extract_test_list_from_filter = args.extract_test_list_from_filter
+    self._filter_tests_lock = threading.Lock()
+    self._gs_test_artifacts_bucket = args.gs_test_artifacts_bucket
+    self._isolated_script_test_output = args.isolated_script_test_output
+    self._isolated_script_test_perf_output = (
+        args.isolated_script_test_perf_output)
+    self._render_test_output_dir = args.render_test_output_dir
+    self._shard_timeout = args.shard_timeout
+    self._store_tombstones = args.store_tombstones
+    self._suite = args.suite_name[0]
+    self._symbolizer = stack_symbolizer.Symbolizer(None)
+    self._total_external_shards = args.test_launcher_total_shards
+    self._wait_for_java_debugger = args.wait_for_java_debugger
+    self._use_existing_test_data = args.use_existing_test_data
+
+    # GYP:
+    if args.executable_dist_dir:
+      self._exe_dist_dir = os.path.abspath(args.executable_dist_dir)
+    else:
+      # TODO(agrieve): Remove auto-detection once recipes pass flag explicitly.
+ exe_dist_dir = os.path.join(constants.GetOutDirectory(), + '%s__dist' % self._suite) + + if os.path.exists(exe_dist_dir): + self._exe_dist_dir = exe_dist_dir + + incremental_part = '' + if args.test_apk_incremental_install_json: + incremental_part = '_incremental' + + self._test_launcher_batch_limit = MAX_SHARDS + if (args.test_launcher_batch_limit + and 0 < args.test_launcher_batch_limit < MAX_SHARDS): + self._test_launcher_batch_limit = args.test_launcher_batch_limit + + apk_path = os.path.join( + constants.GetOutDirectory(), '%s_apk' % self._suite, + '%s-debug%s.apk' % (self._suite, incremental_part)) + self._test_apk_incremental_install_json = ( + args.test_apk_incremental_install_json) + if not os.path.exists(apk_path): + self._apk_helper = None + else: + self._apk_helper = apk_helper.ApkHelper(apk_path) + self._extras = { + _EXTRA_NATIVE_TEST_ACTIVITY: self._apk_helper.GetActivityName(), + } + if self._suite in RUN_IN_SUB_THREAD_TEST_SUITES: + self._extras[_EXTRA_RUN_IN_SUB_THREAD] = 1 + if self._suite in BROWSER_TEST_SUITES: + self._extras[_EXTRA_SHARD_SIZE_LIMIT] = 1 + self._extras[EXTRA_SHARD_NANO_TIMEOUT] = int(1e9 * self._shard_timeout) + self._shard_timeout = 10 * self._shard_timeout + if args.wait_for_java_debugger: + self._extras[EXTRA_SHARD_NANO_TIMEOUT] = int(1e15) # Forever + + if not self._apk_helper and not self._exe_dist_dir: + error_func('Could not find apk or executable for %s' % self._suite) + + self._data_deps = [] + self._gtest_filters = test_filter.InitializeFiltersFromArgs(args) + self._run_disabled = args.run_disabled + + self._data_deps_delegate = data_deps_delegate + self._runtime_deps_path = args.runtime_deps_path + if not self._runtime_deps_path: + logging.warning('No data dependencies will be pushed.') + + if args.app_data_files: + self._app_data_files = args.app_data_files + if args.app_data_file_dir: + self._app_data_file_dir = args.app_data_file_dir + else: + self._app_data_file_dir = tempfile.mkdtemp() + logging.critical('Saving app files to %s', self._app_data_file_dir) + else: + self._app_data_files = None + self._app_data_file_dir = None + + self._flags = None + self._initializeCommandLineFlags(args) + + # TODO(jbudorick): Remove this once it's deployed. 
+ self._enable_xml_result_parsing = args.enable_xml_result_parsing + + def _initializeCommandLineFlags(self, args): + self._flags = [] + if args.command_line_flags: + self._flags.extend(args.command_line_flags) + if args.device_flags_file: + with open(args.device_flags_file) as f: + stripped_lines = (l.strip() for l in f) + self._flags.extend(flag for flag in stripped_lines if flag) + if args.run_disabled: + self._flags.append('--gtest_also_run_disabled_tests') + + @property + def activity(self): + return self._apk_helper and self._apk_helper.GetActivityName() + + @property + def apk(self): + return self._apk_helper and self._apk_helper.path + + @property + def apk_helper(self): + return self._apk_helper + + @property + def app_file_dir(self): + return self._app_data_file_dir + + @property + def app_files(self): + return self._app_data_files + + @property + def coverage_dir(self): + return self._coverage_dir + + @property + def enable_xml_result_parsing(self): + return self._enable_xml_result_parsing + + @property + def exe_dist_dir(self): + return self._exe_dist_dir + + @property + def external_shard_index(self): + return self._external_shard_index + + @property + def extract_test_list_from_filter(self): + return self._extract_test_list_from_filter + + @property + def extras(self): + return self._extras + + @property + def flags(self): + return self._flags + + @property + def gs_test_artifacts_bucket(self): + return self._gs_test_artifacts_bucket + + @property + def gtest_filters(self): + return self._gtest_filters + + @property + def isolated_script_test_output(self): + return self._isolated_script_test_output + + @property + def isolated_script_test_perf_output(self): + return self._isolated_script_test_perf_output + + @property + def render_test_output_dir(self): + return self._render_test_output_dir + + @property + def package(self): + return self._apk_helper and self._apk_helper.GetPackageName() + + @property + def permissions(self): + return self._apk_helper and self._apk_helper.GetPermissions() + + @property + def runner(self): + return self._apk_helper and self._apk_helper.GetInstrumentationName() + + @property + def shard_timeout(self): + return self._shard_timeout + + @property + def store_tombstones(self): + return self._store_tombstones + + @property + def suite(self): + return self._suite + + @property + def symbolizer(self): + return self._symbolizer + + @property + def test_apk_incremental_install_json(self): + return self._test_apk_incremental_install_json + + @property + def test_launcher_batch_limit(self): + return self._test_launcher_batch_limit + + @property + def total_external_shards(self): + return self._total_external_shards + + @property + def wait_for_java_debugger(self): + return self._wait_for_java_debugger + + @property + def use_existing_test_data(self): + return self._use_existing_test_data + + #override + def TestType(self): + return 'gtest' + + #override + def GetPreferredAbis(self): + if not self._apk_helper: + return None + return self._apk_helper.GetAbis() + + #override + def SetUp(self): + """Map data dependencies via isolate.""" + self._data_deps.extend( + self._data_deps_delegate(self._runtime_deps_path)) + + def GetDataDependencies(self): + """Returns the test suite's data dependencies. + + Returns: + A list of (host_path, device_path) tuples to push. If device_path is + None, the client is responsible for determining where to push the file. 
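+
+    Example (an editor's illustration; |device| and |device_dir| are
+    hypothetical client code, not part of the original file):
+
+      for host_path, device_path in test_instance.GetDataDependencies():
+        if device_path is None:
+          device_path = posixpath.join(device_dir,
+                                       os.path.basename(host_path))
+        device.adb.Push(host_path, device_path)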
+ """ + return self._data_deps + + def FilterTests(self, test_list, disabled_prefixes=None): + """Filters |test_list| based on prefixes and, if present, a filter string. + + Args: + test_list: The list of tests to filter. + disabled_prefixes: A list of test prefixes to filter. Defaults to + DISABLED_, FLAKY_, FAILS_, PRE_, and MANUAL_ + Returns: + A filtered list of tests to run. + """ + gtest_filter_strings = [ + self._GenerateDisabledFilterString(disabled_prefixes)] + if self._gtest_filters: + gtest_filter_strings.extend(self._gtest_filters) + + filtered_test_list = test_list + # This lock is required because on older versions of Python + # |unittest_util.FilterTestNames| use of |fnmatch| is not threadsafe. + with self._filter_tests_lock: + for gtest_filter_string in gtest_filter_strings: + logging.debug('Filtering tests using: %s', gtest_filter_string) + filtered_test_list = unittest_util.FilterTestNames( + filtered_test_list, gtest_filter_string) + + if self._run_disabled and self._gtest_filters: + out_filtered_test_list = list(set(test_list)-set(filtered_test_list)) + for test in out_filtered_test_list: + test_name_no_disabled = TestNameWithoutDisabledPrefix(test) + if test_name_no_disabled == test: + continue + if all( + unittest_util.FilterTestNames([test_name_no_disabled], + gtest_filter) + for gtest_filter in self._gtest_filters): + filtered_test_list.append(test) + return filtered_test_list + + def _GenerateDisabledFilterString(self, disabled_prefixes): + disabled_filter_items = [] + + if disabled_prefixes is None: + disabled_prefixes = ['FAILS_', 'PRE_'] + if '--run-manual' not in self._flags: + disabled_prefixes += ['MANUAL_'] + if not self._run_disabled: + disabled_prefixes += ['DISABLED_', 'FLAKY_'] + + disabled_filter_items += ['%s*' % dp for dp in disabled_prefixes] + disabled_filter_items += ['*.%s*' % dp for dp in disabled_prefixes] + + disabled_tests_file_path = os.path.join( + host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'pylib', 'gtest', + 'filter', '%s_disabled' % self._suite) + if disabled_tests_file_path and os.path.exists(disabled_tests_file_path): + with open(disabled_tests_file_path) as disabled_tests_file: + disabled_filter_items += [ + '%s' % l for l in (line.strip() for line in disabled_tests_file) + if l and not l.startswith('#')] + + return '*-%s' % ':'.join(disabled_filter_items) + + #override + def TearDown(self): + """Do nothing.""" diff --git a/android/pylib/gtest/gtest_test_instance_test.py b/android/pylib/gtest/gtest_test_instance_test.py new file mode 100755 index 000000000000..c714ba0cfc9e --- /dev/null +++ b/android/pylib/gtest/gtest_test_instance_test.py @@ -0,0 +1,348 @@ +#!/usr/bin/env vpython3 +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ + +import unittest + +from pylib.base import base_test_result +from pylib.gtest import gtest_test_instance + + +class GtestTestInstanceTests(unittest.TestCase): + + def testParseGTestListTests_simple(self): + raw_output = [ + 'TestCaseOne.', + ' testOne', + ' testTwo', + 'TestCaseTwo.', + ' testThree', + ' testFour', + ] + actual = gtest_test_instance.ParseGTestListTests(raw_output) + expected = [ + 'TestCaseOne.testOne', + 'TestCaseOne.testTwo', + 'TestCaseTwo.testThree', + 'TestCaseTwo.testFour', + ] + self.assertEqual(expected, actual) + + def testParseGTestListTests_typeParameterized_old(self): + raw_output = [ + 'TPTestCase/WithTypeParam/0.', + ' testOne', + ' testTwo', + ] + actual = gtest_test_instance.ParseGTestListTests(raw_output) + expected = [ + 'TPTestCase/WithTypeParam/0.testOne', + 'TPTestCase/WithTypeParam/0.testTwo', + ] + self.assertEqual(expected, actual) + + def testParseGTestListTests_typeParameterized_new(self): + raw_output = [ + 'TPTestCase/WithTypeParam/0. # TypeParam = TypeParam0', + ' testOne', + ' testTwo', + ] + actual = gtest_test_instance.ParseGTestListTests(raw_output) + expected = [ + 'TPTestCase/WithTypeParam/0.testOne', + 'TPTestCase/WithTypeParam/0.testTwo', + ] + self.assertEqual(expected, actual) + + def testParseGTestListTests_valueParameterized_old(self): + raw_output = [ + 'VPTestCase.', + ' testWithValueParam/0', + ' testWithValueParam/1', + ] + actual = gtest_test_instance.ParseGTestListTests(raw_output) + expected = [ + 'VPTestCase.testWithValueParam/0', + 'VPTestCase.testWithValueParam/1', + ] + self.assertEqual(expected, actual) + + def testParseGTestListTests_valueParameterized_new(self): + raw_output = [ + 'VPTestCase.', + ' testWithValueParam/0 # GetParam() = 0', + ' testWithValueParam/1 # GetParam() = 1', + ] + actual = gtest_test_instance.ParseGTestListTests(raw_output) + expected = [ + 'VPTestCase.testWithValueParam/0', + 'VPTestCase.testWithValueParam/1', + ] + self.assertEqual(expected, actual) + + def testParseGTestListTests_emptyTestName(self): + raw_output = [ + 'TestCase.', + ' ', + ' nonEmptyTestName', + ] + actual = gtest_test_instance.ParseGTestListTests(raw_output) + expected = [ + 'TestCase.nonEmptyTestName', + ] + self.assertEqual(expected, actual) + + def testParseGTestOutput_pass(self): + raw_output = [ + '[ RUN ] FooTest.Bar', + '[ OK ] FooTest.Bar (1 ms)', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) + self.assertEqual(1, len(actual)) + self.assertEqual('FooTest.Bar', actual[0].GetName()) + self.assertEqual(1, actual[0].GetDuration()) + self.assertEqual(base_test_result.ResultType.PASS, actual[0].GetType()) + + def testParseGTestOutput_fail(self): + raw_output = [ + '[ RUN ] FooTest.Bar', + '[ FAILED ] FooTest.Bar (1 ms)', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) + self.assertEqual(1, len(actual)) + self.assertEqual('FooTest.Bar', actual[0].GetName()) + self.assertEqual(1, actual[0].GetDuration()) + self.assertEqual(base_test_result.ResultType.FAIL, actual[0].GetType()) + + def testParseGTestOutput_crash(self): + raw_output = [ + '[ RUN ] FooTest.Bar', + '[ CRASHED ] FooTest.Bar (1 ms)', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) + self.assertEqual(1, len(actual)) + self.assertEqual('FooTest.Bar', actual[0].GetName()) + self.assertEqual(1, actual[0].GetDuration()) + self.assertEqual(base_test_result.ResultType.CRASH, actual[0].GetType()) + + def testParseGTestOutput_errorCrash(self): + raw_output = [ + '[ RUN ] 
FooTest.Bar', + '[ERROR:blah] Currently running: FooTest.Bar', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) + self.assertEqual(1, len(actual)) + self.assertEqual('FooTest.Bar', actual[0].GetName()) + self.assertIsNone(actual[0].GetDuration()) + self.assertEqual(base_test_result.ResultType.CRASH, actual[0].GetType()) + + def testParseGTestOutput_fatalDcheck(self): + raw_output = [ + '[ RUN ] FooTest.Bar', + '[0324/183029.116334:FATAL:test_timeouts.cc(103)] Check failed: !init', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) + self.assertEqual(1, len(actual)) + self.assertEqual('FooTest.Bar', actual[0].GetName()) + self.assertIsNone(actual[0].GetDuration()) + self.assertEqual(base_test_result.ResultType.CRASH, actual[0].GetType()) + + def testParseGTestOutput_unknown(self): + raw_output = [ + '[ RUN ] FooTest.Bar', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) + self.assertEqual(1, len(actual)) + self.assertEqual('FooTest.Bar', actual[0].GetName()) + self.assertEqual(0, actual[0].GetDuration()) + self.assertEqual(base_test_result.ResultType.CRASH, actual[0].GetType()) + + def testParseGTestOutput_nonterminalUnknown(self): + raw_output = [ + '[ RUN ] FooTest.Bar', + '[ RUN ] FooTest.Baz', + '[ OK ] FooTest.Baz (1 ms)', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) + self.assertEqual(2, len(actual)) + + self.assertEqual('FooTest.Bar', actual[0].GetName()) + self.assertEqual(0, actual[0].GetDuration()) + self.assertEqual(base_test_result.ResultType.CRASH, actual[0].GetType()) + + self.assertEqual('FooTest.Baz', actual[1].GetName()) + self.assertEqual(1, actual[1].GetDuration()) + self.assertEqual(base_test_result.ResultType.PASS, actual[1].GetType()) + + def testParseGTestOutput_deathTestCrashOk(self): + raw_output = [ + '[ RUN ] FooTest.Bar', + '[ CRASHED ]', + '[ OK ] FooTest.Bar (1 ms)', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) + self.assertEqual(1, len(actual)) + + self.assertEqual('FooTest.Bar', actual[0].GetName()) + self.assertEqual(1, actual[0].GetDuration()) + self.assertEqual(base_test_result.ResultType.PASS, actual[0].GetType()) + + def testParseGTestOutput_typeParameterized(self): + raw_output = [ + '[ RUN ] Baz/FooTest.Bar/0', + '[ FAILED ] Baz/FooTest.Bar/0, where TypeParam = (1 ms)', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) + self.assertEqual(1, len(actual)) + self.assertEqual('Baz/FooTest.Bar/0', actual[0].GetName()) + self.assertEqual(1, actual[0].GetDuration()) + self.assertEqual(base_test_result.ResultType.FAIL, actual[0].GetType()) + + def testParseGTestOutput_valueParameterized(self): + raw_output = [ + '[ RUN ] Baz/FooTest.Bar/0', + '[ FAILED ] Baz/FooTest.Bar/0,' + + ' where GetParam() = 4-byte object <00-00 00-00> (1 ms)', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) + self.assertEqual(1, len(actual)) + self.assertEqual('Baz/FooTest.Bar/0', actual[0].GetName()) + self.assertEqual(1, actual[0].GetDuration()) + self.assertEqual(base_test_result.ResultType.FAIL, actual[0].GetType()) + + def testParseGTestOutput_typeAndValueParameterized(self): + raw_output = [ + '[ RUN ] Baz/FooTest.Bar/0', + '[ FAILED ] Baz/FooTest.Bar/0,' + + ' where TypeParam = and GetParam() = (1 ms)', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) + self.assertEqual(1, len(actual)) + self.assertEqual('Baz/FooTest.Bar/0', actual[0].GetName()) + 
self.assertEqual(1, actual[0].GetDuration()) + self.assertEqual(base_test_result.ResultType.FAIL, actual[0].GetType()) + + def testParseGTestOutput_skippedTest(self): + raw_output = [ + '[ RUN ] FooTest.Bar', + '[ SKIPPED ] FooTest.Bar (1 ms)', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) + self.assertEqual(1, len(actual)) + self.assertEqual('FooTest.Bar', actual[0].GetName()) + self.assertEqual(1, actual[0].GetDuration()) + self.assertEqual(base_test_result.ResultType.SKIP, actual[0].GetType()) + + def testParseGTestXML_none(self): + actual = gtest_test_instance.ParseGTestXML(None) + self.assertEqual([], actual) + + def testParseGTestJSON_none(self): + actual = gtest_test_instance.ParseGTestJSON(None) + self.assertEqual([], actual) + + def testParseGTestJSON_example(self): + raw_json = """ + { + "tests": { + "mojom_tests": { + "parse": { + "ast_unittest": { + "ASTTest": { + "testNodeBase": { + "expected": "PASS", + "actual": "PASS", + "artifacts": { + "screenshot": ["screenshots/page.png"] + } + } + } + } + } + } + }, + "interrupted": false, + "path_delimiter": ".", + "version": 3, + "seconds_since_epoch": 1406662283.764424, + "num_failures_by_type": { + "FAIL": 0, + "PASS": 1 + }, + "artifact_types": { + "screenshot": "image/png" + } + }""" + actual = gtest_test_instance.ParseGTestJSON(raw_json) + self.assertEqual(1, len(actual)) + self.assertEqual('mojom_tests.parse.ast_unittest.ASTTest.testNodeBase', + actual[0].GetName()) + self.assertEqual(base_test_result.ResultType.PASS, actual[0].GetType()) + + def testParseGTestJSON_skippedTest_example(self): + raw_json = """ + { + "tests": { + "mojom_tests": { + "parse": { + "ast_unittest": { + "ASTTest": { + "testNodeBase": { + "expected": "SKIP", + "actual": "SKIP" + } + } + } + } + } + }, + "interrupted": false, + "path_delimiter": ".", + "version": 3, + "seconds_since_epoch": 1406662283.764424, + "num_failures_by_type": { + "SKIP": 1 + } + }""" + actual = gtest_test_instance.ParseGTestJSON(raw_json) + self.assertEqual(1, len(actual)) + self.assertEqual('mojom_tests.parse.ast_unittest.ASTTest.testNodeBase', + actual[0].GetName()) + self.assertEqual(base_test_result.ResultType.SKIP, actual[0].GetType()) + + def testTestNameWithoutDisabledPrefix_disabled(self): + test_name_list = [ + 'A.DISABLED_B', + 'DISABLED_A.B', + 'DISABLED_A.DISABLED_B', + ] + for test_name in test_name_list: + actual = gtest_test_instance \ + .TestNameWithoutDisabledPrefix(test_name) + expected = 'A.B' + self.assertEqual(expected, actual) + + def testTestNameWithoutDisabledPrefix_flaky(self): + test_name_list = [ + 'A.FLAKY_B', + 'FLAKY_A.B', + 'FLAKY_A.FLAKY_B', + ] + for test_name in test_name_list: + actual = gtest_test_instance \ + .TestNameWithoutDisabledPrefix(test_name) + expected = 'A.B' + self.assertEqual(expected, actual) + + def testTestNameWithoutDisabledPrefix_notDisabledOrFlaky(self): + test_name = 'A.B' + actual = gtest_test_instance \ + .TestNameWithoutDisabledPrefix(test_name) + expected = 'A.B' + self.assertEqual(expected, actual) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/android/pylib/instrumentation/__init__.py b/android/pylib/instrumentation/__init__.py new file mode 100644 index 000000000000..5ffa28413724 --- /dev/null +++ b/android/pylib/instrumentation/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
diff --git a/android/pylib/instrumentation/instrumentation_parser.py b/android/pylib/instrumentation/instrumentation_parser.py new file mode 100644 index 000000000000..700d2415e9c6 --- /dev/null +++ b/android/pylib/instrumentation/instrumentation_parser.py @@ -0,0 +1,112 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +import logging +import re + +# http://developer.android.com/reference/android/test/InstrumentationTestRunner.html +STATUS_CODE_START = 1 +STATUS_CODE_OK = 0 +STATUS_CODE_ERROR = -1 +STATUS_CODE_FAILURE = -2 + +# AndroidJUnitRunner would status output -3 to indicate a test is skipped +STATUS_CODE_SKIP = -3 + +# AndroidJUnitRunner outputs -4 to indicate a failed assumption +# "A test for which an assumption fails should not generate a test +# case failure" +# http://junit.org/junit4/javadoc/4.12/org/junit/AssumptionViolatedException.html +STATUS_CODE_ASSUMPTION_FAILURE = -4 + +STATUS_CODE_TEST_DURATION = 1337 + +# When a test batch fails due to post-test Assertion failures (eg. +# LifetimeAssert). +STATUS_CODE_BATCH_FAILURE = 1338 + +# http://developer.android.com/reference/android/app/Activity.html +RESULT_CODE_OK = -1 +RESULT_CODE_CANCELED = 0 + +_INSTR_LINE_RE = re.compile(r'^\s*INSTRUMENTATION_([A-Z_]+): (.*)$') + + +class InstrumentationParser: + + def __init__(self, stream): + """An incremental parser for the output of Android instrumentation tests. + + Example: + + stream = adb.IterShell('am instrument -r ...') + parser = InstrumentationParser(stream) + + for code, bundle in parser.IterStatus(): + # do something with each instrumentation status + print('status:', code, bundle) + + # do something with the final instrumentation result + code, bundle = parser.GetResult() + print('result:', code, bundle) + + Args: + stream: a sequence of lines as produced by the raw output of an + instrumentation test (e.g. by |am instrument -r|). + """ + self._stream = stream + self._code = None + self._bundle = None + + def IterStatus(self): + """Iterate over statuses as they are produced by the instrumentation test. + + Yields: + A tuple (code, bundle) for each instrumentation status found in the + output. + """ + def join_bundle_values(bundle): + for key in bundle: + bundle[key] = '\n'.join(bundle[key]) + return bundle + + bundle = {'STATUS': {}, 'RESULT': {}} + header = None + key = None + for line in self._stream: + m = _INSTR_LINE_RE.match(line) + if m: + header, value = m.groups() + key = None + if header in ['STATUS', 'RESULT'] and '=' in value: + key, value = value.split('=', 1) + bundle[header][key] = [value] + elif header == 'STATUS_CODE': + yield int(value), join_bundle_values(bundle['STATUS']) + bundle['STATUS'] = {} + elif header == 'CODE': + self._code = int(value) + else: + logging.warning('Unknown INSTRUMENTATION_%s line: %s', header, value) + elif key is not None: + bundle[header][key].append(line) + + self._bundle = join_bundle_values(bundle['RESULT']) + + def GetResult(self): + """Return the final instrumentation result. + + Returns: + A pair (code, bundle) with the final instrumentation result. The |code| + may be None if no instrumentation result was found in the output. + + Raises: + AssertionError if attempting to get the instrumentation result before + exhausting |IterStatus| first. 
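+
+    Example (an editor's illustration, not part of the original file):
+
+      parser = InstrumentationParser([
+          'INSTRUMENTATION_RESULT: shortMsg=Process crashed.',
+          'INSTRUMENTATION_CODE: 0',
+      ])
+      list(parser.IterStatus())  # Exhaust the status generator first.
+      code, bundle = parser.GetResult()
+      # code == 0, bundle == {'shortMsg': 'Process crashed.'}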
+ """ + assert self._bundle is not None, ( + 'The IterStatus generator must be exhausted before reading the final' + ' instrumentation result.') + return self._code, self._bundle diff --git a/android/pylib/instrumentation/instrumentation_parser_test.py b/android/pylib/instrumentation/instrumentation_parser_test.py new file mode 100755 index 000000000000..dccb58a3fe03 --- /dev/null +++ b/android/pylib/instrumentation/instrumentation_parser_test.py @@ -0,0 +1,135 @@ +#!/usr/bin/env vpython3 +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +"""Unit tests for instrumentation.InstrumentationParser.""" + + +import unittest + +from pylib.instrumentation import instrumentation_parser + + +class InstrumentationParserTest(unittest.TestCase): + + def testInstrumentationParser_nothing(self): + parser = instrumentation_parser.InstrumentationParser(['']) + statuses = list(parser.IterStatus()) + code, bundle = parser.GetResult() + self.assertEqual(None, code) + self.assertEqual({}, bundle) + self.assertEqual([], statuses) + + def testInstrumentationParser_noMatchingStarts(self): + raw_output = [ + '', + 'this.is.a.test.package.TestClass:.', + 'Test result for =.', + 'Time: 1.234', + '', + 'OK (1 test)', + ] + + parser = instrumentation_parser.InstrumentationParser(raw_output) + statuses = list(parser.IterStatus()) + code, bundle = parser.GetResult() + self.assertEqual(None, code) + self.assertEqual({}, bundle) + self.assertEqual([], statuses) + + def testInstrumentationParser_resultAndCode(self): + raw_output = [ + 'INSTRUMENTATION_RESULT: shortMsg=foo bar', + 'INSTRUMENTATION_RESULT: longMsg=a foo', + 'walked into', + 'a bar', + 'INSTRUMENTATION_CODE: -1', + ] + + parser = instrumentation_parser.InstrumentationParser(raw_output) + statuses = list(parser.IterStatus()) + code, bundle = parser.GetResult() + self.assertEqual(-1, code) + self.assertEqual( + {'shortMsg': 'foo bar', 'longMsg': 'a foo\nwalked into\na bar'}, bundle) + self.assertEqual([], statuses) + + def testInstrumentationParser_oneStatus(self): + raw_output = [ + 'INSTRUMENTATION_STATUS: foo=1', + 'INSTRUMENTATION_STATUS: bar=hello', + 'INSTRUMENTATION_STATUS: world=false', + 'INSTRUMENTATION_STATUS: class=this.is.a.test.package.TestClass', + 'INSTRUMENTATION_STATUS: test=testMethod', + 'INSTRUMENTATION_STATUS_CODE: 0', + ] + + parser = instrumentation_parser.InstrumentationParser(raw_output) + statuses = list(parser.IterStatus()) + + expected = [ + (0, { + 'foo': '1', + 'bar': 'hello', + 'world': 'false', + 'class': 'this.is.a.test.package.TestClass', + 'test': 'testMethod', + }) + ] + self.assertEqual(expected, statuses) + + def testInstrumentationParser_multiStatus(self): + raw_output = [ + 'INSTRUMENTATION_STATUS: class=foo', + 'INSTRUMENTATION_STATUS: test=bar', + 'INSTRUMENTATION_STATUS_CODE: 1', + 'INSTRUMENTATION_STATUS: test_skipped=true', + 'INSTRUMENTATION_STATUS_CODE: 0', + 'INSTRUMENTATION_STATUS: class=hello', + 'INSTRUMENTATION_STATUS: test=world', + 'INSTRUMENTATION_STATUS: stack=', + 'foo/bar.py (27)', + 'hello/world.py (42)', + 'test/file.py (1)', + 'INSTRUMENTATION_STATUS_CODE: -1', + ] + + parser = instrumentation_parser.InstrumentationParser(raw_output) + statuses = list(parser.IterStatus()) + + expected = [ + (1, {'class': 'foo', 'test': 'bar',}), + (0, {'test_skipped': 'true'}), + (-1, { + 'class': 'hello', + 'test': 'world', + 'stack': '\nfoo/bar.py (27)\nhello/world.py (42)\ntest/file.py (1)', + }), + ] + 
self.assertEqual(expected, statuses) + + def testInstrumentationParser_statusResultAndCode(self): + raw_output = [ + 'INSTRUMENTATION_STATUS: class=foo', + 'INSTRUMENTATION_STATUS: test=bar', + 'INSTRUMENTATION_STATUS_CODE: 1', + 'INSTRUMENTATION_RESULT: result=hello', + 'world', + '', + '', + 'INSTRUMENTATION_CODE: 0', + ] + + parser = instrumentation_parser.InstrumentationParser(raw_output) + statuses = list(parser.IterStatus()) + code, bundle = parser.GetResult() + + self.assertEqual(0, code) + self.assertEqual({'result': 'hello\nworld\n\n'}, bundle) + self.assertEqual([(1, {'class': 'foo', 'test': 'bar'})], statuses) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/android/pylib/instrumentation/instrumentation_test_instance.py b/android/pylib/instrumentation/instrumentation_test_instance.py new file mode 100644 index 000000000000..f520879a60f9 --- /dev/null +++ b/android/pylib/instrumentation/instrumentation_test_instance.py @@ -0,0 +1,1235 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +import copy +import logging +import os +import pickle +import re + +import six +from devil.android import apk_helper +from pylib import constants +from pylib.base import base_test_result +from pylib.base import test_exception +from pylib.base import test_instance +from pylib.constants import host_paths +from pylib.instrumentation import test_result +from pylib.instrumentation import instrumentation_parser +from pylib.symbols import deobfuscator +from pylib.symbols import stack_symbolizer +from pylib.utils import dexdump +from pylib.utils import gold_utils +from pylib.utils import shared_preference_utils +from pylib.utils import test_filter + + +with host_paths.SysPath(host_paths.BUILD_COMMON_PATH): + import unittest_util # pylint: disable=import-error + +# Ref: http://developer.android.com/reference/android/app/Activity.html +_ACTIVITY_RESULT_CANCELED = 0 +_ACTIVITY_RESULT_OK = -1 + +_COMMAND_LINE_PARAMETER = 'cmdlinearg-parameter' +_DEFAULT_ANNOTATIONS = [ + 'SmallTest', 'MediumTest', 'LargeTest', 'EnormousTest', 'IntegrationTest'] +# This annotation is for disabled tests that should not be run in Test Reviver. +_DO_NOT_REVIVE_ANNOTATIONS = ['DoNotRevive', 'Manual'] +_EXCLUDE_UNLESS_REQUESTED_ANNOTATIONS = [ + 'DisabledTest', 'FlakyTest', 'Manual'] +_VALID_ANNOTATIONS = set(_DEFAULT_ANNOTATIONS + _DO_NOT_REVIVE_ANNOTATIONS + + _EXCLUDE_UNLESS_REQUESTED_ANNOTATIONS) + +_TEST_LIST_JUNIT4_RUNNERS = [ + 'org.chromium.base.test.BaseChromiumAndroidJUnitRunner'] + +_SKIP_PARAMETERIZATION = 'SkipCommandLineParameterization' +_PARAMETERIZED_COMMAND_LINE_FLAGS = 'ParameterizedCommandLineFlags' +_PARAMETERIZED_COMMAND_LINE_FLAGS_SWITCHES = ( + 'ParameterizedCommandLineFlags$Switches') +_NATIVE_CRASH_RE = re.compile('(process|native) crash', re.IGNORECASE) +_PICKLE_FORMAT_VERSION = 12 + +# The ID of the bundle value Instrumentation uses to report which test index the +# results are for in a collection of tests. Note that this index is 1-based. +_BUNDLE_CURRENT_ID = 'current' +# The ID of the bundle value Instrumentation uses to report the test class. +_BUNDLE_CLASS_ID = 'class' +# The ID of the bundle value Instrumentation uses to report the test name. +_BUNDLE_TEST_ID = 'test' +# The ID of the bundle value Instrumentation uses to report if a test was +# skipped. 
+_BUNDLE_SKIPPED_ID = 'test_skipped' +# The ID of the bundle value Instrumentation uses to report the crash stack, if +# the test crashed. +_BUNDLE_STACK_ID = 'stack' + +# The ID of the bundle value Chrome uses to report the test duration. +_BUNDLE_DURATION_ID = 'duration_ms' + +class MissingSizeAnnotationError(test_exception.TestException): + def __init__(self, class_name): + super().__init__( + class_name + + ': Test method is missing required size annotation. Add one of: ' + + ', '.join('@' + a for a in _VALID_ANNOTATIONS)) + + +class CommandLineParameterizationException(test_exception.TestException): + pass + + +class TestListPickleException(test_exception.TestException): + pass + + +# TODO(jbudorick): Make these private class methods of +# InstrumentationTestInstance once the instrumentation junit3_runner_class is +# deprecated. +def ParseAmInstrumentRawOutput(raw_output): + """Parses the output of an |am instrument -r| call. + + Args: + raw_output: the output of an |am instrument -r| call as a list of lines + Returns: + A 3-tuple containing: + - the instrumentation code as an integer + - the instrumentation result as a list of lines + - the instrumentation statuses received as a list of 2-tuples + containing: + - the status code as an integer + - the bundle dump as a dict mapping string keys to a list of + strings, one for each line. + """ + parser = instrumentation_parser.InstrumentationParser(raw_output) + statuses = list(parser.IterStatus()) + code, bundle = parser.GetResult() + return (code, bundle, statuses) + + +def GenerateTestResults(result_code, result_bundle, statuses, duration_ms, + device_abi, symbolizer): + """Generate test results from |statuses|. + + Args: + result_code: The overall status code as an integer. + result_bundle: The summary bundle dump as a dict. + statuses: A list of 2-tuples containing: + - the status code as an integer + - the bundle dump as a dict mapping string keys to string values + Note that this is the same as the third item in the 3-tuple returned by + |_ParseAmInstrumentRawOutput|. + duration_ms: The duration of the test in milliseconds. + device_abi: The device_abi, which is needed for symbolization. + symbolizer: The symbolizer used to symbolize stack. + + Returns: + A list containing an instance of InstrumentationTestResult for each test + parsed. + """ + + results = [] + + current_result = None + cumulative_duration = 0 + + for status_code, bundle in statuses: + # If the last test was a failure already, don't override that failure with + # post-test failures that could be caused by the original failure. + if (status_code == instrumentation_parser.STATUS_CODE_BATCH_FAILURE + and current_result.GetType() != base_test_result.ResultType.FAIL): + current_result.SetType(base_test_result.ResultType.FAIL) + _MaybeSetLog(bundle, current_result, symbolizer, device_abi) + continue + + if status_code == instrumentation_parser.STATUS_CODE_TEST_DURATION: + # For the first result, duration will be set below to the difference + # between the reported and actual durations to account for overhead like + # starting instrumentation. 
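+      # (Only results after the first pick up the bundle-reported duration
+      # here; the first result's duration is adjusted after the loop.)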
+ if results: + current_duration = int(bundle.get(_BUNDLE_DURATION_ID, duration_ms)) + current_result.SetDuration(current_duration) + cumulative_duration += current_duration + continue + + test_class = bundle.get(_BUNDLE_CLASS_ID, '') + test_method = bundle.get(_BUNDLE_TEST_ID, '') + if test_class and test_method: + test_name = '%s#%s' % (test_class, test_method) + else: + continue + + if status_code == instrumentation_parser.STATUS_CODE_START: + if current_result: + results.append(current_result) + current_result = test_result.InstrumentationTestResult( + test_name, base_test_result.ResultType.UNKNOWN, duration_ms) + else: + if status_code == instrumentation_parser.STATUS_CODE_OK: + if bundle.get(_BUNDLE_SKIPPED_ID, '').lower() in ('true', '1', 'yes'): + current_result.SetType(base_test_result.ResultType.SKIP) + elif current_result.GetType() == base_test_result.ResultType.UNKNOWN: + current_result.SetType(base_test_result.ResultType.PASS) + elif status_code == instrumentation_parser.STATUS_CODE_SKIP: + current_result.SetType(base_test_result.ResultType.SKIP) + elif status_code == instrumentation_parser.STATUS_CODE_ASSUMPTION_FAILURE: + current_result.SetType(base_test_result.ResultType.SKIP) + else: + if status_code not in (instrumentation_parser.STATUS_CODE_ERROR, + instrumentation_parser.STATUS_CODE_FAILURE): + logging.error('Unrecognized status code %d. Handling as an error.', + status_code) + current_result.SetType(base_test_result.ResultType.FAIL) + _MaybeSetLog(bundle, current_result, symbolizer, device_abi) + + if current_result: + if current_result.GetType() == base_test_result.ResultType.UNKNOWN: + crashed = (result_code == _ACTIVITY_RESULT_CANCELED and any( + _NATIVE_CRASH_RE.search(l) for l in six.itervalues(result_bundle))) + if crashed: + current_result.SetType(base_test_result.ResultType.CRASH) + + results.append(current_result) + + if results: + logging.info('Adding cumulative overhead to test %s: %dms', + results[0].GetName(), duration_ms - cumulative_duration) + results[0].SetDuration(duration_ms - cumulative_duration) + + return results + + +def _MaybeSetLog(bundle, current_result, symbolizer, device_abi): + if _BUNDLE_STACK_ID in bundle: + stack = bundle[_BUNDLE_STACK_ID] + if symbolizer and device_abi: + current_result.SetLog('%s\n%s' % (stack, '\n'.join( + symbolizer.ExtractAndResolveNativeStackTraces(stack, device_abi)))) + else: + current_result.SetLog(stack) + + current_result.SetFailureReason(_ParseExceptionMessage(stack)) + + +def _ParseExceptionMessage(stack): + """Extracts the exception message from the given stack trace. + """ + # This interprets stack traces reported via InstrumentationResultPrinter: + # https://source.chromium.org/chromium/chromium/src/+/main:third_party/android_support_test_runner/runner/src/main/java/android/support/test/internal/runner/listener/InstrumentationResultPrinter.java;l=181?q=InstrumentationResultPrinter&type=cs + # This is a standard Java stack trace, of the form: + # + # at SomeClass.SomeMethod(...) + # at ... + lines = stack.split('\n') + for i, line in enumerate(lines): + if line.startswith('\tat'): + return '\n'.join(lines[0:i]) + # No call stack found, so assume everything is the exception message. + return stack + + +def FilterTests(tests, + filter_strs=None, + annotations=None, + excluded_annotations=None): + """Filter a list of tests + + Args: + tests: a list of tests. e.g. 
[
+        {'annotations': {}, 'class': 'com.example.TestA', 'method': 'test1'},
+        {'annotations': {}, 'class': 'com.example.TestB', 'method': 'test2'}]
+    filter_strs: a list of googletest-style filter strings.
+    annotations: a dict of wanted annotations for test methods.
+    excluded_annotations: a dict of annotations to exclude.
+
+  Returns:
+    A list of filtered tests.
+  """
+
+  def test_names_from_pattern(combined_pattern, test_names):
+    patterns = combined_pattern.split(':')
+
+    hashable_patterns = set()
+    filename_patterns = []
+    for pattern in patterns:
+      if ('*' in pattern or '?' in pattern or '[' in pattern):
+        filename_patterns.append(pattern)
+      else:
+        hashable_patterns.add(pattern)
+
+    filter_test_names = set(
+        unittest_util.FilterTestNames(test_names, ':'.join(
+            filename_patterns))) if len(filename_patterns) > 0 else set()
+
+    for test_name in test_names:
+      if test_name in hashable_patterns:
+        filter_test_names.add(test_name)
+
+    return filter_test_names
+
+  def get_test_names(test):
+    test_names = set()
+    # Allow the fully-qualified name as well as an omitted package.
+    unqualified_class_test = {
+        'class': test['class'].split('.')[-1],
+        'method': test['method']
+    }
+
+    test_name = GetTestName(test, sep='.')
+    test_names.add(test_name)
+
+    unqualified_class_test_name = GetTestName(unqualified_class_test, sep='.')
+    test_names.add(unqualified_class_test_name)
+
+    unique_test_name = GetUniqueTestName(test, sep='.')
+    test_names.add(unique_test_name)
+
+    if test['is_junit4']:
+      junit4_test_name = GetTestNameWithoutParameterPostfix(test, sep='.')
+      test_names.add(junit4_test_name)
+
+      unqualified_junit4_test_name = \
+          GetTestNameWithoutParameterPostfix(unqualified_class_test, sep='.')
+      test_names.add(unqualified_junit4_test_name)
+    return test_names
+
+  def get_tests_from_names(tests, test_names, tests_to_names):
+    '''Returns the tests for which the given names apply.
+
+    Args:
+      tests: a list of tests. e.g. [
+          {'annotations': {}, 'class': 'com.example.TestA', 'method': 'test1'},
+          {'annotations': {}, 'class': 'com.example.TestB', 'method': 'test2'}]
+      test_names: a collection of names determining tests to return.
+      tests_to_names: a dictionary of test ids to a collection of applicable
+          names for that test.
+
+    Returns:
+      A list of tests that match the given test names.
+    '''
+    filtered_tests = []
+    for t in tests:
+      current_test_names = tests_to_names[id(t)]
+
+      for current_test_name in current_test_names:
+        if current_test_name in test_names:
+          filtered_tests.append(t)
+          break
+
+    return filtered_tests
+
+  def remove_tests_from_names(tests, remove_test_names, tests_to_names):
+    '''Returns the tests from the given list with the given names removed.
+
+    Args:
+      tests: a list of tests. e.g. [
+          {'annotations': {}, 'class': 'com.example.TestA', 'method': 'test1'},
+          {'annotations': {}, 'class': 'com.example.TestB', 'method': 'test2'}]
+      remove_test_names: a collection of names determining tests to remove.
+      tests_to_names: a dictionary of test ids to a collection of applicable
+          names for that test.
+
+    Returns:
+      A list of tests that don't match the given test names.
+    '''
+    filtered_tests = []
+
+    for t in tests:
+      for name in tests_to_names[id(t)]:
+        if name in remove_test_names:
+          break
+      else:
+        filtered_tests.append(t)
+    return filtered_tests
+
+  def gtests_filter(tests, combined_filters):
+    '''Returns the tests remaining after the combined_filters are applied.
+
+    Args:
+      tests: a list of tests. e.g. [
+          {'annotations': {}, 'class': 'com.example.TestA', 'method': 'test1'},
+          {'annotations': {}, 'class': 'com.example.TestB', 'method': 'test2'}]
+      combined_filters: a list of googletest-style filter strings, each of
+          which may contain a positive pattern and, after a '-', a negative
+          pattern.
+
+    Returns:
+      A list of tests that should still be included after the combined_filters
+      are applied to their names.
+    '''
+
+    if not combined_filters:
+      return tests
+
+    # Collect all test names.
+    all_test_names = set()
+    tests_to_names = {}
+    for t in tests:
+      tests_to_names[id(t)] = get_test_names(t)
+      for name in tests_to_names[id(t)]:
+        all_test_names.add(name)
+
+    for combined_filter in combined_filters:
+      pattern_groups = combined_filter.split('-')
+      negative_pattern = pattern_groups[1] if len(pattern_groups) > 1 else None
+      positive_pattern = pattern_groups[0]
+      if positive_pattern:
+        # Only use the test names that match the positive pattern.
+        positive_test_names = test_names_from_pattern(positive_pattern,
+                                                      all_test_names)
+        tests = get_tests_from_names(tests, positive_test_names,
+                                     tests_to_names)
+
+      if negative_pattern:
+        # Remove any test the negative filter matches.
+        remove_names = test_names_from_pattern(negative_pattern,
+                                               all_test_names)
+        tests = remove_tests_from_names(tests, remove_names, tests_to_names)
+
+    return tests
+
+  def annotation_filter(all_annotations):
+    if not annotations:
+      return True
+    return any_annotation_matches(annotations, all_annotations)
+
+  def excluded_annotation_filter(all_annotations):
+    if not excluded_annotations:
+      return True
+    return not any_annotation_matches(excluded_annotations,
+                                      all_annotations)
+
+  def any_annotation_matches(filter_annotations, all_annotations):
+    return any(
+        ak in all_annotations
+        and annotation_value_matches(av, all_annotations[ak])
+        for ak, av in filter_annotations)
+
+  def annotation_value_matches(filter_av, av):
+    if filter_av is None:
+      return True
+    if isinstance(av, dict):
+      tav_from_dict = av['value']
+      # If tav_from_dict is an int, the 'in' operator breaks, so convert
+      # filter_av and manually compare. See https://crbug.com/1019707
+      if isinstance(tav_from_dict, int):
+        return int(filter_av) == tav_from_dict
+      return filter_av in tav_from_dict
+    if isinstance(av, list):
+      return filter_av in av
+    return filter_av == av
+
+  return_tests = []
+  for t in gtests_filter(tests, filter_strs):
+    # Enforce that all tests declare their size.
+    if not any(a in _VALID_ANNOTATIONS for a in t['annotations']):
+      raise MissingSizeAnnotationError(GetTestName(t))
+
+    if (not annotation_filter(t['annotations'])
+        or not excluded_annotation_filter(t['annotations'])):
+      continue
+    return_tests.append(t)
+
+  return return_tests
+
+
+def GetAllTestsFromApk(test_apk):
+  pickle_path = '%s-dexdump.pickle' % test_apk
+  try:
+    tests = GetTestsFromPickle(pickle_path, os.path.getmtime(test_apk))
+  except TestListPickleException as e:
+    logging.info('Could not get tests from pickle: %s', e)
+    logging.info('Getting tests from dex via dexdump.')
+    tests = _GetTestsFromDexdump(test_apk)
+    SaveTestsToPickle(pickle_path, tests)
+  return tests
+
+
+def GetTestsFromPickle(pickle_path, test_mtime):
+  if not os.path.exists(pickle_path):
+    raise TestListPickleException('%s does not exist.' % pickle_path)
+  if os.path.getmtime(pickle_path) <= test_mtime:
+    raise TestListPickleException('File is stale: %s' % pickle_path)
+
+  with open(pickle_path, 'rb') as f:
+    pickle_data = pickle.load(f)
+  if pickle_data['VERSION'] != _PICKLE_FORMAT_VERSION:
+    raise TestListPickleException('PICKLE_FORMAT_VERSION has changed.')
+  return pickle_data['TEST_METHODS']
+
+
+def _GetTestsFromDexdump(test_apk):
+  dex_dumps = dexdump.Dump(test_apk)
+  tests = []
+
+  def get_test_methods(methods, annotations):
+    test_methods = []
+
+    for method in methods:
+      if method.startswith('test'):
+        method_annotations = annotations.get(method, {})
+
+        # Dexdump used to return no annotation info, so a MediumTest
+        # annotation was added to all methods. Preserve that behaviour by
+        # adding MediumTest when none of the size annotations are present.
+        if not any(valid in method_annotations for valid in _VALID_ANNOTATIONS):
+          method_annotations.update({'MediumTest': None})
+
+        test_methods.append({
+            'method': method,
+            'annotations': method_annotations
+        })
+
+    return test_methods
+
+  for dump in dex_dumps:
+    for package_name, package_info in six.iteritems(dump):
+      for class_name, class_info in six.iteritems(package_info['classes']):
+        if class_name.endswith('Test') and not class_info['is_abstract']:
+          classAnnotations, methodsAnnotations = class_info['annotations']
+          tests.append({
+              'class':
+                  '%s.%s' % (package_name, class_name),
+              'annotations':
+                  classAnnotations,
+              'methods':
+                  get_test_methods(class_info['methods'], methodsAnnotations),
+              'superclass':
+                  class_info['superclass'],
+          })
+  return tests
+
+
+def SaveTestsToPickle(pickle_path, tests):
+  pickle_data = {
+      'VERSION': _PICKLE_FORMAT_VERSION,
+      'TEST_METHODS': tests,
+  }
+  with open(pickle_path, 'wb') as pickle_file:
+    pickle.dump(pickle_data, pickle_file)
+
+
+class MissingJUnit4RunnerException(test_exception.TestException):
+  """Raised when a JUnit4 runner is not provided or specified in the APK
+  manifest."""
+
+  def __init__(self):
+    super().__init__(
+        'JUnit4 runner is not provided or specified in test apk manifest.')
+
+
+def GetTestName(test, sep='#'):
+  """Gets the name of the given test.
+
+  Note that this may return the same name for more than one test, e.g. if a
+  test is being run multiple times with different parameters.
+
+  Args:
+    test: the instrumentation test dict.
+    sep: the character(s) that should join the class name and the method name.
+  Returns:
+    The test name as a string.
+  """
+  test_name = '%s%s%s' % (test['class'], sep, test['method'])
+  assert ' *-:' not in test_name, (
+      'The test name must not contain any of the characters in " *-:". See '
+      'https://crbug.com/912199')
+  return test_name
+
+
+def GetTestNameWithoutParameterPostfix(
+    test, sep='#', parameterization_sep='__'):
+  """Gets the name of the given JUnit4 test without the parameter postfix.
+
+  For most WebView JUnit4 javatests, each test is parameterized with
+  "__sandboxed_mode" to run in both non-sandboxed mode and sandboxed mode.
+
+  This function returns the name of the test without the parameterization
+  so test filters can match both parameterized and non-parameterized tests.
+
+  Args:
+    test: the instrumentation test dict.
+    sep: the character(s) that should join the class name and the method name.
+    parameterization_sep: the character(s) that separate the method name and
+        the method parameterization postfix.
+  Returns:
+    The test name without the parameter postfix as a string.
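+
+  For example (hypothetical name): with the default separators,
+  'org.chromium.Foo#testBar__sandboxed_mode' maps to
+  'org.chromium.Foo#testBar'.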
+ """ + name = GetTestName(test, sep=sep) + return name.split(parameterization_sep)[0] + + +def GetUniqueTestName(test, sep='#'): + """Gets the unique name of the given test. + + This will include text to disambiguate between tests for which GetTestName + would return the same name. + + Args: + test: the instrumentation test dict. + sep: the character(s) that should join the class name and the method name. + Returns: + The unique test name as a string. + """ + display_name = GetTestName(test, sep=sep) + if test.get('flags', [None])[0]: + sanitized_flags = [x.replace('-', '_') for x in test['flags']] + display_name = '%s_with_%s' % (display_name, '_'.join(sanitized_flags)) + + assert ' *-:' not in display_name, ( + 'The test name must not contain any of the characters in " *-:". See ' + 'https://crbug.com/912199') + + return display_name + + +class InstrumentationTestInstance(test_instance.TestInstance): + + def __init__(self, args, data_deps_delegate, error_func): + super().__init__() + + self._additional_apks = [] + self._additional_apexs = [] + self._forced_queryable_additional_apks = [] + self._instant_additional_apks = [] + self._apk_under_test = None + self._apk_under_test_incremental_install_json = None + self._modules = None + self._fake_modules = None + self._additional_locales = None + self._package_info = None + self._suite = None + self._test_apk = None + self._test_apk_as_instant = False + self._test_apk_incremental_install_json = None + self._test_package = None + self._junit3_runner_class = None + self._junit4_runner_class = None + self._junit4_runner_supports_listing = None + self._test_support_apk = None + self._initializeApkAttributes(args, error_func) + + self._data_deps = None + self._data_deps_delegate = None + self._runtime_deps_path = None + self._store_data_in_app_directory = False + self._initializeDataDependencyAttributes(args, data_deps_delegate) + + self._annotations = None + self._excluded_annotations = None + self._test_filters = None + self._initializeTestFilterAttributes(args) + + self._run_setup_commands = [] + self._run_teardown_commands = [] + self._initializeSetupTeardownCommandAttributes(args) + + self._flags = None + self._use_apk_under_test_flags_file = False + self._initializeFlagAttributes(args) + + self._screenshot_dir = None + self._timeout_scale = None + self._wait_for_java_debugger = None + self._initializeTestControlAttributes(args) + + self._coverage_directory = None + self._initializeTestCoverageAttributes(args) + + self._store_tombstones = False + self._symbolizer = None + self._enable_breakpad_dump = False + self._proguard_mapping_path = None + self._deobfuscator = None + self._initializeLogAttributes(args) + + self._edit_shared_prefs = [] + self._initializeEditPrefsAttributes(args) + + self._replace_system_package = None + self._initializeReplaceSystemPackageAttributes(args) + + self._system_packages_to_remove = None + self._initializeSystemPackagesToRemoveAttributes(args) + + self._use_voice_interaction_service = None + self._initializeUseVoiceInteractionService(args) + + self._use_webview_provider = None + self._initializeUseWebviewProviderAttributes(args) + + self._skia_gold_properties = None + self._initializeSkiaGoldAttributes(args) + + self._test_launcher_batch_limit = None + self._initializeTestLauncherAttributes(args) + + self._approve_app_links_domain = None + self._approve_app_links_package = None + self._initializeApproveAppLinksAttributes(args) + + self._wpr_enable_record = args.wpr_enable_record + + self._external_shard_index = 
args.test_launcher_shard_index + self._total_external_shards = args.test_launcher_total_shards + + self._is_unit_test = False + self._initializeUnitTestFlag(args) + + def _initializeApkAttributes(self, args, error_func): + if args.apk_under_test: + apk_under_test_path = args.apk_under_test + if (not args.apk_under_test.endswith('.apk') + and not args.apk_under_test.endswith('.apks')): + apk_under_test_path = os.path.join( + constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR, + '%s.apk' % args.apk_under_test) + + # TODO(jbudorick): Move the realpath up to the argument parser once + # APK-by-name is no longer supported. + apk_under_test_path = os.path.realpath(apk_under_test_path) + + if not os.path.exists(apk_under_test_path): + error_func('Unable to find APK under test: %s' % apk_under_test_path) + + self._apk_under_test = apk_helper.ToHelper(apk_under_test_path) + + test_apk_path = args.test_apk + if not os.path.exists(test_apk_path): + test_apk_path = os.path.join( + constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR, + '%s.apk' % args.test_apk) + # TODO(jbudorick): Move the realpath up to the argument parser once + # APK-by-name is no longer supported. + test_apk_path = os.path.realpath(test_apk_path) + + if not os.path.exists(test_apk_path): + error_func('Unable to find test APK: %s' % test_apk_path) + + self._test_apk = apk_helper.ToHelper(test_apk_path) + self._suite = os.path.splitext(os.path.basename(args.test_apk))[0] + + self._test_apk_as_instant = args.test_apk_as_instant + + self._apk_under_test_incremental_install_json = ( + args.apk_under_test_incremental_install_json) + self._test_apk_incremental_install_json = ( + args.test_apk_incremental_install_json) + + if self._test_apk_incremental_install_json: + assert self._suite.endswith('_incremental') + self._suite = self._suite[:-len('_incremental')] + + self._modules = args.modules + self._fake_modules = args.fake_modules + self._additional_locales = args.additional_locales + + self._test_support_apk = apk_helper.ToHelper(os.path.join( + constants.GetOutDirectory(), constants.SDK_BUILD_TEST_JAVALIB_DIR, + '%sSupport.apk' % self._suite)) + + self._test_package = self._test_apk.GetPackageName() + all_instrumentations = self._test_apk.GetAllInstrumentations() + all_junit3_runner_classes = [ + x for x in all_instrumentations if ('0xffffffff' in x.get( + 'chromium-junit3', ''))] + all_junit4_runner_classes = [ + x for x in all_instrumentations if ('0xffffffff' not in x.get( + 'chromium-junit3', ''))] + + if len(all_junit3_runner_classes) > 1: + logging.warning('This test apk has more than one JUnit3 instrumentation') + if len(all_junit4_runner_classes) > 1: + logging.warning('This test apk has more than one JUnit4 instrumentation') + + self._junit3_runner_class = ( + all_junit3_runner_classes[0]['android:name'] + if all_junit3_runner_classes else self.test_apk.GetInstrumentationName()) + + self._junit4_runner_class = ( + all_junit4_runner_classes[0]['android:name'] + if all_junit4_runner_classes else None) + + if self._junit4_runner_class: + if self._test_apk_incremental_install_json: + self._junit4_runner_supports_listing = next( + (True for x in self._test_apk.GetAllMetadata() + if 'real-instr' in x[0] and x[1] in _TEST_LIST_JUNIT4_RUNNERS), + False) + else: + self._junit4_runner_supports_listing = ( + self._junit4_runner_class in _TEST_LIST_JUNIT4_RUNNERS) + + self._package_info = None + if self._apk_under_test: + package_under_test = self._apk_under_test.GetPackageName() + for package_info in 
six.itervalues(constants.PACKAGE_INFO): + if package_under_test == package_info.package: + self._package_info = package_info + break + if not self._package_info: + logging.warning( + 'Unable to find package info for %s. ' + '(This may just mean that the test package is ' + 'currently being installed.)', self._test_package) + + for x in set(args.additional_apks + args.forced_queryable_additional_apks + + args.instant_additional_apks): + if not os.path.exists(x): + error_func('Unable to find additional APK: %s' % x) + + apk = apk_helper.ToHelper(x) + self._additional_apks.append(apk) + + if x in args.forced_queryable_additional_apks: + self._forced_queryable_additional_apks.append(apk) + + if x in args.instant_additional_apks: + self._instant_additional_apks.append(apk) + + self._additional_apexs = args.additional_apexs + + def _initializeDataDependencyAttributes(self, args, data_deps_delegate): + self._data_deps = [] + self._data_deps_delegate = data_deps_delegate + self._runtime_deps_path = args.runtime_deps_path + self._store_data_in_app_directory = args.store_data_in_app_directory + if not self._runtime_deps_path: + logging.warning('No data dependencies will be pushed.') + + def _initializeTestFilterAttributes(self, args): + self._test_filters = test_filter.InitializeFiltersFromArgs(args) + + def annotation_element(a): + a = a.split('=', 1) + return (a[0], a[1] if len(a) == 2 else None) + + if args.annotation_str: + self._annotations = [ + annotation_element(a) for a in args.annotation_str.split(',')] + elif not self._test_filters: + self._annotations = [ + annotation_element(a) for a in _DEFAULT_ANNOTATIONS] + else: + self._annotations = [] + + if args.exclude_annotation_str: + self._excluded_annotations = [ + annotation_element(a) for a in args.exclude_annotation_str.split(',')] + else: + self._excluded_annotations = [] + + requested_annotations = set(a[0] for a in self._annotations) + if args.run_disabled: + self._excluded_annotations.extend( + annotation_element(a) for a in _DO_NOT_REVIVE_ANNOTATIONS + if a not in requested_annotations) + else: + self._excluded_annotations.extend( + annotation_element(a) for a in _EXCLUDE_UNLESS_REQUESTED_ANNOTATIONS + if a not in requested_annotations) + + def _initializeSetupTeardownCommandAttributes(self, args): + self._run_setup_commands = args.run_setup_commands + self._run_teardown_commands = args.run_teardown_commands + + def _initializeFlagAttributes(self, args): + self._use_apk_under_test_flags_file = args.use_apk_under_test_flags_file + self._flags = ['--enable-test-intents'] + if args.command_line_flags: + self._flags.extend(args.command_line_flags) + if args.device_flags_file: + with open(args.device_flags_file) as device_flags_file: + stripped_lines = (l.strip() for l in device_flags_file) + self._flags.extend(flag for flag in stripped_lines if flag) + if args.strict_mode and args.strict_mode != 'off' and ( + # TODO(yliuyliu): Turn on strict mode for coverage once + # crbug/1006397 is fixed. 
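+        # In effect, the strict-mode flag is only appended when coverage is
+        # not being collected.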
+ not args.coverage_dir): + self._flags.append('--strict-mode=' + args.strict_mode) + + def _initializeTestControlAttributes(self, args): + self._screenshot_dir = args.screenshot_dir + self._timeout_scale = args.timeout_scale or 1 + self._wait_for_java_debugger = args.wait_for_java_debugger + + def _initializeTestCoverageAttributes(self, args): + self._coverage_directory = args.coverage_dir + + def _initializeLogAttributes(self, args): + self._enable_breakpad_dump = args.enable_breakpad_dump + self._proguard_mapping_path = args.proguard_mapping_path + self._store_tombstones = args.store_tombstones + self._symbolizer = stack_symbolizer.Symbolizer( + self.apk_under_test.path if self.apk_under_test else None) + + def _initializeEditPrefsAttributes(self, args): + if not hasattr(args, 'shared_prefs_file') or not args.shared_prefs_file: + return + if not isinstance(args.shared_prefs_file, str): + logging.warning("Given non-string for a filepath") + return + self._edit_shared_prefs = shared_preference_utils.ExtractSettingsFromJson( + args.shared_prefs_file) + + def _initializeReplaceSystemPackageAttributes(self, args): + if (not hasattr(args, 'replace_system_package') + or not args.replace_system_package): + return + self._replace_system_package = args.replace_system_package + + def _initializeSystemPackagesToRemoveAttributes(self, args): + if (not hasattr(args, 'system_packages_to_remove') + or not args.system_packages_to_remove): + return + self._system_packages_to_remove = args.system_packages_to_remove + + def _initializeUseVoiceInteractionService(self, args): + if (not hasattr(args, 'use_voice_interaction_service') + or not args.use_voice_interaction_service): + return + self._use_voice_interaction_service = args.use_voice_interaction_service + + def _initializeUseWebviewProviderAttributes(self, args): + if (not hasattr(args, 'use_webview_provider') + or not args.use_webview_provider): + return + self._use_webview_provider = args.use_webview_provider + + def _initializeSkiaGoldAttributes(self, args): + self._skia_gold_properties = gold_utils.AndroidSkiaGoldProperties(args) + + def _initializeTestLauncherAttributes(self, args): + if hasattr(args, 'test_launcher_batch_limit'): + self._test_launcher_batch_limit = args.test_launcher_batch_limit + + def _initializeApproveAppLinksAttributes(self, args): + if (not hasattr(args, 'approve_app_links') or not args.approve_app_links): + return + + # The argument will be formatted as com.android.thing:www.example.com . 
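+    # e.g. 'com.android.thing:www.example.com' is split below into the
+    # package 'com.android.thing' and the domain 'www.example.com'.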
+ app_links = args.approve_app_links.split(':') + + if (len(app_links) != 2 or not app_links[0] or not app_links[1]): + logging.warning('--approve_app_links option provided, but malformed.') + return + + self._approve_app_links_package = app_links[0] + self._approve_app_links_domain = app_links[1] + + def _initializeUnitTestFlag(self, args): + self._is_unit_test = args.is_unit_test + + @property + def additional_apks(self): + return self._additional_apks + + @property + def additional_apexs(self): + return self._additional_apexs + + @property + def apk_under_test(self): + return self._apk_under_test + + @property + def apk_under_test_incremental_install_json(self): + return self._apk_under_test_incremental_install_json + + @property + def approve_app_links_package(self): + return self._approve_app_links_package + + @property + def approve_app_links_domain(self): + return self._approve_app_links_domain + + @property + def modules(self): + return self._modules + + @property + def fake_modules(self): + return self._fake_modules + + @property + def additional_locales(self): + return self._additional_locales + + @property + def coverage_directory(self): + return self._coverage_directory + + @property + def edit_shared_prefs(self): + return self._edit_shared_prefs + + @property + def enable_breakpad_dump(self): + return self._enable_breakpad_dump + + @property + def external_shard_index(self): + return self._external_shard_index + + @property + def flags(self): + return self._flags + + @property + def is_unit_test(self): + return self._is_unit_test + + @property + def junit3_runner_class(self): + return self._junit3_runner_class + + @property + def junit4_runner_class(self): + return self._junit4_runner_class + + @property + def junit4_runner_supports_listing(self): + return self._junit4_runner_supports_listing + + @property + def package_info(self): + return self._package_info + + @property + def replace_system_package(self): + return self._replace_system_package + + @property + def run_setup_commands(self): + return self._run_setup_commands + + @property + def run_teardown_commands(self): + return self._run_teardown_commands + + @property + def use_voice_interaction_service(self): + return self._use_voice_interaction_service + + @property + def use_webview_provider(self): + return self._use_webview_provider + + @property + def screenshot_dir(self): + return self._screenshot_dir + + @property + def skia_gold_properties(self): + return self._skia_gold_properties + + @property + def store_data_in_app_directory(self): + return self._store_data_in_app_directory + + @property + def store_tombstones(self): + return self._store_tombstones + + @property + def suite(self): + return self._suite + + @property + def symbolizer(self): + return self._symbolizer + + @property + def system_packages_to_remove(self): + return self._system_packages_to_remove + + @property + def test_apk(self): + return self._test_apk + + @property + def test_apk_as_instant(self): + return self._test_apk_as_instant + + @property + def test_apk_incremental_install_json(self): + return self._test_apk_incremental_install_json + + @property + def test_filters(self): + return self._test_filters + + @property + def test_launcher_batch_limit(self): + return self._test_launcher_batch_limit + + @property + def test_support_apk(self): + return self._test_support_apk + + @property + def test_package(self): + return self._test_package + + @property + def timeout_scale(self): + return self._timeout_scale + + @property + def 
total_external_shards(self): + return self._total_external_shards + + @property + def use_apk_under_test_flags_file(self): + return self._use_apk_under_test_flags_file + + @property + def wait_for_java_debugger(self): + return self._wait_for_java_debugger + + @property + def wpr_record_mode(self): + return self._wpr_enable_record + + @property + def wpr_replay_mode(self): + return not self._wpr_enable_record + + #override + def TestType(self): + return 'instrumentation' + + #override + def GetPreferredAbis(self): + # We could alternatively take the intersection of what they all support, + # but it should never be the case that they support different things. + apks = [self._test_apk, self._apk_under_test] + self._additional_apks + for apk in apks: + if apk: + ret = apk.GetAbis() + if ret: + return ret + return [] + + #override + def SetUp(self): + self._data_deps.extend( + self._data_deps_delegate(self._runtime_deps_path)) + if self._proguard_mapping_path: + self._deobfuscator = deobfuscator.DeobfuscatorPool( + self._proguard_mapping_path) + + def GetDataDependencies(self): + return self._data_deps + + def GetTests(self): + if self._test_apk_incremental_install_json: + # Would likely just be a matter of calling GetAllTestsFromApk on all + # .dex files listed in the .json. + raise Exception('Support not implemented for incremental_install=true on ' + 'tests that do not use //base\'s test runner.') + raw_tests = GetAllTestsFromApk(self.test_apk.path) + return self.ProcessRawTests(raw_tests) + + def MaybeDeobfuscateLines(self, lines): + if not self._deobfuscator: + return lines + return self._deobfuscator.TransformLines(lines) + + def ProcessRawTests(self, raw_tests): + inflated_tests = self._ParameterizeTestsWithFlags( + self._InflateTests(raw_tests)) + if self._junit4_runner_class is None and any( + t['is_junit4'] for t in inflated_tests): + raise MissingJUnit4RunnerException() + filtered_tests = FilterTests(inflated_tests, self._test_filters, + self._annotations, self._excluded_annotations) + if self._test_filters and not filtered_tests: + for t in inflated_tests: + logging.debug(' %s', GetUniqueTestName(t)) + logging.warning('Unmatched Filters: %s', self._test_filters) + return filtered_tests + + def IsApkForceQueryable(self, apk): + return apk in self._forced_queryable_additional_apks + + def IsApkInstant(self, apk): + return apk in self._instant_additional_apks + + # pylint: disable=no-self-use + def _InflateTests(self, tests): + inflated_tests = [] + for c in tests: + for m in c['methods']: + a = dict(c['annotations']) + a.update(m['annotations']) + inflated_tests.append({ + 'class': c['class'], + 'method': m['method'], + 'annotations': a, + # TODO(https://crbug.com/1084729): Remove is_junit4. 
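+            # Currently hardcoded to True for every inflated test (see the
+            # TODO above).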
+ 'is_junit4': True + }) + return inflated_tests + + def _ParameterizeTestsWithFlags(self, tests): + + def _checkParameterization(annotations): + types = [ + _PARAMETERIZED_COMMAND_LINE_FLAGS_SWITCHES, + _PARAMETERIZED_COMMAND_LINE_FLAGS, + ] + if types[0] in annotations and types[1] in annotations: + raise CommandLineParameterizationException( + 'Multiple command-line parameterization types: {}.'.format( + ', '.join(types))) + + def _switchesToFlags(switches): + return ['--{}'.format(s) for s in switches if s] + + def _annotationToSwitches(clazz, methods): + if clazz == _PARAMETERIZED_COMMAND_LINE_FLAGS_SWITCHES: + return [methods['value']] + if clazz == _PARAMETERIZED_COMMAND_LINE_FLAGS: + list_of_switches = [] + for annotation in methods['value']: + for c, m in six.iteritems(annotation): + list_of_switches += _annotationToSwitches(c, m) + return list_of_switches + return [] + + def _setTestFlags(test, flags): + if flags: + test['flags'] = flags + elif 'flags' in test: + del test['flags'] + + new_tests = [] + for t in tests: + annotations = t['annotations'] + list_of_switches = [] + _checkParameterization(annotations) + if _SKIP_PARAMETERIZATION not in annotations: + for clazz, methods in six.iteritems(annotations): + list_of_switches += _annotationToSwitches(clazz, methods) + if list_of_switches: + _setTestFlags(t, _switchesToFlags(list_of_switches[0])) + for p in list_of_switches[1:]: + parameterized_t = copy.copy(t) + _setTestFlags(parameterized_t, _switchesToFlags(p)) + new_tests.append(parameterized_t) + return tests + new_tests + + @staticmethod + def ParseAmInstrumentRawOutput(raw_output): + return ParseAmInstrumentRawOutput(raw_output) + + @staticmethod + def GenerateTestResults(result_code, result_bundle, statuses, duration_ms, + device_abi, symbolizer): + return GenerateTestResults(result_code, result_bundle, statuses, + duration_ms, device_abi, symbolizer) + + #override + def TearDown(self): + self.symbolizer.CleanUp() + if self._deobfuscator: + self._deobfuscator.Close() + self._deobfuscator = None diff --git a/android/pylib/instrumentation/instrumentation_test_instance_test.py b/android/pylib/instrumentation/instrumentation_test_instance_test.py new file mode 100755 index 000000000000..945c404d2b84 --- /dev/null +++ b/android/pylib/instrumentation/instrumentation_test_instance_test.py @@ -0,0 +1,1397 @@ +#!/usr/bin/env vpython3 +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""Unit tests for instrumentation_test_instance.""" + +# pylint: disable=protected-access + + +import collections +import tempfile +import unittest + +from six.moves import range # pylint: disable=redefined-builtin +from pylib.base import base_test_result +from pylib.instrumentation import instrumentation_test_instance + +import mock # pylint: disable=import-error + +_INSTRUMENTATION_TEST_INSTANCE_PATH = ( + 'pylib.instrumentation.instrumentation_test_instance.%s') + +class InstrumentationTestInstanceTest(unittest.TestCase): + + def setUp(self): + options = mock.Mock() + options.tool = '' + + @staticmethod + def createTestInstance(): + c = _INSTRUMENTATION_TEST_INSTANCE_PATH % 'InstrumentationTestInstance' + # yapf: disable + with mock.patch('%s._initializeApkAttributes' % c), ( + mock.patch('%s._initializeDataDependencyAttributes' % c)), ( + mock.patch('%s._initializeTestFilterAttributes' %c)), ( + mock.patch('%s._initializeFlagAttributes' % c)), ( + mock.patch('%s._initializeTestControlAttributes' % c)), ( + mock.patch('%s._initializeTestCoverageAttributes' % c)), ( + mock.patch('%s._initializeSkiaGoldAttributes' % c)): + # yapf: enable + return instrumentation_test_instance.InstrumentationTestInstance( + mock.MagicMock(), mock.MagicMock(), lambda s: None) + + _FlagAttributesArgs = collections.namedtuple('_FlagAttributesArgs', [ + 'command_line_flags', 'device_flags_file', 'strict_mode', + 'use_apk_under_test_flags_file', 'coverage_dir' + ]) + + def createFlagAttributesArgs(self, + command_line_flags=None, + device_flags_file=None, + strict_mode=None, + use_apk_under_test_flags_file=False, + coverage_dir=None): + return self._FlagAttributesArgs(command_line_flags, device_flags_file, + strict_mode, use_apk_under_test_flags_file, + coverage_dir) + + def test_initializeFlagAttributes_commandLineFlags(self): + o = self.createTestInstance() + args = self.createFlagAttributesArgs(command_line_flags=['--foo', '--bar']) + o._initializeFlagAttributes(args) + self.assertEqual(o._flags, ['--enable-test-intents', '--foo', '--bar']) + + def test_initializeFlagAttributes_deviceFlagsFile(self): + o = self.createTestInstance() + with tempfile.NamedTemporaryFile(mode='w') as flags_file: + flags_file.write('\n'.join(['--foo', '--bar'])) + flags_file.flush() + + args = self.createFlagAttributesArgs(device_flags_file=flags_file.name) + o._initializeFlagAttributes(args) + self.assertEqual(o._flags, ['--enable-test-intents', '--foo', '--bar']) + + def test_initializeFlagAttributes_strictModeOn(self): + o = self.createTestInstance() + args = self.createFlagAttributesArgs(strict_mode='on') + o._initializeFlagAttributes(args) + self.assertEqual(o._flags, ['--enable-test-intents', '--strict-mode=on']) + + def test_initializeFlagAttributes_strictModeOn_coverageOn(self): + o = self.createTestInstance() + args = self.createFlagAttributesArgs( + strict_mode='on', coverage_dir='/coverage/dir') + o._initializeFlagAttributes(args) + self.assertEqual(o._flags, ['--enable-test-intents']) + + def test_initializeFlagAttributes_strictModeOff(self): + o = self.createTestInstance() + args = self.createFlagAttributesArgs(strict_mode='off') + o._initializeFlagAttributes(args) + self.assertEqual(o._flags, ['--enable-test-intents']) + + def testGetTests_noFilter(self): + o = self.createTestInstance() + raw_tests = [ + { + 'annotations': {'Feature': {'value': ['Foo']}}, + 'class': 'org.chromium.test.SampleTest', + 'superclass': 'java.lang.Object', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 
'testMethod1', + }, + { + 'annotations': {'MediumTest': None}, + 'method': 'testMethod2', + }, + ], + }, + { + 'annotations': {'Feature': {'value': ['Bar']}}, + 'class': 'org.chromium.test.SampleTest2', + 'superclass': 'java.lang.Object', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + ], + } + ] + + expected_tests = [ + { + 'annotations': { + 'Feature': {'value': ['Foo']}, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'method': 'testMethod1', + 'is_junit4': True, + }, + { + 'annotations': { + 'Feature': {'value': ['Foo']}, + 'MediumTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'method': 'testMethod2', + 'is_junit4': True, + }, + { + 'annotations': { + 'Feature': {'value': ['Bar']}, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest2', + 'method': 'testMethod1', + 'is_junit4': True, + }, + ] + + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + + self.assertEqual(actual_tests, expected_tests) + + def testGetTests_simpleGtestFilter(self): + o = self.createTestInstance() + raw_tests = [ + { + 'annotations': {'Feature': {'value': ['Foo']}}, + 'class': 'org.chromium.test.SampleTest', + 'superclass': 'java.lang.Object', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + { + 'annotations': {'MediumTest': None}, + 'method': 'testMethod2', + }, + ], + } + ] + + expected_tests = [ + { + 'annotations': { + 'Feature': {'value': ['Foo']}, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'is_junit4': True, + 'method': 'testMethod1', + }, + ] + + o._test_filters = ['org.chromium.test.SampleTest.testMethod1'] + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + + self.assertEqual(actual_tests, expected_tests) + + def testGetTests_simpleGtestPositiveAndNegativeFilter(self): + o = self.createTestInstance() + raw_tests = [{ + 'annotations': { + 'Feature': { + 'value': ['Foo'] + } + }, + 'class': + 'org.chromium.test.SampleTest', + 'superclass': + 'java.lang.Object', + 'methods': [ + { + 'annotations': { + 'SmallTest': None + }, + 'method': 'testMethod1', + }, + { + 'annotations': { + 'MediumTest': None + }, + 'method': 'testMethod2', + }, + ], + }, { + 'annotations': { + 'Feature': { + 'value': ['Foo'] + } + }, + 'class': + 'org.chromium.test.SampleTest2', + 'superclass': + 'java.lang.Object', + 'methods': [{ + 'annotations': { + 'SmallTest': None + }, + 'method': 'testMethod1', + }], + }] + + expected_tests = [ + { + 'annotations': { + 'Feature': { + 'value': ['Foo'] + }, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'is_junit4': True, + 'method': 'testMethod1', + }, + ] + + o._test_filters = [ + 'org.chromium.test.SampleTest.*'\ + '-org.chromium.test.SampleTest.testMethod2' + ] + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + + self.assertEqual(actual_tests, expected_tests) + + def testGetTests_multipleGtestPositiveAndNegativeFilter(self): + o = self.createTestInstance() + raw_tests = [{ + 'annotations': { + 'Feature': { + 'value': ['Foo'] + } + }, + 'class': + 'org.chromium.test.SampleTest', + 'superclass': + 'java.lang.Object', + 'methods': [ + { + 'annotations': { + 'SmallTest': None + }, + 'method': 'testMethod1', + }, + { + 'annotations': { + 'MediumTest': None + }, + 'method': 'testMethod2', + }, + ], + }, { + 'annotations': { + 'Feature': { + 'value': ['Foo'] + } + }, + 'class': + 
'org.chromium.test.SampleTest2', + 'superclass': + 'java.lang.Object', + 'methods': [{ + 'annotations': { + 'SmallTest': None + }, + 'method': 'testMethod1', + }], + }] + + expected_tests = [ + { + 'annotations': { + 'Feature': { + 'value': ['Foo'] + }, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'is_junit4': True, + 'method': 'testMethod1', + }, + ] + + o._test_filters = [ + 'org.chromium.test.SampleTest*testMethod1', + 'org.chromium.test.SampleTest.*'\ + '-org.chromium.test.SampleTest.testMethod2' + ] + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + + self.assertEqual(actual_tests, expected_tests) + + def testGetTests_simpleGtestUnqualifiedNameFilter(self): + o = self.createTestInstance() + raw_tests = [ + { + 'annotations': {'Feature': {'value': ['Foo']}}, + 'class': 'org.chromium.test.SampleTest', + 'superclass': 'java.lang.Object', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + { + 'annotations': {'MediumTest': None}, + 'method': 'testMethod2', + }, + ], + } + ] + + expected_tests = [ + { + 'annotations': { + 'Feature': {'value': ['Foo']}, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'is_junit4': True, + 'method': 'testMethod1', + }, + ] + + o._test_filters = ['SampleTest.testMethod1'] + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + + self.assertEqual(actual_tests, expected_tests) + + def testGetTests_parameterizedTestGtestFilter(self): + o = self.createTestInstance() + raw_tests = [ + { + 'annotations': {'Feature': {'value': ['Foo']}}, + 'class': 'org.chromium.test.SampleTest', + 'superclass': 'java.lang.Object', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1__sandboxed_mode', + }, + ], + }, + { + 'annotations': {'Feature': {'value': ['Bar']}}, + 'class': 'org.chromium.test.SampleTest2', + 'superclass': 'java.lang.Object', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + ], + } + ] + + expected_tests = [ + { + 'annotations': { + 'Feature': {'value': ['Foo']}, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'method': 'testMethod1', + 'is_junit4': True, + }, + { + 'annotations': { + 'Feature': {'value': ['Foo']}, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'method': 'testMethod1__sandboxed_mode', + 'is_junit4': True, + }, + ] + + o._junit4_runner_class = 'J4Runner' + o._test_filters = ['org.chromium.test.SampleTest.testMethod1'] + actual_tests = o.ProcessRawTests(raw_tests) + + self.assertEqual(actual_tests, expected_tests) + + def testGetTests_wildcardGtestFilter(self): + o = self.createTestInstance() + raw_tests = [ + { + 'annotations': {'Feature': {'value': ['Foo']}}, + 'class': 'org.chromium.test.SampleTest', + 'superclass': 'java.lang.Object', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + { + 'annotations': {'MediumTest': None}, + 'method': 'testMethod2', + }, + ], + }, + { + 'annotations': {'Feature': {'value': ['Bar']}}, + 'class': 'org.chromium.test.SampleTest2', + 'superclass': 'java.lang.Object', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + ], + } + ] + + expected_tests = [ + { + 'annotations': { + 'Feature': {'value': ['Bar']}, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest2', + 'is_junit4': True, + 
'method': 'testMethod1', + }, + ] + + o._test_filters = ['org.chromium.test.SampleTest2.*'] + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + + self.assertEqual(actual_tests, expected_tests) + + def testGetTests_negativeGtestFilter(self): + o = self.createTestInstance() + raw_tests = [ + { + 'annotations': {'Feature': {'value': ['Foo']}}, + 'class': 'org.chromium.test.SampleTest', + 'superclass': 'java.lang.Object', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + { + 'annotations': {'MediumTest': None}, + 'method': 'testMethod2', + }, + ], + }, + { + 'annotations': {'Feature': {'value': ['Bar']}}, + 'class': 'org.chromium.test.SampleTest2', + 'superclass': 'java.lang.Object', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + ], + } + ] + + expected_tests = [ + { + 'annotations': { + 'Feature': {'value': ['Foo']}, + 'MediumTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'is_junit4': True, + 'method': 'testMethod2', + }, + { + 'annotations': { + 'Feature': {'value': ['Bar']}, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest2', + 'is_junit4': True, + 'method': 'testMethod1', + }, + ] + + o._test_filters = ['*-org.chromium.test.SampleTest.testMethod1'] + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + + self.assertEqual(actual_tests, expected_tests) + + def testGetTests_annotationFilter(self): + o = self.createTestInstance() + raw_tests = [ + { + 'annotations': {'Feature': {'value': ['Foo']}}, + 'class': 'org.chromium.test.SampleTest', + 'superclass': 'java.lang.Object', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + { + 'annotations': {'MediumTest': None}, + 'method': 'testMethod2', + }, + ], + }, + { + 'annotations': {'Feature': {'value': ['Bar']}}, + 'class': 'org.chromium.test.SampleTest2', + 'superclass': 'java.lang.Object', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + ], + } + ] + + expected_tests = [ + { + 'annotations': { + 'Feature': {'value': ['Foo']}, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'is_junit4': True, + 'method': 'testMethod1', + }, + { + 'annotations': { + 'Feature': {'value': ['Bar']}, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest2', + 'is_junit4': True, + 'method': 'testMethod1', + }, + ] + + o._annotations = [('SmallTest', None)] + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + + self.assertEqual(actual_tests, expected_tests) + + def testGetTests_excludedAnnotationFilter(self): + o = self.createTestInstance() + raw_tests = [ + { + 'annotations': {'Feature': {'value': ['Foo']}}, + 'class': 'org.chromium.test.SampleTest', + 'superclass': 'junit.framework.TestCase', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + { + 'annotations': {'MediumTest': None}, + 'method': 'testMethod2', + }, + ], + }, + { + 'annotations': {'Feature': {'value': ['Bar']}}, + 'class': 'org.chromium.test.SampleTest2', + 'superclass': 'junit.framework.TestCase', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + ], + } + ] + + expected_tests = [ + { + 'annotations': { + 'Feature': { + 'value': ['Foo'] + }, + 'MediumTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'is_junit4': True, + 'method': 'testMethod2', + }, + ] + + o._excluded_annotations = 
[('SmallTest', None)] + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + + self.assertEqual(actual_tests, expected_tests) + + def testGetTests_excludedDoNotReviveAnnotation(self): + o = self.createTestInstance() + raw_tests = [{ + 'annotations': { + 'Feature': { + 'value': ['Foo'] + } + }, + 'class': + 'org.chromium.test.SampleTest', + 'superclass': + 'junit.framework.TestCase', + 'methods': [ + { + 'annotations': { + 'DisabledTest': None, + 'DoNotRevive': { + 'reason': 'sample reason' + }, + }, + 'method': 'testMethod1', + }, + { + 'annotations': { + 'FlakyTest': None, + }, + 'method': 'testMethod2', + }, + ], + }, { + 'annotations': { + 'Feature': { + 'value': ['Bar'] + } + }, + 'class': + 'org.chromium.test.SampleTest2', + 'superclass': + 'junit.framework.TestCase', + 'methods': [ + { + 'annotations': { + 'FlakyTest': None, + 'DoNotRevive': { + 'reason': 'sample reason' + }, + }, + 'method': 'testMethod1', + }, + ], + }, { + 'annotations': { + 'Feature': { + 'value': ['Baz'] + } + }, + 'class': + 'org.chromium.test.SampleTest3', + 'superclass': + 'junit.framework.TestCase', + 'methods': [ + { + 'annotations': { + 'FlakyTest': None, + 'Manual': { + 'message': 'sample message' + }, + }, + 'method': 'testMethod1', + }, + ], + }] + + expected_tests = [ + { + 'annotations': { + 'Feature': { + 'value': ['Foo'] + }, + 'FlakyTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'is_junit4': True, + 'method': 'testMethod2', + }, + ] + + o._excluded_annotations = [('DoNotRevive', None), ('Manual', None)] + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + + self.assertEqual(actual_tests, expected_tests) + + def testGetTests_annotationSimpleValueFilter(self): + o = self.createTestInstance() + raw_tests = [ + { + 'annotations': {'Feature': {'value': ['Foo']}}, + 'class': 'org.chromium.test.SampleTest', + 'superclass': 'junit.framework.TestCase', + 'methods': [ + { + 'annotations': { + 'SmallTest': None, + 'TestValue': '1', + }, + 'method': 'testMethod1', + }, + { + 'annotations': { + 'MediumTest': None, + 'TestValue': '2', + }, + 'method': 'testMethod2', + }, + ], + }, + { + 'annotations': {'Feature': {'value': ['Bar']}}, + 'class': 'org.chromium.test.SampleTest2', + 'superclass': 'junit.framework.TestCase', + 'methods': [ + { + 'annotations': { + 'SmallTest': None, + 'TestValue': '3', + }, + 'method': 'testMethod1', + }, + ], + } + ] + + expected_tests = [ + { + 'annotations': { + 'Feature': { + 'value': ['Foo'] + }, + 'SmallTest': None, + 'TestValue': '1', + }, + 'class': 'org.chromium.test.SampleTest', + 'is_junit4': True, + 'method': 'testMethod1', + }, + ] + + o._annotations = [('TestValue', '1')] + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + + self.assertEqual(actual_tests, expected_tests) + + def testGetTests_annotationDictValueFilter(self): + o = self.createTestInstance() + raw_tests = [ + { + 'annotations': {'Feature': {'value': ['Foo']}}, + 'class': 'org.chromium.test.SampleTest', + 'superclass': 'java.lang.Object', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + { + 'annotations': {'MediumTest': None}, + 'method': 'testMethod2', + }, + ], + }, + { + 'annotations': {'Feature': {'value': ['Bar']}}, + 'class': 'org.chromium.test.SampleTest2', + 'superclass': 'java.lang.Object', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + ], + } + ] + + expected_tests = [ + { + 'annotations': { + 
'Feature': {'value': ['Bar']}, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest2', + 'is_junit4': True, + 'method': 'testMethod1', + }, + ] + + o._annotations = [('Feature', 'Bar')] + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + + self.assertEqual(actual_tests, expected_tests) + + def testGetTestName(self): + test = { + 'annotations': { + 'RunWith': {'value': 'class J4Runner'}, + 'SmallTest': {}, + 'Test': {'expected': 'class org.junit.Test$None', + 'timeout': '0'}, + 'UiThreadTest': {}}, + 'class': 'org.chromium.TestA', + 'is_junit4': True, + 'method': 'testSimple'} + unqualified_class_test = { + 'class': test['class'].split('.')[-1], + 'method': test['method'] + } + + self.assertEqual(instrumentation_test_instance.GetTestName(test, sep='.'), + 'org.chromium.TestA.testSimple') + self.assertEqual( + instrumentation_test_instance.GetTestName(unqualified_class_test, + sep='.'), 'TestA.testSimple') + + def testGetUniqueTestName(self): + test = { + 'annotations': { + 'RunWith': {'value': 'class J4Runner'}, + 'SmallTest': {}, + 'Test': {'expected': 'class org.junit.Test$None', 'timeout': '0'}, + 'UiThreadTest': {}}, + 'class': 'org.chromium.TestA', + 'flags': ['enable_features=abc'], + 'is_junit4': True, + 'method': 'testSimple'} + self.assertEqual( + instrumentation_test_instance.GetUniqueTestName(test, sep='.'), + 'org.chromium.TestA.testSimple_with_enable_features=abc') + + def testGetTestNameWithoutParameterPostfix(self): + test = { + 'annotations': { + 'RunWith': {'value': 'class J4Runner'}, + 'SmallTest': {}, + 'Test': {'expected': 'class org.junit.Test$None', 'timeout': '0'}, + 'UiThreadTest': {}}, + 'class': 'org.chromium.TestA__sandbox_mode', + 'flags': 'enable_features=abc', + 'is_junit4': True, + 'method': 'testSimple'} + unqualified_class_test = { + 'class': test['class'].split('.')[-1], + 'method': test['method'] + } + self.assertEqual( + instrumentation_test_instance.GetTestNameWithoutParameterPostfix( + test, sep='.'), 'org.chromium.TestA') + self.assertEqual( + instrumentation_test_instance.GetTestNameWithoutParameterPostfix( + unqualified_class_test, sep='.'), 'TestA') + + def testGetTests_multipleAnnotationValuesRequested(self): + o = self.createTestInstance() + raw_tests = [ + { + 'annotations': {'Feature': {'value': ['Foo']}}, + 'class': 'org.chromium.test.SampleTest', + 'superclass': 'junit.framework.TestCase', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + { + 'annotations': { + 'Feature': {'value': ['Baz']}, + 'MediumTest': None, + }, + 'method': 'testMethod2', + }, + ], + }, + { + 'annotations': {'Feature': {'value': ['Bar']}}, + 'class': 'org.chromium.test.SampleTest2', + 'superclass': 'junit.framework.TestCase', + 'methods': [ + { + 'annotations': {'SmallTest': None}, + 'method': 'testMethod1', + }, + ], + } + ] + + expected_tests = [ + { + 'annotations': { + 'Feature': { + 'value': ['Baz'] + }, + 'MediumTest': None, + }, + 'class': 'org.chromium.test.SampleTest', + 'is_junit4': True, + 'method': 'testMethod2', + }, + { + 'annotations': { + 'Feature': { + 'value': ['Bar'] + }, + 'SmallTest': None, + }, + 'class': 'org.chromium.test.SampleTest2', + 'is_junit4': True, + 'method': 'testMethod1', + }, + ] + + o._annotations = [('Feature', 'Bar'), ('Feature', 'Baz')] + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + + self.assertEqual(actual_tests, expected_tests) + + def testGenerateTestResults_noStatus(self): + results = 
instrumentation_test_instance.GenerateTestResults( + None, None, [], 1000, None, None) + self.assertEqual([], results) + + def testGenerateTestResults_testPassed(self): + statuses = [ + (1, { + 'class': 'test.package.TestClass', + 'test': 'testMethod', + }), + (0, { + 'class': 'test.package.TestClass', + 'test': 'testMethod', + }), + ] + results = instrumentation_test_instance.GenerateTestResults( + None, None, statuses, 1000, None, None) + self.assertEqual(1, len(results)) + self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType()) + + def testGenerateTestResults_testSkipped_true(self): + statuses = [ + (1, { + 'class': 'test.package.TestClass', + 'test': 'testMethod', + }), + (0, { + 'test_skipped': 'true', + 'class': 'test.package.TestClass', + 'test': 'testMethod', + }), + (0, { + 'class': 'test.package.TestClass', + 'test': 'testMethod', + }), + ] + results = instrumentation_test_instance.GenerateTestResults( + None, None, statuses, 1000, None, None) + self.assertEqual(1, len(results)) + self.assertEqual(base_test_result.ResultType.SKIP, results[0].GetType()) + + def testGenerateTestResults_testSkipped_false(self): + statuses = [ + (1, { + 'class': 'test.package.TestClass', + 'test': 'testMethod', + }), + (0, { + 'test_skipped': 'false', + }), + (0, { + 'class': 'test.package.TestClass', + 'test': 'testMethod', + }), + ] + results = instrumentation_test_instance.GenerateTestResults( + None, None, statuses, 1000, None, None) + self.assertEqual(1, len(results)) + self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType()) + + def testGenerateTestResults_testFailed(self): + statuses = [ + (1, { + 'class': 'test.package.TestClass', + 'test': 'testMethod', + }), + (-2, { + 'class': 'test.package.TestClass', + 'test': 'testMethod', + }), + ] + results = instrumentation_test_instance.GenerateTestResults( + None, None, statuses, 1000, None, None) + self.assertEqual(1, len(results)) + self.assertEqual(base_test_result.ResultType.FAIL, results[0].GetType()) + + def testGenerateTestResults_testUnknownException(self): + stacktrace = 'long\nstacktrace' + statuses = [ + (1, { + 'class': 'test.package.TestClass', + 'test': 'testMethod', + }), + (-1, { + 'class': 'test.package.TestClass', + 'test': 'testMethod', + 'stack': stacktrace, + }), + ] + results = instrumentation_test_instance.GenerateTestResults( + None, None, statuses, 1000, None, None) + self.assertEqual(1, len(results)) + self.assertEqual(base_test_result.ResultType.FAIL, results[0].GetType()) + self.assertEqual(stacktrace, results[0].GetLog()) + + def testGenerateJUnitTestResults_testSkipped_true(self): + statuses = [ + (1, { + 'class': 'test.package.TestClass', + 'test': 'testMethod', + }), + (-3, { + 'class': 'test.package.TestClass', + 'test': 'testMethod', + }), + ] + results = instrumentation_test_instance.GenerateTestResults( + None, None, statuses, 1000, None, None) + self.assertEqual(1, len(results)) + self.assertEqual(base_test_result.ResultType.SKIP, results[0].GetType()) + + def testParameterizedCommandLineFlagsSwitches(self): + o = self.createTestInstance() + raw_tests = [{ + 'annotations': { + 'ParameterizedCommandLineFlags$Switches': { + 'value': ['enable-features=abc', 'enable-features=def'] + } + }, + 'class': + 'org.chromium.test.SampleTest', + 'superclass': + 'java.lang.Object', + 'methods': [ + { + 'annotations': { + 'SmallTest': None + }, + 'method': 'testMethod1', + }, + { + 'annotations': { + 'MediumTest': None, + 'ParameterizedCommandLineFlags$Switches': { + 'value': 
['enable-features=ghi', 'enable-features=jkl'] + }, + }, + 'method': 'testMethod2', + }, + { + 'annotations': { + 'MediumTest': None, + 'ParameterizedCommandLineFlags$Switches': { + 'value': [] + }, + }, + 'method': 'testMethod3', + }, + { + 'annotations': { + 'MediumTest': None, + 'SkipCommandLineParameterization': None, + }, + 'method': 'testMethod4', + }, + ], + }] + + expected_tests = [ + { + 'annotations': {}, + 'class': 'org.chromium.test.SampleTest', + 'flags': ['--enable-features=abc', '--enable-features=def'], + 'is_junit4': True, + 'method': 'testMethod1' + }, + { + 'annotations': {}, + 'class': 'org.chromium.test.SampleTest', + 'flags': ['--enable-features=ghi', '--enable-features=jkl'], + 'is_junit4': True, + 'method': 'testMethod2' + }, + { + 'annotations': {}, + 'class': 'org.chromium.test.SampleTest', + 'is_junit4': True, + 'method': 'testMethod3' + }, + { + 'annotations': {}, + 'class': 'org.chromium.test.SampleTest', + 'is_junit4': True, + 'method': 'testMethod4' + }, + ] + for i in range(4): + expected_tests[i]['annotations'].update(raw_tests[0]['annotations']) + expected_tests[i]['annotations'].update( + raw_tests[0]['methods'][i]['annotations']) + + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + self.assertEqual(actual_tests, expected_tests) + + def testParameterizedCommandLineFlags(self): + o = self.createTestInstance() + raw_tests = [{ + 'annotations': { + 'ParameterizedCommandLineFlags': { + 'value': [ + { + 'ParameterizedCommandLineFlags$Switches': { + 'value': [ + 'enable-features=abc', + 'force-fieldtrials=trial/group' + ], + } + }, + { + 'ParameterizedCommandLineFlags$Switches': { + 'value': [ + 'enable-features=abc2', + 'force-fieldtrials=trial/group2' + ], + } + }, + ], + }, + }, + 'class': + 'org.chromium.test.SampleTest', + 'superclass': + 'java.lang.Object', + 'methods': [ + { + 'annotations': { + 'SmallTest': None + }, + 'method': 'testMethod1', + }, + { + 'annotations': { + 'MediumTest': None, + 'ParameterizedCommandLineFlags': { + 'value': [{ + 'ParameterizedCommandLineFlags$Switches': { + 'value': ['enable-features=def'] + } + }], + }, + }, + 'method': 'testMethod2', + }, + { + 'annotations': { + 'MediumTest': None, + 'ParameterizedCommandLineFlags': { + 'value': [], + }, + }, + 'method': 'testMethod3', + }, + { + 'annotations': { + 'MediumTest': None, + 'SkipCommandLineParameterization': None, + }, + 'method': 'testMethod4', + }, + ], + }] + + expected_tests = [ + { + 'annotations': {}, + 'class': 'org.chromium.test.SampleTest', + 'flags': + ['--enable-features=abc', '--force-fieldtrials=trial/group'], + 'is_junit4': True, + 'method': 'testMethod1' + }, + { + 'annotations': {}, + 'class': 'org.chromium.test.SampleTest', + 'flags': ['--enable-features=def'], + 'is_junit4': True, + 'method': 'testMethod2' + }, + { + 'annotations': {}, + 'class': 'org.chromium.test.SampleTest', + 'is_junit4': True, + 'method': 'testMethod3' + }, + { + 'annotations': {}, + 'class': 'org.chromium.test.SampleTest', + 'is_junit4': True, + 'method': 'testMethod4' + }, + { + 'annotations': {}, + 'class': + 'org.chromium.test.SampleTest', + 'flags': [ + '--enable-features=abc2', + '--force-fieldtrials=trial/group2', + ], + 'is_junit4': + True, + 'method': + 'testMethod1' + }, + ] + for i in range(4): + expected_tests[i]['annotations'].update(raw_tests[0]['annotations']) + expected_tests[i]['annotations'].update( + raw_tests[0]['methods'][i]['annotations']) + expected_tests[4]['annotations'].update(raw_tests[0]['annotations']) + 
expected_tests[4]['annotations'].update( + raw_tests[0]['methods'][0]['annotations']) + + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + self.assertEqual(actual_tests, expected_tests) + + def testDifferentCommandLineParameterizations(self): + o = self.createTestInstance() + raw_tests = [{ + 'annotations': {}, + 'class': + 'org.chromium.test.SampleTest', + 'superclass': + 'java.lang.Object', + 'methods': [ + { + 'annotations': { + 'SmallTest': None, + 'ParameterizedCommandLineFlags': { + 'value': [ + { + 'ParameterizedCommandLineFlags$Switches': { + 'value': ['a1', 'a2'], + } + }, + ], + }, + }, + 'method': 'testMethod2', + }, + { + 'annotations': { + 'SmallTest': None, + 'ParameterizedCommandLineFlags$Switches': { + 'value': ['b1', 'b2'], + }, + }, + 'method': 'testMethod3', + }, + ], + }] + + expected_tests = [ + { + 'annotations': {}, + 'class': 'org.chromium.test.SampleTest', + 'flags': ['--a1', '--a2'], + 'is_junit4': True, + 'method': 'testMethod2' + }, + { + 'annotations': {}, + 'class': 'org.chromium.test.SampleTest', + 'flags': ['--b1', '--b2'], + 'is_junit4': True, + 'method': 'testMethod3' + }, + ] + for i in range(2): + expected_tests[i]['annotations'].update( + raw_tests[0]['methods'][i]['annotations']) + + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + self.assertEqual(actual_tests, expected_tests) + + def testMultipleCommandLineParameterizations_raises(self): + o = self.createTestInstance() + raw_tests = [ + { + 'annotations': { + 'ParameterizedCommandLineFlags': { + 'value': [ + { + 'ParameterizedCommandLineFlags$Switches': { + 'value': [ + 'enable-features=abc', + 'force-fieldtrials=trial/group', + ], + } + }, + ], + }, + }, + 'class': + 'org.chromium.test.SampleTest', + 'superclass': + 'java.lang.Object', + 'methods': [ + { + 'annotations': { + 'SmallTest': None, + 'ParameterizedCommandLineFlags$Switches': { + 'value': [ + 'enable-features=abc', + 'force-fieldtrials=trial/group', + ], + }, + }, + 'method': 'testMethod1', + }, + ], + }, + ] + + o._junit4_runner_class = 'J4Runner' + self.assertRaises( + instrumentation_test_instance.CommandLineParameterizationException, + o.ProcessRawTests, [raw_tests[0]]) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/android/pylib/instrumentation/json_perf_parser.py b/android/pylib/instrumentation/json_perf_parser.py new file mode 100644 index 000000000000..ef541f49a114 --- /dev/null +++ b/android/pylib/instrumentation/json_perf_parser.py @@ -0,0 +1,162 @@ +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +"""A helper module for parsing JSON objects from perf tests results.""" + + +import json + + +def GetAverageRunInfo(json_data, name): + """Summarizes TraceEvent JSON data for performance metrics. + + Example JSON Inputs (More tags can be added but these are required): + Measuring Duration: + [ + { "cat": "Java", + "ts": 10000000000, + "ph": "S", + "name": "TestTrace" + }, + { "cat": "Java", + "ts": 10000004000, + "ph": "F", + "name": "TestTrace" + }, + ... + ] + + Measuring Call Frequency (FPS): + [ + { "cat": "Java", + "ts": 10000000000, + "ph": "I", + "name": "TestTraceFPS" + }, + { "cat": "Java", + "ts": 10000004000, + "ph": "I", + "name": "TestTraceFPS" + }, + ... + ] + + Args: + json_data: A list of dictonaries each representing a JSON object. + name: The 'name' tag to filter on in the JSON file. 
+ + Returns: + A dictionary of result data with the following tags: + min: The minimum value tracked. + max: The maximum value tracked. + average: The average of all the values tracked. + count: The number of times the category/name pair was tracked. + type: The type of tracking ('Instant' for instant tags and 'Span' for + begin/end tags. + category: The passed in category filter. + name: The passed in name filter. + data_points: A list of all of the times used to generate this data. + units: The units for the values being reported. + + Raises: + Exception: if entry contains invalid data. + """ + + def EntryFilter(entry): + return entry['cat'] == 'Java' and entry['name'] == name + filtered_entries = [j for j in json_data if EntryFilter(j)] + + result = {} + + result['min'] = -1 + result['max'] = -1 + result['average'] = 0 + result['count'] = 0 + result['type'] = 'Unknown' + result['category'] = 'Java' + result['name'] = name + result['data_points'] = [] + result['units'] = '' + + total_sum = 0 + + last_val = 0 + val_type = None + for entry in filtered_entries: + if not val_type: + if 'mem' in entry: + val_type = 'mem' + + def GetVal(entry): + return entry['mem'] + + result['units'] = 'kb' + elif 'ts' in entry: + val_type = 'ts' + + def GetVal(entry): + return float(entry['ts']) / 1000.0 + + result['units'] = 'ms' + else: + raise Exception('Entry did not contain valid value info: %s' % entry) + + if not val_type in entry: + raise Exception('Entry did not contain expected value type "%s" ' + 'information: %s' % (val_type, entry)) + val = GetVal(entry) + if (entry['ph'] == 'S' and + (result['type'] == 'Unknown' or result['type'] == 'Span')): + result['type'] = 'Span' + last_val = val + elif ((entry['ph'] == 'F' and result['type'] == 'Span') or + (entry['ph'] == 'I' and (result['type'] == 'Unknown' or + result['type'] == 'Instant'))): + if last_val > 0: + delta = val - last_val + if result['min'] == -1 or result['min'] > delta: + result['min'] = delta + if result['max'] == -1 or result['max'] < delta: + result['max'] = delta + total_sum += delta + result['count'] += 1 + result['data_points'].append(delta) + if entry['ph'] == 'I': + result['type'] = 'Instant' + last_val = val + if result['count'] > 0: + result['average'] = total_sum / result['count'] + + return result + + +def GetAverageRunInfoFromJSONString(json_string, name): + """Returns the results from GetAverageRunInfo using a JSON string. + + Args: + json_string: The string containing JSON. + name: The 'name' tag to filter on in the JSON file. + + Returns: + See GetAverageRunInfo Returns section. + """ + return GetAverageRunInfo(json.loads(json_string), name) + + +def GetAverageRunInfoFromFile(json_file, name): + """Returns the results from GetAverageRunInfo using a JSON file. + + Args: + json_file: The path to a JSON file. + name: The 'name' tag to filter on in the JSON file. + + Returns: + See GetAverageRunInfo Returns section. + """ + with open(json_file, 'r') as f: + data = f.read() + perf = json.loads(data) + + return GetAverageRunInfo(perf, name) diff --git a/android/pylib/instrumentation/render_test.html.jinja b/android/pylib/instrumentation/render_test.html.jinja new file mode 100644 index 000000000000..81b85b78e321 --- /dev/null +++ b/android/pylib/instrumentation/render_test.html.jinja @@ -0,0 +1,40 @@ + + + {{ test_name }} + + + + Link to Golden (in repo)
+    <a download="failure_image" href="{{ failure_link }}">Download Failure Image (right click and 'Save link as')</a>
+    <table>
+      <tr>
+        <th>Failure</th>
+        <th>Golden</th>
+        <th>Diff</th>
+      </tr>
+      <tr>
+        <td><img src="{{ failure_link }}" /></td>
+        {% if golden_link %}
+        <td><img src="{{ golden_link }}" /></td>
+        <td><img src="{{ diff_link }}" /></td>
+        {% else %}
+        <td>No Golden Image.</td>
+        {% endif %}
+      </tr>
+    </table>
    + + diff --git a/android/pylib/instrumentation/test_result.py b/android/pylib/instrumentation/test_result.py new file mode 100644 index 000000000000..dc56605966a4 --- /dev/null +++ b/android/pylib/instrumentation/test_result.py @@ -0,0 +1,33 @@ +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +from pylib.base import base_test_result + + + +class InstrumentationTestResult(base_test_result.BaseTestResult): + """Result information for a single instrumentation test.""" + + def __init__(self, full_name, test_type, dur, log=''): + """Construct an InstrumentationTestResult object. + + Args: + full_name: Full name of the test. + test_type: Type of the test result as defined in ResultType. + dur: Duration of the test run in milliseconds. + log: A string listing any errors. + """ + super().__init__(full_name, test_type, dur, log) + name_pieces = full_name.rsplit('#') + if len(name_pieces) > 1: + self._test_name = name_pieces[1] + self._class_name = name_pieces[0] + else: + self._class_name = full_name + self._test_name = full_name + + def SetDuration(self, duration): + """Set the test duration.""" + self._duration = duration diff --git a/android/pylib/junit/__init__.py b/android/pylib/junit/__init__.py new file mode 100644 index 000000000000..d46d7b496679 --- /dev/null +++ b/android/pylib/junit/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. diff --git a/android/pylib/junit/junit_test_instance.py b/android/pylib/junit/junit_test_instance.py new file mode 100644 index 000000000000..f7bd49ad002c --- /dev/null +++ b/android/pylib/junit/junit_test_instance.py @@ -0,0 +1,87 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
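+# The class below is a thin snapshot of the parsed command-line arguments:
+# each read-only property simply mirrors one args attribute (args.test_suite
+# surfaces as .suite, args.shards as .shards, and so on).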
+ + +from pylib.base import test_instance +from pylib.utils import test_filter + + +class JunitTestInstance(test_instance.TestInstance): + + def __init__(self, args, _): + super().__init__() + + self._coverage_dir = args.coverage_dir + self._debug_socket = args.debug_socket + self._coverage_on_the_fly = args.coverage_on_the_fly + self._native_libs_dir = args.native_libs_dir + self._package_filter = args.package_filter + self._resource_apk = args.resource_apk + self._robolectric_runtime_deps_dir = args.robolectric_runtime_deps_dir + self._runner_filter = args.runner_filter + self._shards = args.shards + self._test_filters = test_filter.InitializeFiltersFromArgs(args) + self._has_literal_filters = (args.isolated_script_test_filters + or args.test_filters) + self._test_suite = args.test_suite + + #override + def TestType(self): + return 'junit' + + #override + def SetUp(self): + pass + + #override + def TearDown(self): + pass + + @property + def coverage_dir(self): + return self._coverage_dir + + @property + def coverage_on_the_fly(self): + return self._coverage_on_the_fly + + @property + def debug_socket(self): + return self._debug_socket + + @property + def native_libs_dir(self): + return self._native_libs_dir + + @property + def package_filter(self): + return self._package_filter + + @property + def resource_apk(self): + return self._resource_apk + + @property + def robolectric_runtime_deps_dir(self): + return self._robolectric_runtime_deps_dir + + @property + def runner_filter(self): + return self._runner_filter + + @property + def test_filters(self): + return self._test_filters + + @property + def has_literal_filters(self): + return self._has_literal_filters + + @property + def shards(self): + return self._shards + + @property + def suite(self): + return self._test_suite diff --git a/android/pylib/local/__init__.py b/android/pylib/local/__init__.py new file mode 100644 index 000000000000..d46d7b496679 --- /dev/null +++ b/android/pylib/local/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. diff --git a/android/pylib/local/device/__init__.py b/android/pylib/local/device/__init__.py new file mode 100644 index 000000000000..d46d7b496679 --- /dev/null +++ b/android/pylib/local/device/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. diff --git a/android/pylib/local/device/local_device_environment.py b/android/pylib/local/device/local_device_environment.py new file mode 100644 index 000000000000..a51f370b2b1a --- /dev/null +++ b/android/pylib/local/device/local_device_environment.py @@ -0,0 +1,354 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ + +import datetime +import functools +import logging +import os +import shutil +import tempfile +import threading + +import devil_chromium +from devil import base_error +from devil.android import device_denylist +from devil.android import device_errors +from devil.android import device_utils +from devil.android import logcat_monitor +from devil.android.sdk import adb_wrapper +from devil.utils import file_utils +from devil.utils import parallelizer +from pylib import constants +from pylib.constants import host_paths +from pylib.base import environment +from pylib.utils import instrumentation_tracing +from py_trace_event import trace_event + + +LOGCAT_FILTERS = [ + 'chromium:v', + 'cr_*:v', + 'DEBUG:I', + 'StrictMode:D', +] + +SYSTEM_USER_ID = 0 + + +def _DeviceCachePath(device): + file_name = 'device_cache_%s.json' % device.adb.GetDeviceSerial() + return os.path.join(constants.GetOutDirectory(), file_name) + + +def handle_shard_failures(f): + """A decorator that handles device failures for per-device functions. + + Args: + f: the function being decorated. The function must take at least one + argument, and that argument must be the device. + """ + return handle_shard_failures_with(None)(f) + + +# TODO(jbudorick): Refactor this to work as a decorator or context manager. +def handle_shard_failures_with(on_failure): + """A decorator that handles device failures for per-device functions. + + This calls on_failure in the event of a failure. + + Args: + f: the function being decorated. The function must take at least one + argument, and that argument must be the device. + on_failure: A binary function to call on failure. + """ + def decorator(f): + @functools.wraps(f) + def wrapper(dev, *args, **kwargs): + try: + return f(dev, *args, **kwargs) + except device_errors.CommandTimeoutError: + logging.exception('Shard timed out: %s(%s)', f.__name__, str(dev)) + except device_errors.DeviceUnreachableError: + logging.exception('Shard died: %s(%s)', f.__name__, str(dev)) + except base_error.BaseError: + logging.exception('Shard failed: %s(%s)', f.__name__, str(dev)) + except SystemExit: + logging.exception('Shard killed: %s(%s)', f.__name__, str(dev)) + raise + if on_failure: + on_failure(dev, f.__name__) + return None + + return wrapper + + return decorator + + +def place_nomedia_on_device(dev, device_root, run_as=None, as_root=False): + """Places .nomedia file in test data root. + + This helps to prevent system from scanning media files inside test data. + + Args: + dev: Device to place .nomedia file. + device_root: Base path on device to place .nomedia file. + """ + + dev.RunShellCommand(['mkdir', '-p', device_root], + run_as=run_as, + as_root=as_root, + check_return=True) + dev.WriteFile('%s/.nomedia' % device_root, + 'https://crbug.com/796640', + run_as=run_as, + as_root=as_root) + + +# TODO(1262303): After Telemetry is supported by python3 we can re-add +# super without arguments in this script. 
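+# Illustrative sketch (not part of the original change; `_example_get_abi`
+# and the lambda are hypothetical names): a per-device helper wrapped with
+# the decorator above. On a device error the wrapper logs the exception,
+# calls on_failure(device, function_name) and returns None for that device
+# rather than raising, so a single bad device does not abort the whole run.
+@handle_shard_failures_with(on_failure=lambda dev, fname: logging.warning(
+    'Giving up on %s after %s failed', str(dev), fname))
+def _example_get_abi(device):
+  return device.product_cpu_abi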
+# pylint: disable=super-with-arguments +class LocalDeviceEnvironment(environment.Environment): + + def __init__(self, args, output_manager, _error_func): + super(LocalDeviceEnvironment, self).__init__(output_manager) + self._current_try = 0 + self._denylist = (device_denylist.Denylist(args.denylist_file) + if args.denylist_file else None) + self._device_serials = args.test_devices + self._devices_lock = threading.Lock() + self._devices = None + self._concurrent_adb = args.enable_concurrent_adb + self._enable_device_cache = args.enable_device_cache + self._logcat_monitors = [] + self._logcat_output_dir = args.logcat_output_dir + self._logcat_output_file = args.logcat_output_file + self._max_tries = 1 + args.num_retries + self._preferred_abis = None + self._recover_devices = args.recover_devices + self._skip_clear_data = args.skip_clear_data + self._tool_name = args.tool + self._trace_output = None + if hasattr(args, 'trace_output'): + self._trace_output = args.trace_output + self._trace_all = None + if hasattr(args, 'trace_all'): + self._trace_all = args.trace_all + self._use_persistent_shell = args.use_persistent_shell + self._disable_test_server = args.disable_test_server + + devil_chromium.Initialize( + output_directory=constants.GetOutDirectory(), + adb_path=args.adb_path) + + # Some things such as Forwarder require ADB to be in the environment path, + # while others like Devil's bundletool.py require Java on the path. + adb_dir = os.path.dirname(adb_wrapper.AdbWrapper.GetAdbPath()) + if adb_dir and adb_dir not in os.environ['PATH'].split(os.pathsep): + os.environ['PATH'] = os.pathsep.join( + [adb_dir, host_paths.JAVA_PATH, os.environ['PATH']]) + + #override + def SetUp(self): + if self.trace_output and self._trace_all: + to_include = [r"pylib\..*", r"devil\..*", "__main__"] + to_exclude = ["logging"] + instrumentation_tracing.start_instrumenting(self.trace_output, to_include, + to_exclude) + elif self.trace_output: + self.EnableTracing() + + # Must be called before accessing |devices|. + def SetPreferredAbis(self, abis): + assert self._devices is None + self._preferred_abis = abis + + def _InitDevices(self): + device_arg = [] + if self._device_serials: + device_arg = self._device_serials + + self._devices = device_utils.DeviceUtils.HealthyDevices( + self._denylist, + retries=5, + enable_usb_resets=True, + enable_device_files_cache=self._enable_device_cache, + default_retries=self._max_tries - 1, + device_arg=device_arg, + abis=self._preferred_abis, + persistent_shell=self._use_persistent_shell) + + if self._logcat_output_file: + self._logcat_output_dir = tempfile.mkdtemp() + + @handle_shard_failures_with(on_failure=self.DenylistDevice) + def prepare_device(d): + d.WaitUntilFullyBooted() + if d.GetCurrentUser() != SYSTEM_USER_ID: + # Use system user to run tasks to avoid "/sdcard "accessing issue + # due to multiple-users. For details, see + # https://source.android.com/docs/devices/admin/multi-user-testing + logging.info('Switching to user with id %s', SYSTEM_USER_ID) + d.SwitchUser(SYSTEM_USER_ID) + + if self._enable_device_cache: + cache_path = _DeviceCachePath(d) + if os.path.exists(cache_path): + logging.info('Using device cache: %s', cache_path) + with open(cache_path) as f: + d.LoadCacheData(f.read()) + # Delete cached file so that any exceptions cause it to be cleared. 
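+          # (A fresh cache is written back during TearDown, so a run that
+          # crashes mid-way leaves no stale cache behind.)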
+ os.unlink(cache_path) + + if self._logcat_output_dir: + logcat_file = os.path.join( + self._logcat_output_dir, + '%s_%s' % (d.adb.GetDeviceSerial(), + datetime.datetime.utcnow().strftime('%Y%m%dT%H%M%S'))) + monitor = logcat_monitor.LogcatMonitor(d.adb, + clear=True, + output_file=logcat_file, + check_error=False) + self._logcat_monitors.append(monitor) + monitor.Start() + + self.parallel_devices.pMap(prepare_device) + + @property + def current_try(self): + return self._current_try + + def IncrementCurrentTry(self): + self._current_try += 1 + + def ResetCurrentTry(self): + self._current_try = 0 + + @property + def denylist(self): + return self._denylist + + @property + def concurrent_adb(self): + return self._concurrent_adb + + @property + def devices(self): + # Initialize lazily so that host-only tests do not fail when no devices are + # attached. + if self._devices is None: + self._InitDevices() + return self._devices + + @property + def max_tries(self): + return self._max_tries + + @property + def parallel_devices(self): + return parallelizer.SyncParallelizer(self.devices) + + @property + def recover_devices(self): + return self._recover_devices + + @property + def skip_clear_data(self): + return self._skip_clear_data + + @property + def tool(self): + return self._tool_name + + @property + def trace_output(self): + return self._trace_output + + @property + def disable_test_server(self): + return self._disable_test_server + + #override + def TearDown(self): + if self.trace_output and self._trace_all: + instrumentation_tracing.stop_instrumenting() + elif self.trace_output: + self.DisableTracing() + + # By default, teardown will invoke ADB. When receiving SIGTERM due to a + # timeout, there's a high probability that ADB is non-responsive. In these + # cases, sending an ADB command will potentially take a long time to time + # out. Before this happens, the process will be hard-killed for not + # responding to SIGTERM fast enough. + if self._received_sigterm: + return + + if not self._devices: + return + + @handle_shard_failures_with(on_failure=self.DenylistDevice) + def tear_down_device(d): + # Write the cache even when not using it so that it will be ready the + # first time that it is enabled. Writing it every time is also necessary + # so that an invalid cache can be flushed just by disabling it for one + # run. 
+ cache_path = _DeviceCachePath(d) + if os.path.exists(os.path.dirname(cache_path)): + with open(cache_path, 'w') as f: + f.write(d.DumpCacheData()) + logging.info('Wrote device cache: %s', cache_path) + else: + logging.warning( + 'Unable to write device cache as %s directory does not exist', + os.path.dirname(cache_path)) + + self.parallel_devices.pMap(tear_down_device) + + for m in self._logcat_monitors: + try: + m.Stop() + m.Close() + _, temp_path = tempfile.mkstemp() + with open(m.output_file, 'r') as infile: + with open(temp_path, 'w') as outfile: + for line in infile: + outfile.write('Device(%s) %s' % (m.adb.GetDeviceSerial(), line)) + shutil.move(temp_path, m.output_file) + except base_error.BaseError: + logging.exception('Failed to stop logcat monitor for %s', + m.adb.GetDeviceSerial()) + except IOError: + logging.exception('Failed to locate logcat for device %s', + m.adb.GetDeviceSerial()) + + if self._logcat_output_file: + file_utils.MergeFiles( + self._logcat_output_file, + [m.output_file for m in self._logcat_monitors + if os.path.exists(m.output_file)]) + shutil.rmtree(self._logcat_output_dir) + + def DenylistDevice(self, device, reason='local_device_failure'): + device_serial = device.adb.GetDeviceSerial() + if self._denylist: + self._denylist.Extend([device_serial], reason=reason) + with self._devices_lock: + self._devices = [d for d in self._devices if str(d) != device_serial] + logging.error('Device %s denylisted: %s', device_serial, reason) + if not self._devices: + raise device_errors.NoDevicesError( + 'All devices were denylisted due to errors') + + @staticmethod + def DisableTracing(): + if not trace_event.trace_is_enabled(): + logging.warning('Tracing is not running.') + else: + trace_event.trace_disable() + + def EnableTracing(self): + if trace_event.trace_is_enabled(): + logging.warning('Tracing is already running.') + else: + trace_event.trace_enable(self._trace_output) diff --git a/android/pylib/local/device/local_device_gtest_run.py b/android/pylib/local/device/local_device_gtest_run.py new file mode 100644 index 000000000000..796f614d78c8 --- /dev/null +++ b/android/pylib/local/device/local_device_gtest_run.py @@ -0,0 +1,970 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +import contextlib +import collections +import fnmatch +import itertools +import logging +import math +import os +import posixpath +import subprocess +import shutil +import time + +from six.moves import range # pylint: disable=redefined-builtin +from devil import base_error +from devil.android import crash_handler +from devil.android import device_errors +from devil.android import device_temp_file +from devil.android import logcat_monitor +from devil.android import ports +from devil.android.sdk import version_codes +from devil.utils import reraiser_thread +from incremental_install import installer +from pylib import constants +from pylib.base import base_test_result +from pylib.gtest import gtest_test_instance +from pylib.local import local_test_server_spawner +from pylib.local.device import local_device_environment +from pylib.local.device import local_device_test_run +from pylib.utils import google_storage_helper +from pylib.utils import logdog_helper +from py_trace_event import trace_event +from py_utils import contextlib_ext +from py_utils import tempfile_ext +import tombstones + +_MAX_INLINE_FLAGS_LENGTH = 50 # Arbitrarily chosen. 
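+# Flag strings longer than this are written to a temp file on the device and
+# passed via _EXTRA_COMMAND_LINE_FILE below; shorter ones are passed inline
+# through _EXTRA_COMMAND_LINE_FLAGS. See _ApkDelegate.Run.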
+_EXTRA_COMMAND_LINE_FILE = ( + 'org.chromium.native_test.NativeTest.CommandLineFile') +_EXTRA_COMMAND_LINE_FLAGS = ( + 'org.chromium.native_test.NativeTest.CommandLineFlags') +_EXTRA_COVERAGE_DEVICE_FILE = ( + 'org.chromium.native_test.NativeTest.CoverageDeviceFile') +_EXTRA_STDOUT_FILE = ( + 'org.chromium.native_test.NativeTestInstrumentationTestRunner' + '.StdoutFile') +_EXTRA_TEST = ( + 'org.chromium.native_test.NativeTestInstrumentationTestRunner' + '.Test') +_EXTRA_TEST_LIST = ( + 'org.chromium.native_test.NativeTestInstrumentationTestRunner' + '.TestList') + +# Used to identify the prefix in gtests. +_GTEST_PRETEST_PREFIX = 'PRE_' + +_SECONDS_TO_NANOS = int(1e9) + +# Tests that use SpawnedTestServer must run the LocalTestServerSpawner on the +# host machine. +# TODO(jbudorick): Move this up to the test instance if the net test server is +# handled outside of the APK for the remote_device environment. +_SUITE_REQUIRES_TEST_SERVER_SPAWNER = [ + 'components_browsertests', 'content_unittests', 'content_browsertests', + 'net_unittests', 'services_unittests', 'unit_tests' +] + +# These are use for code coverage. +_LLVM_PROFDATA_PATH = os.path.join(constants.DIR_SOURCE_ROOT, 'third_party', + 'llvm-build', 'Release+Asserts', 'bin', + 'llvm-profdata') +# Name of the file extension for profraw data files. +_PROFRAW_FILE_EXTENSION = 'profraw' +# Name of the file where profraw data files are merged. +_MERGE_PROFDATA_FILE_NAME = 'coverage_merged.' + _PROFRAW_FILE_EXTENSION + +# No-op context manager. If we used Python 3, we could change this to +# contextlib.ExitStack() +class _NullContextManager: + def __enter__(self): + pass + def __exit__(self, *args): + pass + + +def _GenerateSequentialFileNames(filename): + """Infinite generator of names: 'name.ext', 'name_1.ext', 'name_2.ext', ...""" + yield filename + base, ext = os.path.splitext(filename) + for i in itertools.count(1): + yield '%s_%d%s' % (base, i, ext) + + +def _ExtractTestsFromFilters(gtest_filters): + """Returns the list of tests specified by the given filters. + + Returns: + None if the device should be queried for the test list instead. + """ + # - means exclude filter. + for gtest_filter in gtest_filters: + if '-' in gtest_filter: + return None + # Empty means all tests + if not any(gtest_filters): + return None + + if len(gtest_filters) == 1: + patterns = gtest_filters[0].split(':') + # For a single pattern, allow it even if it has a wildcard so long as the + # wildcard comes at the end and there is at least one . to prove the scope + # is not too large. + # This heuristic is not necessarily faster, but normally is. + if len(patterns) == 1 and patterns[0].endswith('*'): + no_suffix = patterns[0].rstrip('*') + if '*' not in no_suffix and '.' in no_suffix: + return patterns + + all_patterns = set(gtest_filters[0].split(':')) + for gtest_filter in gtest_filters: + patterns = gtest_filter.split(':') + for pattern in patterns: + if '*' in pattern: + return None + all_patterns = all_patterns.intersection(set(patterns)) + return list(all_patterns) + + +def _GetDeviceTimeoutMultiplier(): + # Emulated devices typically run 20-150x slower than real-time. + # Give a way to control this through the DEVICE_TIMEOUT_MULTIPLIER + # environment variable. + multiplier = os.getenv("DEVICE_TIMEOUT_MULTIPLIER") + if multiplier: + return int(multiplier) + return 1 + + +def _MergeCoverageFiles(coverage_dir, profdata_dir): + """Merge coverage data files. + + Each instrumentation activity generates a separate profraw data file. 
This + merges all profraw files in profdata_dir into a single file in + coverage_dir. This happens after each test, rather than waiting until after + all tests are ran to reduce the memory footprint used by all the profraw + files. + + Args: + coverage_dir: The path to the coverage directory. + profdata_dir: The directory where the profraw data file(s) are located. + + Return: + None + """ + # profdata_dir may not exist if pulling coverage files failed. + if not os.path.exists(profdata_dir): + logging.debug('Profraw directory does not exist.') + return + + merge_file = os.path.join(coverage_dir, _MERGE_PROFDATA_FILE_NAME) + profraw_files = [ + os.path.join(profdata_dir, f) for f in os.listdir(profdata_dir) + if f.endswith(_PROFRAW_FILE_EXTENSION) + ] + + try: + logging.debug('Merging target profraw files into merged profraw file.') + subprocess_cmd = [ + _LLVM_PROFDATA_PATH, + 'merge', + '-o', + merge_file, + '-sparse=true', + ] + # Grow the merge file by merging it with itself and the new files. + if os.path.exists(merge_file): + subprocess_cmd.append(merge_file) + subprocess_cmd.extend(profraw_files) + output = subprocess.check_output(subprocess_cmd) + logging.debug('Merge output: %s', output) + except subprocess.CalledProcessError: + # Don't raise error as that will kill the test run. When code coverage + # generates a report, that will raise the error in the report generation. + logging.error( + 'Failed to merge target profdata files to create merged profraw file.') + + # Free up memory space on bot as all data is in the merge file. + for f in profraw_files: + os.remove(f) + + +def _PullCoverageFiles(device, device_coverage_dir, output_dir): + """Pulls coverage files on device to host directory. + + Args: + device: The working device. + device_coverage_dir: The directory to store coverage data on device. + output_dir: The output directory on host. + """ + try: + if not os.path.exists(output_dir): + os.makedirs(output_dir) + device.PullFile(device_coverage_dir, output_dir) + if not os.listdir(os.path.join(output_dir, 'profraw')): + logging.warning('No coverage data was generated for this run') + except (OSError, base_error.BaseError) as e: + logging.warning('Failed to handle coverage data after tests: %s', e) + finally: + device.RemovePath(device_coverage_dir, force=True, recursive=True) + + +def _GetDeviceCoverageDir(device): + """Gets the directory to generate coverage data on device. + + Args: + device: The working device. + + Returns: + The directory path on the device. + """ + return posixpath.join(device.GetExternalStoragePath(), 'chrome', 'test', + 'coverage', 'profraw') + + +def _GetLLVMProfilePath(device_coverage_dir, suite, coverage_index): + """Gets 'LLVM_PROFILE_FILE' environment variable path. + + Dumping data to ONLY 1 file may cause warning and data overwrite in + browsertests, so that pattern "%2m" is used to expand to 2 raw profiles + at runtime. + + Args: + device_coverage_dir: The directory to generate data on device. + suite: Test suite name. + coverage_index: The incremental index for this test suite. + + Returns: + The path pattern for environment variable 'LLVM_PROFILE_FILE'. 
+ """ + return posixpath.join(device_coverage_dir, + '_'.join([suite, + str(coverage_index), '%2m.profraw'])) + + +class _ApkDelegate: + def __init__(self, test_instance, tool): + self._activity = test_instance.activity + self._apk_helper = test_instance.apk_helper + self._test_apk_incremental_install_json = ( + test_instance.test_apk_incremental_install_json) + self._package = test_instance.package + self._runner = test_instance.runner + self._permissions = test_instance.permissions + self._suite = test_instance.suite + self._component = '%s/%s' % (self._package, self._runner) + self._extras = test_instance.extras + self._wait_for_java_debugger = test_instance.wait_for_java_debugger + self._tool = tool + self._coverage_dir = test_instance.coverage_dir + self._coverage_index = 0 + self._use_existing_test_data = test_instance.use_existing_test_data + + def GetTestDataRoot(self, device): + # pylint: disable=no-self-use + return posixpath.join(device.GetExternalStoragePath(), + 'chromium_tests_root') + + def Install(self, device): + if self._use_existing_test_data: + return + if self._test_apk_incremental_install_json: + installer.Install(device, self._test_apk_incremental_install_json, + apk=self._apk_helper, permissions=self._permissions) + else: + device.Install( + self._apk_helper, + allow_downgrade=True, + reinstall=True, + permissions=self._permissions) + + def ResultsDirectory(self, device): # pylint: disable=no-self-use + return device.GetExternalStoragePath() + + def Run(self, test, device, flags=None, **kwargs): + extras = dict(self._extras) + device_api = device.build_version_sdk + + if self._coverage_dir and device_api >= version_codes.LOLLIPOP: + device_coverage_dir = _GetDeviceCoverageDir(device) + extras[_EXTRA_COVERAGE_DEVICE_FILE] = _GetLLVMProfilePath( + device_coverage_dir, self._suite, self._coverage_index) + self._coverage_index += 1 + + if ('timeout' in kwargs + and gtest_test_instance.EXTRA_SHARD_NANO_TIMEOUT not in extras): + # Make sure the instrumentation doesn't kill the test before the + # scripts do. The provided timeout value is in seconds, but the + # instrumentation deals with nanoseconds because that's how Android + # handles time. + extras[gtest_test_instance.EXTRA_SHARD_NANO_TIMEOUT] = int( + kwargs['timeout'] * _SECONDS_TO_NANOS) + + command_line_file = _NullContextManager() + if flags: + if len(flags) > _MAX_INLINE_FLAGS_LENGTH: + command_line_file = device_temp_file.DeviceTempFile(device.adb) + device.WriteFile(command_line_file.name, '_ %s' % flags) + extras[_EXTRA_COMMAND_LINE_FILE] = command_line_file.name + else: + extras[_EXTRA_COMMAND_LINE_FLAGS] = flags + + test_list_file = _NullContextManager() + if test: + if len(test) > 1: + test_list_file = device_temp_file.DeviceTempFile(device.adb) + device.WriteFile(test_list_file.name, '\n'.join(test)) + extras[_EXTRA_TEST_LIST] = test_list_file.name + else: + extras[_EXTRA_TEST] = test[0] + + # We need to use GetAppWritablePath here instead of GetExternalStoragePath + # since we will not have yet applied legacy storage permission workarounds + # on R+. 
+ stdout_file = device_temp_file.DeviceTempFile( + device.adb, dir=device.GetAppWritablePath(), suffix='.gtest_out') + extras[_EXTRA_STDOUT_FILE] = stdout_file.name + + if self._wait_for_java_debugger: + cmd = ['am', 'set-debug-app', '-w', self._package] + device.RunShellCommand(cmd, check_return=True) + logging.warning('*' * 80) + logging.warning('Waiting for debugger to attach to process: %s', + self._package) + logging.warning('*' * 80) + + with command_line_file, test_list_file, stdout_file: + try: + device.StartInstrumentation( + self._component, extras=extras, raw=False, **kwargs) + except device_errors.CommandFailedError: + logging.exception('gtest shard failed.') + except device_errors.CommandTimeoutError: + logging.exception('gtest shard timed out.') + except device_errors.DeviceUnreachableError: + logging.exception('gtest shard device unreachable.') + except Exception: + device.ForceStop(self._package) + raise + finally: + if self._coverage_dir and device_api >= version_codes.LOLLIPOP: + if not os.path.isdir(self._coverage_dir): + os.makedirs(self._coverage_dir) + # TODO(crbug.com/1179004) Use _MergeCoverageFiles when llvm-profdata + # not found is fixed. + _PullCoverageFiles( + device, device_coverage_dir, + os.path.join(self._coverage_dir, str(self._coverage_index))) + + return device.ReadFile(stdout_file.name).splitlines() + + def PullAppFiles(self, device, files, directory): + device_dir = device.GetApplicationDataDirectory(self._package) + host_dir = os.path.join(directory, str(device)) + for f in files: + device_file = posixpath.join(device_dir, f) + host_file = os.path.join(host_dir, *f.split(posixpath.sep)) + for host_file in _GenerateSequentialFileNames(host_file): + if not os.path.exists(host_file): + break + device.PullFile(device_file, host_file) + + def Clear(self, device): + device.ClearApplicationState(self._package, permissions=self._permissions) + + +class _ExeDelegate: + + def __init__(self, tr, test_instance, tool): + self._host_dist_dir = test_instance.exe_dist_dir + self._exe_file_name = os.path.basename( + test_instance.exe_dist_dir)[:-len('__dist')] + self._device_dist_dir = posixpath.join( + constants.TEST_EXECUTABLE_DIR, + os.path.basename(test_instance.exe_dist_dir)) + self._test_run = tr + self._tool = tool + self._suite = test_instance.suite + self._coverage_dir = test_instance.coverage_dir + self._coverage_index = 0 + + def GetTestDataRoot(self, device): + # pylint: disable=no-self-use + # pylint: disable=unused-argument + return posixpath.join(constants.TEST_EXECUTABLE_DIR, 'chromium_tests_root') + + def Install(self, device): + # TODO(jbudorick): Look into merging this with normal data deps pushing if + # executables become supported on nonlocal environments. + device.PushChangedFiles([(self._host_dist_dir, self._device_dist_dir)], + delete_device_stale=True) + + def ResultsDirectory(self, device): + # pylint: disable=no-self-use + # pylint: disable=unused-argument + return constants.TEST_EXECUTABLE_DIR + + def Run(self, test, device, flags=None, **kwargs): + tool = self._test_run.GetTool(device).GetTestWrapper() + if tool: + cmd = [tool] + else: + cmd = [] + cmd.append(posixpath.join(self._device_dist_dir, self._exe_file_name)) + + if test: + cmd.append('--gtest_filter=%s' % ':'.join(test)) + if flags: + # TODO(agrieve): This won't work if multiple flags are passed. 
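+      # (`flags` arrives here as a single space-joined string, so several
+      # switches would be appended as one malformed argv entry.)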
+ cmd.append(flags) + cwd = constants.TEST_EXECUTABLE_DIR + + env = { + 'LD_LIBRARY_PATH': self._device_dist_dir + } + + if self._coverage_dir: + device_coverage_dir = _GetDeviceCoverageDir(device) + env['LLVM_PROFILE_FILE'] = _GetLLVMProfilePath( + device_coverage_dir, self._suite, self._coverage_index) + self._coverage_index += 1 + + if self._tool != 'asan': + env['UBSAN_OPTIONS'] = constants.UBSAN_OPTIONS + + try: + gcov_strip_depth = os.environ['NATIVE_COVERAGE_DEPTH_STRIP'] + external = device.GetExternalStoragePath() + env['GCOV_PREFIX'] = '%s/gcov' % external + env['GCOV_PREFIX_STRIP'] = gcov_strip_depth + except (device_errors.CommandFailedError, KeyError): + pass + + # Executable tests return a nonzero exit code on test failure, which is + # fine from the test runner's perspective; thus check_return=False. + output = device.RunShellCommand( + cmd, cwd=cwd, env=env, check_return=False, large_output=True, **kwargs) + + if self._coverage_dir: + _PullCoverageFiles( + device, device_coverage_dir, + os.path.join(self._coverage_dir, str(self._coverage_index))) + + return output + + def PullAppFiles(self, device, files, directory): + pass + + def Clear(self, device): + device.KillAll(self._exe_file_name, + blocking=True, + timeout=30 * _GetDeviceTimeoutMultiplier(), + quiet=True) + + +class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun): + + def __init__(self, env, test_instance): + assert isinstance(env, local_device_environment.LocalDeviceEnvironment) + assert isinstance(test_instance, gtest_test_instance.GtestTestInstance) + super().__init__(env, test_instance) + + if self._test_instance.apk_helper: + self._installed_packages = [ + self._test_instance.apk_helper.GetPackageName() + ] + + if self._test_instance.apk: + self._delegate = _ApkDelegate(self._test_instance, env.tool) + elif self._test_instance.exe_dist_dir: + self._delegate = _ExeDelegate(self, self._test_instance, self._env.tool) + if self._test_instance.isolated_script_test_perf_output: + self._test_perf_output_filenames = _GenerateSequentialFileNames( + self._test_instance.isolated_script_test_perf_output) + else: + self._test_perf_output_filenames = itertools.repeat(None) + self._crashes = set() + self._servers = collections.defaultdict(list) + + #override + def TestPackage(self): + return self._test_instance.suite + + #override + def SetUp(self): + @local_device_environment.handle_shard_failures_with( + on_failure=self._env.DenylistDevice) + @trace_event.traced + def individual_device_set_up(device, host_device_tuples): + def install_apk(dev): + # Install test APK. + self._delegate.Install(dev) + + def push_test_data(dev): + if self._test_instance.use_existing_test_data: + return + # Push data dependencies. + device_root = self._delegate.GetTestDataRoot(dev) + host_device_tuples_substituted = [ + (h, local_device_test_run.SubstituteDeviceRoot(d, device_root)) + for h, d in host_device_tuples] + local_device_environment.place_nomedia_on_device(dev, device_root) + dev.PushChangedFiles( + host_device_tuples_substituted, + delete_device_stale=True, + # Some gtest suites, e.g. unit_tests, have data dependencies that + # can take longer than the default timeout to push. See + # crbug.com/791632 for context. 
+ timeout=600 * math.ceil(_GetDeviceTimeoutMultiplier() / 10)) + if not host_device_tuples: + dev.RemovePath(device_root, force=True, recursive=True, rename=True) + dev.RunShellCommand(['mkdir', '-p', device_root], check_return=True) + + def init_tool_and_start_servers(dev): + tool = self.GetTool(dev) + tool.CopyFiles(dev) + tool.SetupEnvironment() + + if self._env.disable_test_server: + logging.warning('Not starting test server. Some tests may fail.') + return + + try: + # See https://crbug.com/1030827. + # This is a hack that may break in the future. We're relying on the + # fact that adb doesn't use ipv6 for it's server, and so doesn't + # listen on ipv6, but ssh remote forwarding does. 5037 is the port + # number adb uses for its server. + if b"[::1]:5037" in subprocess.check_output( + "ss -o state listening 'sport = 5037'", shell=True): + logging.error( + 'Test Server cannot be started with a remote-forwarded adb ' + 'server. Continuing anyways, but some tests may fail.') + return + except subprocess.CalledProcessError: + pass + + self._servers[str(dev)] = [] + if self.TestPackage() in _SUITE_REQUIRES_TEST_SERVER_SPAWNER: + self._servers[str(dev)].append( + local_test_server_spawner.LocalTestServerSpawner( + ports.AllocateTestServerPort(), dev, tool)) + + for s in self._servers[str(dev)]: + s.SetUp() + + def bind_crash_handler(step, dev): + return lambda: crash_handler.RetryOnSystemCrash(step, dev) + + steps = [ + bind_crash_handler(s, device) + for s in (install_apk, push_test_data, init_tool_and_start_servers)] + if self._env.concurrent_adb: + reraiser_thread.RunAsync(steps) + else: + for step in steps: + step() + + self._env.parallel_devices.pMap( + individual_device_set_up, + self._test_instance.GetDataDependencies()) + + #override + def _ShouldShardTestsForDevices(self): + """Shard tests across several devices. + + Returns: + True if tests should be sharded across several devices, + False otherwise. + """ + return True + + #override + def _CreateShardsForDevices(self, tests): + """Create shards of tests to run on devices. + + Args: + tests: List containing tests or test batches. + + Returns: + List of test batches. + """ + # _crashes are tests that might crash and make the tests in the same shard + # following the crashed testcase not run. + # Thus we need to create separate shards for each crashed testcase, + # so that other tests can be run. + device_count = len(self._env.devices) + shards = [] + + # Add shards with only one suspect testcase. + shards += [[crash] for crash in self._crashes if crash in tests] + + # Delete suspect testcase from tests. + tests = [test for test in tests if not test in self._crashes] + + # Sort tests by hash. + # TODO(crbug.com/1257820): Add sorting logic back to _PartitionTests. + tests = self._SortTests(tests) + + max_shard_size = self._test_instance.test_launcher_batch_limit + + shards.extend(self._PartitionTests(tests, device_count, max_shard_size)) + return shards + + #override + def _GetTests(self): + if self._test_instance.extract_test_list_from_filter: + # When the exact list of tests to run is given via command-line (e.g. when + # locally iterating on a specific test), skip querying the device (which + # takes ~3 seconds). + tests = _ExtractTestsFromFilters(self._test_instance.gtest_filters) + if tests: + return tests + + # Even when there's only one device, it still makes sense to retrieve the + # test list so that tests can be split up and run in batches rather than all + # at once (since test output is not streamed). 
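+    # A device that fails to list tests is denylisted by the decorator below
+    # rather than failing the run; the remaining devices are still queried.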
+ @local_device_environment.handle_shard_failures_with( + on_failure=self._env.DenylistDevice) + def list_tests(dev): + timeout = 30 * _GetDeviceTimeoutMultiplier() + retries = 1 + if self._test_instance.wait_for_java_debugger: + timeout = None + + flags = [ + f for f in self._test_instance.flags if f not in [ + '--wait-for-debugger', '--wait-for-java-debugger', + '--gtest_also_run_disabled_tests' + ] + ] + flags.append('--gtest_list_tests') + + # TODO(crbug.com/726880): Remove retries when no longer necessary. + for i in range(0, retries+1): + logging.info('flags:') + for f in flags: + logging.info(' %s', f) + + with self._ArchiveLogcat(dev, 'list_tests'): + raw_test_list = crash_handler.RetryOnSystemCrash( + lambda d: self._delegate.Run( + None, d, flags=' '.join(flags), timeout=timeout), + device=dev) + + tests = gtest_test_instance.ParseGTestListTests(raw_test_list) + if not tests: + logging.info('No tests found. Output:') + for l in raw_test_list: + logging.info(' %s', l) + if i < retries: + logging.info('Retrying...') + else: + break + return tests + + # Query all devices in case one fails. + test_lists = self._env.parallel_devices.pMap(list_tests).pGet(None) + + # If all devices failed to list tests, raise an exception. + # Check that tl is not None and is not empty. + if all(not tl for tl in test_lists): + raise device_errors.CommandFailedError( + 'Failed to list tests on any device') + tests = list(sorted(set().union(*[set(tl) for tl in test_lists if tl]))) + tests = self._test_instance.FilterTests(tests) + tests = self._ApplyExternalSharding( + tests, self._test_instance.external_shard_index, + self._test_instance.total_external_shards) + return tests + + #override + def _GroupTests(self, tests): + pre_tests = dict() + other_tests = [] + for test in tests: + test_name_start = max(test.find('.') + 1, 0) + test_name = test[test_name_start:] + if test_name_start == 0 or not test_name.startswith( + _GTEST_PRETEST_PREFIX): + other_tests.append(test) + else: + test_suite = test[:test_name_start - 1] + trim_test = test + trim_tests = [test] + + while test_name.startswith(_GTEST_PRETEST_PREFIX): + test_name = test_name[len(_GTEST_PRETEST_PREFIX):] + trim_test = '%s.%s' % (test_suite, test_name) + trim_tests.append(trim_test) + + if not trim_test in pre_tests or len( + pre_tests[trim_test]) < len(trim_tests): + pre_tests[trim_test] = trim_tests + + all_tests = [] + for other_test in other_tests: + if not other_test in pre_tests: + all_tests.append(other_test) + + # TODO(crbug.com/1257820): Add logic to support grouping tests. + # Once grouping logic is added, switch to 'append' from 'extend'. + for _, test_list in pre_tests.items(): + all_tests.extend(test_list) + + return all_tests + + def _UploadTestArtifacts(self, device, test_artifacts_dir): + # TODO(jbudorick): Reconcile this with the output manager once + # https://codereview.chromium.org/2933993002/ lands. 
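+    # Flow: pull the artifact directory off the device, zip it, upload the
+    # zip to gs_test_artifacts_bucket, and return the link (attached to each
+    # result later). Returns None when no artifact directory is in use.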
+ if test_artifacts_dir: + with tempfile_ext.NamedTemporaryDirectory() as test_artifacts_host_dir: + device.PullFile(test_artifacts_dir.name, test_artifacts_host_dir) + with tempfile_ext.NamedTemporaryDirectory() as temp_zip_dir: + zip_base_name = os.path.join(temp_zip_dir, 'test_artifacts') + test_artifacts_zip = shutil.make_archive( + zip_base_name, 'zip', test_artifacts_host_dir) + link = google_storage_helper.upload( + google_storage_helper.unique_name( + 'test_artifacts', device=device), + test_artifacts_zip, + bucket='%s/test_artifacts' % ( + self._test_instance.gs_test_artifacts_bucket)) + logging.info('Uploading test artifacts to %s.', link) + return link + return None + + def _PullRenderTestOutput(self, device, render_test_output_device_dir): + # We pull the render tests into a temp directory then copy them over + # individually. Otherwise we end up with a temporary directory name + # in the host output directory. + with tempfile_ext.NamedTemporaryDirectory() as tmp_host_dir: + try: + device.PullFile(render_test_output_device_dir, tmp_host_dir) + except device_errors.CommandFailedError: + logging.exception('Failed to pull render test output dir %s', + render_test_output_device_dir) + temp_host_dir = os.path.join( + tmp_host_dir, os.path.basename(render_test_output_device_dir)) + for output_file in os.listdir(temp_host_dir): + src_path = os.path.join(temp_host_dir, output_file) + dst_path = os.path.join(self._test_instance.render_test_output_dir, + output_file) + shutil.move(src_path, dst_path) + + @contextlib.contextmanager + def _ArchiveLogcat(self, device, test): + if isinstance(test, str): + desc = test + else: + desc = hash(tuple(test)) + + stream_name = 'logcat_%s_shard%s_%s_%s' % ( + desc, self._test_instance.external_shard_index, + time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()), device.serial) + + logcat_file = None + logmon = None + try: + with self._env.output_manager.ArchivedTempfile(stream_name, + 'logcat') as logcat_file: + with logcat_monitor.LogcatMonitor( + device.adb, + filter_specs=local_device_environment.LOGCAT_FILTERS, + output_file=logcat_file.name, + check_error=False) as logmon: + with contextlib_ext.Optional(trace_event.trace(str(test)), + self._env.trace_output): + yield logcat_file + finally: + if logmon: + logmon.Close() + if logcat_file and logcat_file.Link(): + logging.critical('Logcat saved to %s', logcat_file.Link()) + + #override + def _RunTest(self, device, test): + # Run the test. 
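+    # The base shard timeout is scaled for the tool in use (e.g. sanitizer
+    # wrappers run slower) and for emulators via DEVICE_TIMEOUT_MULTIPLIER.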
+ timeout = (self._test_instance.shard_timeout * + self.GetTool(device).GetTimeoutScale() * + _GetDeviceTimeoutMultiplier()) + if self._test_instance.wait_for_java_debugger: + timeout = None + if self._test_instance.store_tombstones: + tombstones.ClearAllTombstones(device) + test_perf_output_filename = next(self._test_perf_output_filenames) + + if self._test_instance.isolated_script_test_output: + suffix = '.json' + else: + suffix = '.xml' + + with device_temp_file.DeviceTempFile( + adb=device.adb, + dir=self._delegate.ResultsDirectory(device), + suffix=suffix) as device_tmp_results_file: + with contextlib_ext.Optional( + device_temp_file.NamedDeviceTemporaryDirectory( + adb=device.adb, dir='/sdcard/'), + self._test_instance.gs_test_artifacts_bucket) as test_artifacts_dir: + with (contextlib_ext.Optional( + device_temp_file.DeviceTempFile( + adb=device.adb, dir=self._delegate.ResultsDirectory(device)), + test_perf_output_filename)) as isolated_script_test_perf_output: + with contextlib_ext.Optional( + device_temp_file.NamedDeviceTemporaryDirectory(adb=device.adb, + dir='/sdcard/'), + self._test_instance.render_test_output_dir + ) as render_test_output_dir: + + flags = list(self._test_instance.flags) + if self._test_instance.enable_xml_result_parsing: + flags.append('--gtest_output=xml:%s' % + device_tmp_results_file.name) + + if self._test_instance.gs_test_artifacts_bucket: + flags.append('--test_artifacts_dir=%s' % test_artifacts_dir.name) + + if self._test_instance.isolated_script_test_output: + flags.append('--isolated-script-test-output=%s' % + device_tmp_results_file.name) + + if test_perf_output_filename: + flags.append('--isolated_script_test_perf_output=%s' % + isolated_script_test_perf_output.name) + + if self._test_instance.render_test_output_dir: + flags.append('--render-test-output-dir=%s' % + render_test_output_dir.name) + + logging.info('flags:') + for f in flags: + logging.info(' %s', f) + + with self._ArchiveLogcat(device, test) as logcat_file: + output = self._delegate.Run(test, + device, + flags=' '.join(flags), + timeout=timeout, + retries=0) + + if self._test_instance.enable_xml_result_parsing: + try: + gtest_xml = device.ReadFile(device_tmp_results_file.name) + except device_errors.CommandFailedError: + logging.exception('Failed to pull gtest results XML file %s', + device_tmp_results_file.name) + gtest_xml = None + + if self._test_instance.isolated_script_test_output: + try: + gtest_json = device.ReadFile(device_tmp_results_file.name) + except device_errors.CommandFailedError: + logging.exception('Failed to pull gtest results JSON file %s', + device_tmp_results_file.name) + gtest_json = None + + if test_perf_output_filename: + try: + device.PullFile(isolated_script_test_perf_output.name, + test_perf_output_filename) + except device_errors.CommandFailedError: + logging.exception('Failed to pull chartjson results %s', + isolated_script_test_perf_output.name) + + test_artifacts_url = self._UploadTestArtifacts( + device, test_artifacts_dir) + + if render_test_output_dir: + self._PullRenderTestOutput(device, render_test_output_dir.name) + + for s in self._servers[str(device)]: + s.Reset() + if self._test_instance.app_files: + self._delegate.PullAppFiles(device, self._test_instance.app_files, + self._test_instance.app_file_dir) + if not self._env.skip_clear_data: + self._delegate.Clear(device) + + for l in output: + logging.info(l) + + # Parse the output. + # TODO(jbudorick): Transition test scripts away from parsing stdout. 
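+    # Results come from one of three sources, in order of preference: the
+    # XML file written via --gtest_output, the JSON file written via
+    # --isolated-script-test-output, or parsed stdout as a fallback.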
+ if self._test_instance.enable_xml_result_parsing: + results = gtest_test_instance.ParseGTestXML(gtest_xml) + elif self._test_instance.isolated_script_test_output: + results = gtest_test_instance.ParseGTestJSON(gtest_json) + else: + results = gtest_test_instance.ParseGTestOutput( + output, self._test_instance.symbolizer, device.product_cpu_abi) + + tombstones_url = None + for r in results: + if logcat_file: + r.SetLink('logcat', logcat_file.Link()) + + if self._test_instance.gs_test_artifacts_bucket: + r.SetLink('test_artifacts', test_artifacts_url) + + if r.GetType() == base_test_result.ResultType.CRASH: + self._crashes.add(r.GetName()) + if self._test_instance.store_tombstones: + if not tombstones_url: + resolved_tombstones = tombstones.ResolveTombstones( + device, + resolve_all_tombstones=True, + include_stack_symbols=False, + wipe_tombstones=True) + stream_name = 'tombstones_%s_%s' % ( + time.strftime('%Y%m%dT%H%M%S', time.localtime()), + device.serial) + tombstones_url = logdog_helper.text( + stream_name, '\n'.join(resolved_tombstones)) + r.SetLink('tombstones', tombstones_url) + + tests_stripped_disabled_prefix = set() + for t in test: + tests_stripped_disabled_prefix.add( + gtest_test_instance.TestNameWithoutDisabledPrefix(t)) + not_run_tests = tests_stripped_disabled_prefix.difference( + set(r.GetName() for r in results)) + + if self._test_instance.extract_test_list_from_filter: + # A test string might end with a * in this mode, and so may not match any + # r.GetName() for the set difference. It's possible a filter like foo.* + # can match two tests, ie foo.baz and foo.foo. + # When running it's possible Foo.baz is ran, foo.foo is not, but the test + # list foo.* will not be reran as at least one result matched it. + not_run_tests = { + t + for t in not_run_tests + if not any(fnmatch.fnmatch(r.GetName(), t) for r in results) + } + + return results, list(not_run_tests) if results else None + + #override + def TearDown(self): + # By default, teardown will invoke ADB. When receiving SIGTERM due to a + # timeout, there's a high probability that ADB is non-responsive. In these + # cases, sending an ADB command will potentially take a long time to time + # out. Before this happens, the process will be hard-killed for not + # responding to SIGTERM fast enough. + if self._received_sigterm: + return + + @local_device_environment.handle_shard_failures + @trace_event.traced + def individual_device_tear_down(dev): + for s in self._servers.get(str(dev), []): + s.TearDown() + + tool = self.GetTool(dev) + tool.CleanUpEnvironment() + + self._env.parallel_devices.pMap(individual_device_tear_down) diff --git a/android/pylib/local/device/local_device_gtest_run_test.py b/android/pylib/local/device/local_device_gtest_run_test.py new file mode 100755 index 000000000000..5a485c6b31b2 --- /dev/null +++ b/android/pylib/local/device/local_device_gtest_run_test.py @@ -0,0 +1,118 @@ +#!/usr/bin/env vpython3 +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
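+# (Worked example of the fnmatch step at the end of the previous file: with
+#  test == ['Foo.*'] and a single result named 'Foo.baz',
+#  fnmatch.fnmatch('Foo.baz', 'Foo.*') is True, so 'Foo.*' is removed from
+#  not_run_tests even though a second match like 'Foo.foo' never ran.)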
+"""Tests for local_device_gtest_test_run.""" + +# pylint: disable=protected-access + + +import os +import tempfile +import unittest + +from pylib.gtest import gtest_test_instance +from pylib.local.device import local_device_environment +from pylib.local.device import local_device_gtest_run +from py_utils import tempfile_ext + +import mock # pylint: disable=import-error + + +def isSliceInList(s, l): + lenOfSlice = len(s) + return any(s == l[i:lenOfSlice + i] for i in range(len(l) - lenOfSlice + 1)) + + +class LocalDeviceGtestRunTest(unittest.TestCase): + def setUp(self): + self._obj = local_device_gtest_run.LocalDeviceGtestRun( + mock.MagicMock(spec=local_device_environment.LocalDeviceEnvironment), + mock.MagicMock(spec=gtest_test_instance.GtestTestInstance)) + + def testExtractTestsFromFilter(self): + # Checks splitting by colons. + self.assertEqual( + set([ + 'm4e3', + 'p51', + 'b17', + ]), + set(local_device_gtest_run._ExtractTestsFromFilters(['b17:m4e3:p51']))) + # Checks the '-' sign. + self.assertIsNone(local_device_gtest_run._ExtractTestsFromFilters(['-mk2'])) + # Checks the more than one asterick. + self.assertIsNone( + local_device_gtest_run._ExtractTestsFromFilters(['.mk2*:.M67*'])) + # Checks just an asterick without a period + self.assertIsNone(local_device_gtest_run._ExtractTestsFromFilters(['M67*'])) + # Checks an asterick at the end with a period. + self.assertEqual(['.M67*'], + local_device_gtest_run._ExtractTestsFromFilters(['.M67*'])) + # Checks multiple filters intersect + self.assertEqual(['m4e3'], + local_device_gtest_run._ExtractTestsFromFilters( + ['b17:m4e3:p51', 'b17:m4e3', 'm4e3:p51'])) + + def testGetLLVMProfilePath(self): + path = local_device_gtest_run._GetLLVMProfilePath('test_dir', 'sr71', '5') + self.assertEqual(path, os.path.join('test_dir', 'sr71_5_%2m.profraw')) + + @mock.patch('subprocess.check_output') + def testMergeCoverageFiles(self, mock_sub): + with tempfile_ext.NamedTemporaryDirectory() as cov_tempd: + pro_tempd = os.path.join(cov_tempd, 'profraw') + os.mkdir(pro_tempd) + profdata = tempfile.NamedTemporaryFile( + dir=pro_tempd, + delete=False, + suffix=local_device_gtest_run._PROFRAW_FILE_EXTENSION) + local_device_gtest_run._MergeCoverageFiles(cov_tempd, pro_tempd) + # Merged file should be deleted. 
+ self.assertFalse(os.path.exists(profdata.name)) + self.assertTrue(mock_sub.called) + + @mock.patch('pylib.utils.google_storage_helper.upload') + def testUploadTestArtifacts(self, mock_gsh): + link = self._obj._UploadTestArtifacts(mock.MagicMock(), None) + self.assertFalse(mock_gsh.called) + self.assertIsNone(link) + + result = 'A/10/warthog/path' + mock_gsh.return_value = result + with tempfile_ext.NamedTemporaryFile() as temp_f: + link = self._obj._UploadTestArtifacts(mock.MagicMock(), temp_f) + self.assertTrue(mock_gsh.called) + self.assertEqual(result, link) + + def testGroupTests(self): + test = [ + "TestClass1.testcase1", + "TestClass1.otherTestCase", + "TestClass1.PRE_testcase1", + "TestClass1.abc_testcase2", + "TestClass1.PRE_PRE_testcase1", + "TestClass1.PRE_abc_testcase2", + "TestClass1.PRE_PRE_abc_testcase2", + ] + expectedTestcase1 = [ + "TestClass1.PRE_PRE_testcase1", + "TestClass1.PRE_testcase1", + "TestClass1.testcase1", + ] + expectedTestcase2 = [ + "TestClass1.PRE_PRE_abc_testcase2", + "TestClass1.PRE_abc_testcase2", + "TestClass1.abc_testcase2", + ] + expectedOtherTestcase = [ + "TestClass1.otherTestCase", + ] + actualTestCase = self._obj._GroupTests(test) + self.assertTrue(isSliceInList(expectedTestcase1, actualTestCase)) + self.assertTrue(isSliceInList(expectedTestcase2, actualTestCase)) + self.assertTrue(isSliceInList(expectedOtherTestcase, actualTestCase)) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/android/pylib/local/device/local_device_instrumentation_test_run.py b/android/pylib/local/device/local_device_instrumentation_test_run.py new file mode 100644 index 000000000000..f479007a292f --- /dev/null +++ b/android/pylib/local/device/local_device_instrumentation_test_run.py @@ -0,0 +1,1718 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
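+
+# Overview: SetUp() installs the test APK (and the APK under test) and pushes
+# data dependencies, _RunTest() drives one 'am instrument' invocation per test
+# or per batch and parses the raw output into base_test_result objects, and
+# TearDown() restores device state.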
+ + +import collections +import contextlib +import copy +import hashlib +import json +import logging +import os +import posixpath +import re +import shutil +import sys +import tempfile +import time + +from six.moves import range # pylint: disable=redefined-builtin +from six.moves import zip # pylint: disable=redefined-builtin +from devil import base_error +from devil.android import apk_helper +from devil.android import crash_handler +from devil.android import device_errors +from devil.android import device_temp_file +from devil.android import flag_changer +from devil.android.sdk import shared_prefs +from devil.android.sdk import version_codes +from devil.android import logcat_monitor +from devil.android.tools import system_app +from devil.android.tools import webview_app +from devil.utils import reraiser_thread +from incremental_install import installer +from pylib import constants +from pylib import valgrind_tools +from pylib.base import base_test_result +from pylib.base import output_manager +from pylib.constants import host_paths +from pylib.instrumentation import instrumentation_test_instance +from pylib.local.device import local_device_environment +from pylib.local.device import local_device_test_run +from pylib.output import remote_output_manager +from pylib.utils import chrome_proxy_utils +from pylib.utils import gold_utils +from pylib.utils import instrumentation_tracing +from pylib.utils import shared_preference_utils +from py_trace_event import trace_event +from py_trace_event import trace_time +from py_utils import contextlib_ext +from py_utils import tempfile_ext +import tombstones + +with host_paths.SysPath( + os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party'), 0): + import jinja2 # pylint: disable=import-error + import markupsafe # pylint: disable=import-error,unused-import + + +_JINJA_TEMPLATE_DIR = os.path.join( + host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'pylib', 'instrumentation') +_JINJA_TEMPLATE_FILENAME = 'render_test.html.jinja' + +_WPR_GO_LINUX_X86_64_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, + 'third_party', 'webpagereplay', 'bin', + 'linux', 'x86_64', 'wpr') + +_TAG = 'test_runner_py' + +TIMEOUT_ANNOTATIONS = [ + ('Manual', 10 * 60 * 60), + ('IntegrationTest', 10 * 60), + ('External', 10 * 60), + ('EnormousTest', 5 * 60), + ('LargeTest', 2 * 60), + ('MediumTest', 30), + ('SmallTest', 10), +] + +# Account for Instrumentation and process init overhead. +FIXED_TEST_TIMEOUT_OVERHEAD = 60 + +# 30 minute max timeout for an instrumentation invocation to avoid shard +# timeouts when tests never finish. The shard timeout is currently 60 minutes, +# so this needs to be less than that. +MAX_BATCH_TEST_TIMEOUT = 30 * 60 + +LOGCAT_FILTERS = ['*:e', 'chromium:v', 'cr_*:v', 'DEBUG:I', + 'StrictMode:D', '%s:I' % _TAG] + +EXTRA_SCREENSHOT_FILE = ( + 'org.chromium.base.test.ScreenshotOnFailureStatement.ScreenshotFile') + +EXTRA_UI_CAPTURE_DIR = ( + 'org.chromium.base.test.util.Screenshooter.ScreenshotDir') + +EXTRA_TRACE_FILE = ('org.chromium.base.test.BaseJUnit4ClassRunner.TraceFile') + +_EXTRA_TEST_LIST = ( + 'org.chromium.base.test.BaseChromiumAndroidJUnitRunner.TestList') + +_EXTRA_TEST_IS_UNIT = ( + 'org.chromium.base.test.BaseChromiumAndroidJUnitRunner.IsUnitTest') + +_EXTRA_PACKAGE_UNDER_TEST = ('org.chromium.chrome.test.pagecontroller.rules.' 
+ 'ChromeUiApplicationTestRule.PackageUnderTest') + +FEATURE_ANNOTATION = 'Feature' +RENDER_TEST_FEATURE_ANNOTATION = 'RenderTest' +WPR_ARCHIVE_FILE_PATH_ANNOTATION = 'WPRArchiveDirectory' +WPR_ARCHIVE_NAME_ANNOTATION = 'WPRArchiveDirectory$ArchiveName' +WPR_RECORD_REPLAY_TEST_FEATURE_ANNOTATION = 'WPRRecordReplayTest' + +_DEVICE_GOLD_DIR = 'skia_gold' +# A map of Android product models to SDK ints. +RENDER_TEST_MODEL_SDK_CONFIGS = { + # Android x86 emulator. + 'Android SDK built for x86': [23, 24], + # We would like this to be supported, but it is currently too prone to + # introducing flakiness due to a combination of Gold and Chromium issues. + # See crbug.com/1233700 and skbug.com/12149 for more information. + # 'Pixel 2': [28], +} + +_BATCH_SUFFIX = '_batch' +# If the batch is too big it starts to fail for command line length reasons. +_LOCAL_TEST_BATCH_MAX_GROUP_SIZE = 200 + + +@contextlib.contextmanager +def _LogTestEndpoints(device, test_name): + device.RunShellCommand( + ['log', '-p', 'i', '-t', _TAG, 'START %s' % test_name], + check_return=True) + try: + yield + finally: + device.RunShellCommand( + ['log', '-p', 'i', '-t', _TAG, 'END %s' % test_name], + check_return=True) + + +@contextlib.contextmanager +def _VoiceInteractionService(device, use_voice_interaction_service): + def set_voice_interaction_service(service): + device.RunShellCommand( + ['settings', 'put', 'secure', 'voice_interaction_service', service]) + + default_voice_interaction_service = None + try: + default_voice_interaction_service = device.RunShellCommand( + ['settings', 'get', 'secure', 'voice_interaction_service'], + single_line=True) + + set_voice_interaction_service(use_voice_interaction_service) + yield + finally: + set_voice_interaction_service(default_voice_interaction_service) + + +def DismissCrashDialogs(device): + # Dismiss any error dialogs. Limit the number in case we have an error + # loop or we are failing to dismiss. + packages = set() + try: + for _ in range(10): + package = device.DismissCrashDialogIfNeeded(timeout=10, retries=1) + if not package: + break + packages.add(package) + except device_errors.CommandFailedError: + logging.exception('Error while attempting to dismiss crash dialog.') + return packages + + +_CURRENT_FOCUS_CRASH_RE = re.compile( + r'\s*mCurrentFocus.*Application (Error|Not Responding): (\S+)}') + + +def _GetTargetPackageName(test_apk): + # apk_under_test does not work for smoke tests, where it is set to an + # apk that is not listed as the targetPackage in the test apk's manifest. + return test_apk.GetAllInstrumentations()[0]['android:targetPackage'] + + +class LocalDeviceInstrumentationTestRun( + local_device_test_run.LocalDeviceTestRun): + def __init__(self, env, test_instance): + super().__init__(env, test_instance) + self._chrome_proxy = None + self._context_managers = collections.defaultdict(list) + self._flag_changers = {} + self._render_tests_device_output_dir = None + self._shared_prefs_to_restore = [] + self._skia_gold_session_manager = None + self._skia_gold_work_dir = None + self._target_package = _GetTargetPackageName(test_instance.test_apk) + + #override + def TestPackage(self): + return self._test_instance.suite + + def _GetDataStorageRootDirectory(self, device): + if self._test_instance.store_data_in_app_directory: + # TODO(rmhasan): Add check to makes sure api level > 27. Selinux + # policy on Oreo does not allow app to read files from app data dir + # that were not put there by the app. 
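+    # A sketch of the check the TODO above asks for (hypothetical, not part
+    # of this change): fail fast on pre-P devices, where SELinux policy keeps
+    # the app from reading files that adb pushed into its data directory:
+    #   assert device.build_version_sdk > 27, (
+    #       'store_data_in_app_directory requires an API level above 27')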
+ return device.GetApplicationDataDirectory(self._target_package) + return device.GetExternalStoragePath() + + #override + def SetUp(self): + + @local_device_environment.handle_shard_failures_with( + self._env.DenylistDevice) + @trace_event.traced + def individual_device_set_up(device, host_device_tuples): + steps = [] + + if self._test_instance.replace_system_package: + @trace_event.traced + def replace_package(dev): + # We need the context manager to be applied before modifying any + # shared preference files in case the replacement APK needs to be + # set up, and it needs to be applied while the test is running. + # Thus, it needs to be applied early during setup, but must still be + # applied during _RunTest, which isn't possible using 'with' without + # applying the context manager up in test_runner. Instead, we + # manually invoke its __enter__ and __exit__ methods in setup and + # teardown. + system_app_context = system_app.ReplaceSystemApp( + dev, self._test_instance.replace_system_package) + # Pylint is not smart enough to realize that this field has + # an __enter__ method, and will complain loudly. + # pylint: disable=no-member + system_app_context.__enter__() + # pylint: enable=no-member + self._context_managers[str(dev)].append(system_app_context) + + steps.append(replace_package) + + if self._test_instance.system_packages_to_remove: + + @trace_event.traced + def remove_packages(dev): + logging.info('Attempting to remove system packages %s', + self._test_instance.system_packages_to_remove) + system_app.RemoveSystemApps( + dev, self._test_instance.system_packages_to_remove) + logging.info('Done removing system packages') + + # This should be at the front in case we're removing the package to make + # room for another APK installation later on. Since we disallow + # concurrent adb with this option specified, this should be safe. 
+ steps.insert(0, remove_packages) + + def install_helper(apk, + modules=None, + fake_modules=None, + permissions=None, + additional_locales=None, + instant_app=False): + + @instrumentation_tracing.no_tracing + @trace_event.traced + def install_helper_internal(d, apk_path=None): + # pylint: disable=unused-argument + d.Install( + apk, + modules=modules, + fake_modules=fake_modules, + permissions=permissions, + additional_locales=additional_locales, + instant_app=instant_app, + force_queryable=self._test_instance.IsApkForceQueryable(apk)) + + return install_helper_internal + + def install_apex_helper(apex): + @instrumentation_tracing.no_tracing + @trace_event.traced + def install_helper_internal(d, apk_path=None): + # pylint: disable=unused-argument + d.InstallApex(apex) + + return install_helper_internal + + def incremental_install_helper(apk, json_path, permissions): + + @trace_event.traced + def incremental_install_helper_internal(d, apk_path=None): + # pylint: disable=unused-argument + installer.Install(d, json_path, apk=apk, permissions=permissions) + + return incremental_install_helper_internal + + steps.extend( + install_apex_helper(apex) + for apex in self._test_instance.additional_apexs) + + steps.extend( + install_helper(apk, instant_app=self._test_instance.IsApkInstant(apk)) + for apk in self._test_instance.additional_apks) + + permissions = self._test_instance.test_apk.GetPermissions() + if self._test_instance.test_apk_incremental_install_json: + if self._test_instance.test_apk_as_instant: + raise Exception('Test APK cannot be installed as an instant ' + 'app if it is incremental') + + steps.append( + incremental_install_helper( + self._test_instance.test_apk, + self._test_instance.test_apk_incremental_install_json, + permissions)) + else: + steps.append( + install_helper(self._test_instance.test_apk, + permissions=permissions, + instant_app=self._test_instance.test_apk_as_instant)) + + # We'll potentially need the package names later for setting app + # compatibility workarounds. + for apk in (self._test_instance.additional_apks + + [self._test_instance.test_apk]): + self._installed_packages.append(apk_helper.GetPackageName(apk)) + + if self._test_instance.use_webview_provider: + + @trace_event.traced + def use_webview_provider(dev): + # We need the context manager to be applied before modifying any + # shared preference files in case the replacement APK needs to be + # set up, and it needs to be applied while the test is running. + # Thus, it needs to be applied early during setup, but must still be + # applied during _RunTest, which isn't possible using 'with' without + # applying the context manager up in test_runner. Instead, we + # manually invoke its __enter__ and __exit__ methods in setup and + # teardown. + # We do this after installing additional APKs so that + # we can install trichrome library before installing the webview + # provider + webview_context = webview_app.UseWebViewProvider( + dev, self._test_instance.use_webview_provider) + # Pylint is not smart enough to realize that this field has + # an __enter__ method, and will complain loudly. 
+ # pylint: disable=no-member + webview_context.__enter__() + # pylint: enable=no-member + self._context_managers[str(dev)].append(webview_context) + + steps.append(use_webview_provider) + + if self._test_instance.use_voice_interaction_service: + + @trace_event.traced + def use_voice_interaction_service(device): + voice_interaction_service_context = _VoiceInteractionService( + device, self._test_instance.use_voice_interaction_service) + # Pylint is not smart enough to realize that this field has + # an __enter__ method, and will complain loudly. + # pylint: disable=no-member + voice_interaction_service_context.__enter__() + # pylint: enable=no-member + self._context_managers[str(device)].append( + voice_interaction_service_context) + + steps.append(use_voice_interaction_service) + + # The apk under test needs to be installed last since installing other + # apks after will unintentionally clear the fake module directory. + # TODO(wnwen): Make this more robust, fix crbug.com/1010954. + if self._test_instance.apk_under_test: + self._installed_packages.append( + apk_helper.GetPackageName(self._test_instance.apk_under_test)) + permissions = self._test_instance.apk_under_test.GetPermissions() + if self._test_instance.apk_under_test_incremental_install_json: + steps.append( + incremental_install_helper( + self._test_instance.apk_under_test, + self._test_instance.apk_under_test_incremental_install_json, + permissions)) + else: + steps.append( + install_helper(self._test_instance.apk_under_test, + self._test_instance.modules, + self._test_instance.fake_modules, permissions, + self._test_instance.additional_locales)) + + # Execute any custom setup shell commands + if self._test_instance.run_setup_commands: + + @trace_event.traced + def run_setup_commands(dev): + for cmd in self._test_instance.run_setup_commands: + logging.info('Running custom setup shell command: %s', cmd) + dev.RunShellCommand(cmd, shell=True, check_return=True) + + steps.append(run_setup_commands) + + @trace_event.traced + def set_debug_app(dev): + # Set debug app in order to enable reading command line flags on user + # builds + cmd = ['am', 'set-debug-app', '--persistent'] + if self._test_instance.wait_for_java_debugger: + cmd.append('-w') + cmd.append(self._target_package) + dev.RunShellCommand(cmd, check_return=True) + + @trace_event.traced + def edit_shared_prefs(dev): + for setting in self._test_instance.edit_shared_prefs: + shared_pref = shared_prefs.SharedPrefs( + dev, setting['package'], setting['filename'], + use_encrypted_path=setting.get('supports_encrypted_path', False)) + pref_to_restore = copy.copy(shared_pref) + pref_to_restore.Load() + self._shared_prefs_to_restore.append(pref_to_restore) + + shared_preference_utils.ApplySharedPreferenceSetting( + shared_pref, setting) + + @trace_event.traced + def approve_app_links(dev): + self._ToggleAppLinks(dev, 'STATE_APPROVED') + + @trace_event.traced + def set_vega_permissions(dev): + # Normally, installation of VrCore automatically grants storage + # permissions. However, since VrCore is part of the system image on + # the Vega standalone headset, we don't install the APK as part of test + # setup. Instead, grant the permissions here so that it can take + # screenshots. 
+ if dev.product_name == 'vega': + dev.GrantPermissions('com.google.vr.vrcore', [ + 'android.permission.WRITE_EXTERNAL_STORAGE', + 'android.permission.READ_EXTERNAL_STORAGE' + ]) + + @instrumentation_tracing.no_tracing + def push_test_data(dev): + test_data_root_dir = posixpath.join( + self._GetDataStorageRootDirectory(dev), 'chromium_tests_root') + host_device_tuples_substituted = [ + (h, + local_device_test_run.SubstituteDeviceRoot(d, test_data_root_dir)) + for h, d in host_device_tuples + ] + logging.info('Pushing data dependencies.') + for h, d in host_device_tuples_substituted: + logging.debug(' %r -> %r', h, d) + + as_root = self._test_instance.store_data_in_app_directory + local_device_environment.place_nomedia_on_device(dev, + test_data_root_dir, + as_root=as_root) + dev.PushChangedFiles(host_device_tuples_substituted, + delete_device_stale=True, + as_root=as_root) + + if not host_device_tuples_substituted: + dev.RunShellCommand(['rm', '-rf', test_data_root_dir], + check_return=True, + as_root=as_root) + dev.RunShellCommand(['mkdir', '-p', test_data_root_dir], + check_return=True, + as_root=as_root) + + @trace_event.traced + def create_flag_changer(dev): + if self._test_instance.flags: + self._CreateFlagChangerIfNeeded(dev) + logging.debug('Attempting to set flags: %r', + self._test_instance.flags) + self._flag_changers[str(dev)].AddFlags(self._test_instance.flags) + + valgrind_tools.SetChromeTimeoutScale( + dev, self._test_instance.timeout_scale) + + steps += [ + set_debug_app, edit_shared_prefs, approve_app_links, push_test_data, + create_flag_changer, set_vega_permissions, DismissCrashDialogs + ] + + def bind_crash_handler(step, dev): + return lambda: crash_handler.RetryOnSystemCrash(step, dev) + + steps = [bind_crash_handler(s, device) for s in steps] + + try: + if self._env.concurrent_adb: + reraiser_thread.RunAsync(steps) + else: + for step in steps: + step() + if self._test_instance.store_tombstones: + tombstones.ClearAllTombstones(device) + except device_errors.CommandFailedError: + if not device.IsOnline(): + raise + + # A bugreport can be large and take a while to generate, so only capture + # one if we're using a remote manager. + if isinstance( + self._env.output_manager, + remote_output_manager.RemoteOutputManager): + logging.error( + 'Error when setting up device for tests. Taking a bugreport for ' + 'investigation. This may take a while...') + report_name = '%s.bugreport' % device.serial + with self._env.output_manager.ArchivedTempfile( + report_name, 'bug_reports') as report_file: + device.TakeBugReport(report_file.name) + logging.error('Bug report saved to %s', report_file.Link()) + raise + + self._env.parallel_devices.pMap( + individual_device_set_up, + self._test_instance.GetDataDependencies()) + # Created here instead of on a per-test basis so that the downloaded + # expectations can be re-used between tests, saving a significant amount + # of time. + self._skia_gold_work_dir = tempfile.mkdtemp() + self._skia_gold_session_manager = gold_utils.AndroidSkiaGoldSessionManager( + self._skia_gold_work_dir, self._test_instance.skia_gold_properties) + if self._test_instance.wait_for_java_debugger: + logging.warning('*' * 80) + logging.warning('Waiting for debugger to attach to process: %s', + self._target_package) + logging.warning('*' * 80) + + #override + def TearDown(self): + shutil.rmtree(self._skia_gold_work_dir) + self._skia_gold_work_dir = None + self._skia_gold_session_manager = None + # By default, teardown will invoke ADB. 
When receiving SIGTERM due to a + # timeout, there's a high probability that ADB is non-responsive. In these + # cases, sending an ADB command will potentially take a long time to time + # out. Before this happens, the process will be hard-killed for not + # responding to SIGTERM fast enough. + if self._received_sigterm: + return + + @local_device_environment.handle_shard_failures_with( + self._env.DenylistDevice) + @trace_event.traced + def individual_device_tear_down(dev): + if str(dev) in self._flag_changers: + self._flag_changers[str(dev)].Restore() + + # Remove package-specific configuration + dev.RunShellCommand(['am', 'clear-debug-app'], check_return=True) + + # Execute any custom teardown shell commands + for cmd in self._test_instance.run_teardown_commands: + logging.info('Running custom teardown shell command: %s', cmd) + dev.RunShellCommand(cmd, shell=True, check_return=True) + + valgrind_tools.SetChromeTimeoutScale(dev, None) + + # Restore any shared preference files that we stored during setup. + # This should be run sometime before the replace package contextmanager + # gets exited so we don't have to special case restoring files of + # replaced system apps. + for pref_to_restore in self._shared_prefs_to_restore: + pref_to_restore.Commit(force_commit=True) + + # If we've force approved app links for a package, undo that now. + self._ToggleAppLinks(dev, 'STATE_NO_RESPONSE') + + # Context manager exit handlers are applied in reverse order + # of the enter handlers. + for context in reversed(self._context_managers[str(dev)]): + # See pylint-related comment above with __enter__() + # pylint: disable=no-member + context.__exit__(*sys.exc_info()) + # pylint: enable=no-member + + self._env.parallel_devices.pMap(individual_device_tear_down) + + def _ToggleAppLinks(self, dev, state): + # The set-app-links command was added in Android 12 (sdk = 31). The + # restrictions that require us to set the app links were also added in + # Android 12, so doing nothing on earlier Android versions is fine. + if dev.build_version_sdk < version_codes.S: + return + + package = self._test_instance.approve_app_links_package + domain = self._test_instance.approve_app_links_domain + + if not package or not domain: + return + + cmd = [ + 'pm', 'set-app-links', '--package', package, state, domain + ] + dev.RunShellCommand(cmd, check_return=True) + + def _CreateFlagChangerIfNeeded(self, device): + if str(device) not in self._flag_changers: + cmdline_file = 'test-cmdline-file' + if self._test_instance.use_apk_under_test_flags_file: + if self._test_instance.package_info: + cmdline_file = self._test_instance.package_info.cmdline_file + else: + raise Exception('No PackageInfo found but' + '--use-apk-under-test-flags-file is specified.') + self._flag_changers[str(device)] = flag_changer.FlagChanger( + device, cmdline_file) + + #override + def _CreateShardsForDevices(self, tests): + """Create shards of tests to run on devices. + + Args: + tests: List containing tests or test batches. + + Returns: + List of tests or batches. + """ + # Each test or test batch will be a single shard. 
+ return tests + + #override + def _GetTests(self): + if self._test_instance.junit4_runner_supports_listing: + raw_tests = self._GetTestsFromRunner() + tests = self._test_instance.ProcessRawTests(raw_tests) + else: + tests = self._test_instance.GetTests() + tests = self._ApplyExternalSharding( + tests, self._test_instance.external_shard_index, + self._test_instance.total_external_shards) + return tests + + #override + def GetTestsForListing(self): + # Parent class implementation assumes _GetTests() returns strings rather + # than dicts. + test_dicts = self._GetTests() + test_dicts = local_device_test_run.FlattenTestList(test_dicts) + return sorted('{}#{}'.format(d['class'], d['method']) for d in test_dicts) + + #override + def _GroupTests(self, tests): + batched_tests = dict() + other_tests = [] + for test in tests: + annotations = test['annotations'] + if 'Batch' in annotations and 'RequiresRestart' not in annotations: + batch_name = annotations['Batch']['value'] + if not batch_name: + batch_name = test['class'] + + # Feature flags won't work in instrumentation tests unless the activity + # is restarted. + # Tests with identical features are grouped to minimize restarts. + # UnitTests that specify flags always use Features.JUnitProcessor, so + # they don't need to be split. + if batch_name != 'UnitTests': + if 'Features$EnableFeatures' in annotations: + batch_name += '|enabled:' + ','.join( + sorted(annotations['Features$EnableFeatures']['value'])) + if 'Features$DisableFeatures' in annotations: + batch_name += '|disabled:' + ','.join( + sorted(annotations['Features$DisableFeatures']['value'])) + if 'CommandLineFlags$Add' in annotations: + batch_name += '|cmd_line_add:' + ','.join( + sorted(annotations['CommandLineFlags$Add']['value'])) + if 'CommandLineFlags$Remove' in annotations: + batch_name += '|cmd_line_remove:' + ','.join( + sorted(annotations['CommandLineFlags$Remove']['value'])) + + batched_tests.setdefault(batch_name, []).append(test) + else: + other_tests.append(test) + + def dict2list(d): + if isinstance(d, dict): + return sorted([(k, dict2list(v)) for k, v in d.items()]) + if isinstance(d, list): + return [dict2list(v) for v in d] + if isinstance(d, tuple): + return tuple(dict2list(v) for v in d) + return d + + test_count = sum( + [len(test) - 1 for test in tests if self._CountTestsIndividually(test)]) + test_count += len(tests) + if self._test_instance.total_external_shards > 1: + # Calculate suitable test batch max group size based on average partition + # size. The batch size should be below partition size to balance between + # shards. Choose to divide by 3 as it works fine with most of test suite + # without increasing too much setup/teardown time for batch tests. + test_batch_max_group_size = \ + max(1, test_count // self._test_instance.total_external_shards // 3) + else: + test_batch_max_group_size = _LOCAL_TEST_BATCH_MAX_GROUP_SIZE + + all_tests = [] + for _, btests in list(batched_tests.items()): + # Ensure a consistent ordering across external shards. + btests.sort(key=dict2list) + all_tests.extend([ + btests[i:i + test_batch_max_group_size] + for i in range(0, len(btests), test_batch_max_group_size) + ]) + all_tests.extend(other_tests) + # Sort all tests by hash. + # TODO(crbug.com/1257820): Add sorting logic back to _PartitionTests. 
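+    # Worked example of the sizing above: if test_count comes to 1200 with 4
+    # external shards, test_batch_max_group_size is max(1, 1200 // 4 // 3),
+    # i.e. 100, so a 250-test batch is split into groups of 100, 100 and 50
+    # before being spread across shards.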
+ return self._SortTests(all_tests) + + #override + def _GetUniqueTestName(self, test): + return instrumentation_test_instance.GetUniqueTestName(test) + + #override + def _RunTest(self, device, test): + extras = {} + + if self._test_instance.is_unit_test: + extras[_EXTRA_TEST_IS_UNIT] = 'true' + + # Provide package name under test for apk_under_test. + if self._test_instance.apk_under_test: + package_name = self._test_instance.apk_under_test.GetPackageName() + extras[_EXTRA_PACKAGE_UNDER_TEST] = package_name + + flags_to_add = [] + test_timeout_scale = None + if self._test_instance.coverage_directory: + coverage_basename = '%s' % ('%s_%s_group' % + (test[0]['class'], test[0]['method']) + if isinstance(test, list) else '%s_%s' % + (test['class'], test['method'])) + extras['coverage'] = 'true' + coverage_directory = os.path.join( + device.GetExternalStoragePath(), 'chrome', 'test', 'coverage') + if not device.PathExists(coverage_directory): + device.RunShellCommand(['mkdir', '-p', coverage_directory], + check_return=True) + coverage_device_file = os.path.join(coverage_directory, coverage_basename) + coverage_device_file += '.exec' + extras['coverageFile'] = coverage_device_file + + if self._test_instance.enable_breakpad_dump: + # Use external storage directory so that the breakpad dump can be accessed + # by the test APK in addition to the apk_under_test. + breakpad_dump_directory = os.path.join(device.GetExternalStoragePath(), + 'chromium_dumps') + if device.PathExists(breakpad_dump_directory): + device.RemovePath(breakpad_dump_directory, recursive=True) + flags_to_add.append('--breakpad-dump-location=' + breakpad_dump_directory) + + # Save screenshot if screenshot dir is specified (save locally) or if + # a GS bucket is passed (save in cloud). + screenshot_device_file = device_temp_file.DeviceTempFile( + device.adb, suffix='.png', dir=device.GetExternalStoragePath()) + extras[EXTRA_SCREENSHOT_FILE] = screenshot_device_file.name + + # Set up the screenshot directory. This needs to be done for each test so + # that we only get screenshots created by that test. It has to be on + # external storage since the default location doesn't allow file creation + # from the instrumentation test app on Android L and M. 
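+    # ui_capture_dir is a device temp-directory context manager; the
+    # 'with ui_capture_dir:' block below removes it once any captures have
+    # been pulled.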
+ ui_capture_dir = device_temp_file.NamedDeviceTemporaryDirectory( + device.adb, + dir=device.GetExternalStoragePath()) + extras[EXTRA_UI_CAPTURE_DIR] = ui_capture_dir.name + + if self._env.trace_output: + trace_device_file = device_temp_file.DeviceTempFile( + device.adb, suffix='.json', dir=device.GetExternalStoragePath()) + extras[EXTRA_TRACE_FILE] = trace_device_file.name + + target = '%s/%s' % (self._test_instance.test_package, + self._test_instance.junit4_runner_class) + if isinstance(test, list): + + def name_and_timeout(t): + n = instrumentation_test_instance.GetTestName(t) + i = self._GetTimeoutFromAnnotations(t['annotations'], n) + return (n, i) + + test_names, timeouts = list(zip(*(name_and_timeout(t) for t in test))) + + test_name = instrumentation_test_instance.GetTestName( + test[0]) + _BATCH_SUFFIX + extras['class'] = ','.join(test_names) + test_display_name = test_name + timeout = min(MAX_BATCH_TEST_TIMEOUT, + FIXED_TEST_TIMEOUT_OVERHEAD + sum(timeouts)) + else: + assert test['is_junit4'] + test_name = instrumentation_test_instance.GetTestName(test) + test_display_name = self._GetUniqueTestName(test) + + extras['class'] = test_name + if 'flags' in test and test['flags']: + flags_to_add.extend(test['flags']) + timeout = FIXED_TEST_TIMEOUT_OVERHEAD + self._GetTimeoutFromAnnotations( + test['annotations'], test_display_name) + + test_timeout_scale = self._GetTimeoutScaleFromAnnotations( + test['annotations']) + if test_timeout_scale and test_timeout_scale != 1: + valgrind_tools.SetChromeTimeoutScale( + device, test_timeout_scale * self._test_instance.timeout_scale) + + if self._test_instance.wait_for_java_debugger: + timeout = None + logging.info('preparing to run %s: %s', test_display_name, test) + + if _IsRenderTest(test): + # TODO(mikecase): Add DeviceTempDirectory class and use that instead. + self._render_tests_device_output_dir = posixpath.join( + device.GetExternalStoragePath(), 'render_test_output_dir') + flags_to_add.append('--render-test-output-dir=%s' % + self._render_tests_device_output_dir) + + if _IsWPRRecordReplayTest(test): + wpr_archive_relative_path = _GetWPRArchivePath(test) + if not wpr_archive_relative_path: + raise RuntimeError('Could not find the WPR archive file path ' + 'from annotation.') + wpr_archive_path = os.path.join(host_paths.DIR_SOURCE_ROOT, + wpr_archive_relative_path) + if not os.path.isdir(wpr_archive_path): + raise RuntimeError('WPRArchiveDirectory annotation should point ' + 'to a directory only. ' + '{0} exist: {1}'.format( + wpr_archive_path, + os.path.exists(wpr_archive_path))) + + file_name = _GetWPRArchiveFileName( + test) or self._GetUniqueTestName(test) + '.wprgo' + + # Some linux version does not like # in the name. Replaces it with __. + archive_path = os.path.join(wpr_archive_path, + _ReplaceUncommonChars(file_name)) + + if not os.path.exists(_WPR_GO_LINUX_X86_64_PATH): + # If we got to this stage, then we should have + # checkout_android set. + raise RuntimeError( + 'WPR Go binary not found at {}'.format(_WPR_GO_LINUX_X86_64_PATH)) + # Tells the server to use the binaries retrieved from CIPD. 
+ chrome_proxy_utils.ChromeProxySession.SetWPRServerBinary( + _WPR_GO_LINUX_X86_64_PATH) + self._chrome_proxy = chrome_proxy_utils.ChromeProxySession() + self._chrome_proxy.wpr_record_mode = self._test_instance.wpr_record_mode + self._chrome_proxy.Start(device, archive_path) + flags_to_add.extend(self._chrome_proxy.GetFlags()) + + if flags_to_add: + self._CreateFlagChangerIfNeeded(device) + self._flag_changers[str(device)].PushFlags(add=flags_to_add) + + if self._test_instance.store_data_in_app_directory: + extras.update({'fetchTestDataFromAppDataDir': 'true'}) + + time_ms = lambda: int(time.time() * 1e3) + start_ms = time_ms() + + with ui_capture_dir: + with self._ArchiveLogcat(device, test_name) as logcat_file: + output = device.StartInstrumentation( + target, raw=True, extras=extras, timeout=timeout, retries=0) + + duration_ms = time_ms() - start_ms + + with contextlib_ext.Optional( + trace_event.trace('ProcessResults'), + self._env.trace_output): + output = self._test_instance.MaybeDeobfuscateLines(output) + # TODO(jbudorick): Make instrumentation tests output a JSON so this + # doesn't have to parse the output. + result_code, result_bundle, statuses = ( + self._test_instance.ParseAmInstrumentRawOutput(output)) + results = self._test_instance.GenerateTestResults( + result_code, result_bundle, statuses, duration_ms, + device.product_cpu_abi, self._test_instance.symbolizer) + + if self._env.trace_output: + self._SaveTraceData(trace_device_file, device, test['class']) + + + def restore_flags(): + if flags_to_add: + self._flag_changers[str(device)].Restore() + + def restore_timeout_scale(): + if test_timeout_scale: + valgrind_tools.SetChromeTimeoutScale( + device, self._test_instance.timeout_scale) + + def handle_coverage_data(): + if self._test_instance.coverage_directory: + try: + if not os.path.exists(self._test_instance.coverage_directory): + os.makedirs(self._test_instance.coverage_directory) + # Retries add time to test execution. + if device.PathExists(coverage_device_file, retries=0): + device.PullFile(coverage_device_file, + self._test_instance.coverage_directory) + device.RemovePath(coverage_device_file, True) + else: + logging.warning('Coverage file does not exist: %s', + coverage_device_file) + except (OSError, base_error.BaseError) as e: + logging.warning('Failed to handle coverage data after tests: %s', e) + + def handle_render_test_data(): + if _IsRenderTest(test): + # Render tests do not cause test failure by default. So we have to + # check to see if any failure images were generated even if the test + # does not fail. 
+ try: + self._ProcessRenderTestResults(device, results) + finally: + device.RemovePath(self._render_tests_device_output_dir, + recursive=True, + force=True) + self._render_tests_device_output_dir = None + + def pull_ui_screen_captures(): + screenshots = [] + for filename in device.ListDirectory(ui_capture_dir.name): + if filename.endswith('.json'): + screenshots.append(pull_ui_screenshot(filename)) + if screenshots: + json_archive_name = 'ui_capture_%s_%s.json' % ( + test_name.replace('#', '.'), + time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime())) + with self._env.output_manager.ArchivedTempfile( + json_archive_name, 'ui_capture', output_manager.Datatype.JSON + ) as json_archive: + json.dump(screenshots, json_archive) + _SetLinkOnResults(results, test_name, 'ui screenshot', + json_archive.Link()) + + def pull_ui_screenshot(filename): + source_dir = ui_capture_dir.name + json_path = posixpath.join(source_dir, filename) + json_data = json.loads(device.ReadFile(json_path)) + image_file_path = posixpath.join(source_dir, json_data['location']) + with self._env.output_manager.ArchivedTempfile( + json_data['location'], 'ui_capture', output_manager.Datatype.PNG + ) as image_archive: + device.PullFile(image_file_path, image_archive.name) + json_data['image_link'] = image_archive.Link() + return json_data + + def stop_chrome_proxy(): + # Removes the port forwarding + if self._chrome_proxy: + self._chrome_proxy.Stop(device) + if not self._chrome_proxy.wpr_replay_mode: + logging.info('WPR Record test generated archive file %s', + self._chrome_proxy.wpr_archive_path) + self._chrome_proxy = None + + def pull_baseline_profile(): + # Search though status responses for the one with the key we are + # looking for. + for _, bundle in statuses: + baseline_profile_path = bundle.get( + 'additionalTestOutputFile_baseline-profile-ts') + if baseline_profile_path: + # Found it. + break + else: + # This test does not generate a baseline profile. + return + with self._env.output_manager.ArchivedTempfile( + 'baseline_profile.txt', 'baseline_profile') as baseline_profile: + device.PullFile(baseline_profile_path, baseline_profile.name) + _SetLinkOnResults(results, test_name, 'baseline_profile', + baseline_profile.Link()) + logging.warning('Baseline Profile Location %s', baseline_profile.Link()) + + + # While constructing the TestResult objects, we can parallelize several + # steps that involve ADB. These steps should NOT depend on any info in + # the results! Things such as whether the test CRASHED have not yet been + # determined. + post_test_steps = [ + restore_flags, restore_timeout_scale, stop_chrome_proxy, + handle_coverage_data, handle_render_test_data, + pull_ui_screen_captures, pull_baseline_profile + ] + if self._env.concurrent_adb: + reraiser_thread.RunAsync(post_test_steps) + else: + for step in post_test_steps: + step() + + if logcat_file: + _SetLinkOnResults(results, test_name, 'logcat', logcat_file.Link()) + + # Update the result name if the test used flags. + if flags_to_add: + for r in results: + if r.GetName() == test_name: + r.SetName(test_display_name) + + # Add UNKNOWN results for any missing tests. + iterable_test = test if isinstance(test, list) else [test] + test_names = set(self._GetUniqueTestName(t) for t in iterable_test) + results_names = set(r.GetName() for r in results) + results.extend( + base_test_result.BaseTestResult(u, base_test_result.ResultType.UNKNOWN) + for u in test_names.difference(results_names)) + + # Update the result type if we detect a crash. 
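+    # Note the check below uses substring containment, e.g. a crash dialog
+    # for a hypothetical 'org.chromium.chrome' matches a test_package of
+    # 'org.chromium.chrome.tests'.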
+ try: + crashed_packages = DismissCrashDialogs(device) + # Assume test package convention of ".test" suffix + if any(p in self._test_instance.test_package for p in crashed_packages): + for r in results: + if r.GetType() == base_test_result.ResultType.UNKNOWN: + r.SetType(base_test_result.ResultType.CRASH) + elif (crashed_packages and len(results) == 1 + and results[0].GetType() != base_test_result.ResultType.PASS): + # Add log message and set failure reason if: + # 1) The app crash was likely not caused by the test. + # AND + # 2) The app crash possibly caused the test to fail. + # Crashes of the package under test are assumed to be the test's fault. + _AppendToLogForResult( + results[0], 'OS displayed error dialogs for {}'.format( + ', '.join(crashed_packages))) + results[0].SetFailureReason('{} Crashed'.format( + ','.join(crashed_packages))) + except device_errors.CommandTimeoutError: + logging.warning('timed out when detecting/dismissing error dialogs') + # Attach screenshot to the test to help with debugging the dialog boxes. + self._SaveScreenshot(device, screenshot_device_file, test_display_name, + results, 'dialog_box_screenshot') + + # The crash result can be set above or in + # InstrumentationTestRun.GenerateTestResults. If a test crashes, + # subprocesses such as the one used by EmbeddedTestServerRule can be left + # alive in a bad state, so kill them now. + for r in results: + if r.GetType() == base_test_result.ResultType.CRASH: + for apk in self._test_instance.additional_apks: + device.ForceStop(apk.GetPackageName()) + + # Handle failures by: + # - optionally taking a screenshot + # - logging the raw output at ERROR level + # - clearing the application state while persisting permissions + if any(r.GetType() not in (base_test_result.ResultType.PASS, + base_test_result.ResultType.SKIP) + for r in results): + self._SaveScreenshot(device, screenshot_device_file, test_display_name, + results, 'post_test_screenshot') + + logging.error('detected failure in %s. raw output:', test_display_name) + for l in output: + logging.error(' %s', l) + if not self._env.skip_clear_data: + if self._test_instance.package_info: + permissions = (self._test_instance.apk_under_test.GetPermissions() + if self._test_instance.apk_under_test else None) + device.ClearApplicationState(self._test_instance.package_info.package, + permissions=permissions) + if self._test_instance.enable_breakpad_dump: + device.RemovePath(breakpad_dump_directory, recursive=True) + else: + logging.debug('raw output from %s:', test_display_name) + for l in output: + logging.debug(' %s', l) + + if self._test_instance.store_tombstones: + resolved_tombstones = tombstones.ResolveTombstones( + device, + resolve_all_tombstones=True, + include_stack_symbols=False, + wipe_tombstones=True, + tombstone_symbolizer=self._test_instance.symbolizer) + if resolved_tombstones: + tombstone_filename = 'tombstones_%s_%s' % (time.strftime( + '%Y%m%dT%H%M%S-UTC', time.gmtime()), device.serial) + with self._env.output_manager.ArchivedTempfile( + tombstone_filename, 'tombstones') as tombstone_file: + tombstone_file.write('\n'.join(resolved_tombstones)) + + # Associate tombstones with first crashing test. + for result in results: + if result.GetType() == base_test_result.ResultType.CRASH: + result.SetLink('tombstones', tombstone_file.Link()) + break + else: + # We don't always detect crashes correctly. In this case, + # associate with the first test. 
+          results[0].SetLink('tombstones', tombstone_file.Link())
+
+    unknown_tests = set(r.GetName() for r in results
+                        if r.GetType() == base_test_result.ResultType.UNKNOWN)
+
+    # If a test that is batched crashes, the rest of the tests in that batch
+    # won't be run and will have their status left as unknown in results,
+    # so rerun the tests. (see crbug/1127935)
+    # The tests need to be "unbatched" so that on subsequent tries they can
+    # be run individually. This prevents an unrecognized crash from keeping
+    # the rest of the batch from ever being run. Running the tests unbatched
+    # does not happen until a retry happens at the
+    # local_device_test_run/environment level.
+    tests_to_rerun = []
+    for t in iterable_test:
+      if self._GetUniqueTestName(t) in unknown_tests:
+        prior_attempts = t.get('run_attempts', 0)
+        t['run_attempts'] = prior_attempts + 1
+        # It's possible every test in the batch could crash, so we need to
+        # try up to as many times as there are tests.
+        if prior_attempts < len(results):
+          if t['annotations']:
+            t['annotations'].pop('Batch', None)
+          tests_to_rerun.append(t)
+
+    # If we have a crash that isn't recognized as a crash in a batch, the
+    # tests will be marked as unknown. Sometimes a test failure causes a
+    # crash, but the crash isn't recorded because the failure was detected
+    # first. If the UNKNOWN tests were rerun while unbatched and passed,
+    # they'd have an UNKNOWN, PASS status and be improperly marked as flaky,
+    # so change the status to NOTRUN and don't try rerunning here. They will
+    # get rerun individually at the local_device_test_run/environment level,
+    # as the "Batch" annotation was removed.
+    found_crash_or_fail = False
+    for r in results:
+      if (r.GetType() == base_test_result.ResultType.CRASH
+          or r.GetType() == base_test_result.ResultType.FAIL):
+        found_crash_or_fail = True
+        break
+    if not found_crash_or_fail:
+      # Don't bother rerunning since the unrecognized crashes in
+      # the batch will keep failing.
+      tests_to_rerun = None
+      for r in results:
+        if r.GetType() == base_test_result.ResultType.UNKNOWN:
+          r.SetType(base_test_result.ResultType.NOTRUN)
+
+    return results, tests_to_rerun if tests_to_rerun else None
+
+  def _GetTestsFromRunner(self):
+    test_apk_path = self._test_instance.test_apk.path
+    pickle_path = '%s-runner.pickle' % test_apk_path
+    # For incremental APKs, the code doesn't live in the apk, so instead check
+    # the timestamp of the target's .stamp file.
+    if self._test_instance.test_apk_incremental_install_json:
+      with open(self._test_instance.test_apk_incremental_install_json) as f:
+        data = json.load(f)
+      out_dir = constants.GetOutDirectory()
+      test_mtime = max(
+          os.path.getmtime(os.path.join(out_dir, p)) for p in data['dex_files'])
+    else:
+      test_mtime = os.path.getmtime(test_apk_path)
+
+    try:
+      return instrumentation_test_instance.GetTestsFromPickle(
+          pickle_path, test_mtime)
+    except instrumentation_test_instance.TestListPickleException as e:
+      logging.info('Could not get tests from pickle: %s', e)
+      logging.info('Getting tests by having %s list them.',
+                   self._test_instance.junit4_runner_class)
+      # We need to use GetAppWritablePath instead of GetExternalStoragePath
+      # here because we will not have applied legacy storage workarounds on R+
+      # yet.
+      # TODO(rmhasan): Figure out how to create the temp file inside the test
+      # app's data directory. Currently, when the temp file is created, read
+      # permissions are only given to the app's user id, so we can't pull the
+      # file from the device.
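+      # Flow of the fallback below: each device runs the JUnit4 runner with
+      # the 'log' extra set, so no test bodies execute; the runner instead
+      # writes the discovered tests as JSON to a device temp file, which is
+      # pulled to the host and parsed.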
+ def list_tests(d): + def _run(dev): + with device_temp_file.DeviceTempFile( + dev.adb, suffix='.json', + dir=dev.GetAppWritablePath()) as dev_test_list_json: + junit4_runner_class = self._test_instance.junit4_runner_class + test_package = self._test_instance.test_package + extras = { + 'log': 'true', + # Workaround for https://github.com/mockito/mockito/issues/922 + 'notPackage': 'net.bytebuddy', + } + extras[_EXTRA_TEST_LIST] = dev_test_list_json.name + target = '%s/%s' % (test_package, junit4_runner_class) + timeout = 240 + if self._test_instance.wait_for_java_debugger: + timeout = None + with self._ArchiveLogcat(dev, 'list_tests'): + test_list_run_output = dev.StartInstrumentation( + target, extras=extras, retries=0, timeout=timeout) + if any(test_list_run_output): + logging.error('Unexpected output while listing tests:') + for line in test_list_run_output: + logging.error(' %s', line) + with tempfile_ext.NamedTemporaryDirectory() as host_dir: + host_file = os.path.join(host_dir, 'list_tests.json') + dev.PullFile(dev_test_list_json.name, host_file) + with open(host_file, 'r') as host_file: + return json.load(host_file) + + return crash_handler.RetryOnSystemCrash(_run, d) + + raw_test_lists = self._env.parallel_devices.pMap(list_tests).pGet(None) + + # If all devices failed to list tests, raise an exception. + # Check that tl is not None and is not empty. + if all(not tl for tl in raw_test_lists): + raise device_errors.CommandFailedError( + 'Failed to list tests on any device') + + # Get the first viable list of raw tests + raw_tests = [tl for tl in raw_test_lists if tl][0] + + instrumentation_test_instance.SaveTestsToPickle(pickle_path, raw_tests) + return raw_tests + + @contextlib.contextmanager + def _ArchiveLogcat(self, device, test_name): + stream_name = 'logcat_%s_shard%s_%s_%s' % ( + test_name.replace('#', '.'), self._test_instance.external_shard_index, + time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime()), device.serial) + + logcat_file = None + logmon = None + try: + with self._env.output_manager.ArchivedTempfile(stream_name, + 'logcat') as logcat_file: + with logcat_monitor.LogcatMonitor( + device.adb, + filter_specs=local_device_environment.LOGCAT_FILTERS, + output_file=logcat_file.name, + transform_func=self._test_instance.MaybeDeobfuscateLines, + check_error=False) as logmon: + with contextlib_ext.Optional(trace_event.trace(test_name), + self._env.trace_output): + yield logcat_file + finally: + if logmon: + logmon.Close() + if logcat_file and logcat_file.Link(): + logging.critical('Logcat saved to %s', logcat_file.Link()) + + def _SaveTraceData(self, trace_device_file, device, test_class): + trace_host_file = self._env.trace_output + + if device.FileExists(trace_device_file.name): + try: + java_trace_json = device.ReadFile(trace_device_file.name) + except IOError as e: + raise Exception('error pulling trace file from device') from e + finally: + trace_device_file.close() + + process_name = '%s (device %s)' % (test_class, device.serial) + process_hash = int(hashlib.md5(process_name).hexdigest()[:6], 16) + + java_trace = json.loads(java_trace_json) + java_trace.sort(key=lambda event: event['ts']) + + get_date_command = 'echo $EPOCHREALTIME' + device_time = device.RunShellCommand(get_date_command, single_line=True) + device_time = float(device_time) * 1e6 + system_time = trace_time.Now() + time_difference = system_time - device_time + + threads_to_add = set() + for event in java_trace: + # Ensure thread ID and thread name will be linked in the metadata. 
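+      # For reference, each 'thread_name' metadata entry appended below has
+      # the Chrome trace-event shape:
+      #   {'ph': 'M', 'cat': '__metadata', 'name': 'thread_name',
+      #    'pid': process_hash, 'tid': tid, 'ts': 0,
+      #    'args': {'name': thread_name}}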
+ threads_to_add.add((event['tid'], event['name'])) + + event['pid'] = process_hash + + # Adjust time stamp to align with Python trace times (from + # trace_time.Now()). + event['ts'] += time_difference + + for tid, thread_name in threads_to_add: + thread_name_metadata = {'pid': process_hash, 'tid': tid, + 'ts': 0, 'ph': 'M', 'cat': '__metadata', + 'name': 'thread_name', + 'args': {'name': thread_name}} + java_trace.append(thread_name_metadata) + + process_name_metadata = {'pid': process_hash, 'tid': 0, 'ts': 0, + 'ph': 'M', 'cat': '__metadata', + 'name': 'process_name', + 'args': {'name': process_name}} + java_trace.append(process_name_metadata) + + java_trace_json = json.dumps(java_trace) + java_trace_json = java_trace_json.rstrip(' ]') + + with open(trace_host_file, 'r') as host_handle: + host_contents = host_handle.readline() + + if host_contents: + java_trace_json = ',%s' % java_trace_json.lstrip(' [') + + with open(trace_host_file, 'a') as host_handle: + host_handle.write(java_trace_json) + + def _SaveScreenshot(self, device, screenshot_device_file, test_name, results, + link_name): + screenshot_filename = '%s-%s.png' % ( + test_name, time.strftime('%Y%m%dT%H%M%S-UTC', time.gmtime())) + if device.FileExists(screenshot_device_file.name): + with self._env.output_manager.ArchivedTempfile( + screenshot_filename, 'screenshot', + output_manager.Datatype.PNG) as screenshot_host_file: + try: + device.PullFile(screenshot_device_file.name, + screenshot_host_file.name) + finally: + screenshot_device_file.close() + _SetLinkOnResults(results, test_name, link_name, + screenshot_host_file.Link()) + + def _ProcessRenderTestResults(self, device, results): + if not self._render_tests_device_output_dir: + return + self._ProcessSkiaGoldRenderTestResults(device, results) + + def _ProcessSkiaGoldRenderTestResults(self, device, results): + gold_dir = posixpath.join(self._render_tests_device_output_dir, + _DEVICE_GOLD_DIR) + if not device.FileExists(gold_dir): + return + + gold_properties = self._test_instance.skia_gold_properties + with tempfile_ext.NamedTemporaryDirectory() as host_dir: + use_luci = not (gold_properties.local_pixel_tests + or gold_properties.no_luci_auth) + + # Pull everything at once instead of pulling individually, as it's + # slightly faster since each command over adb has some overhead compared + # to doing the same thing locally. + host_dir = os.path.join(host_dir, _DEVICE_GOLD_DIR) + device.PullFile(gold_dir, host_dir) + for image_name in os.listdir(host_dir): + if not image_name.endswith('.png'): + continue + + render_name = image_name[:-4] + json_name = render_name + '.json' + json_path = os.path.join(host_dir, json_name) + image_path = os.path.join(host_dir, image_name) + full_test_name = None + if not os.path.exists(json_path): + _FailTestIfNecessary(results, full_test_name) + _AppendToLog( + results, full_test_name, + 'Unable to find corresponding JSON file for image %s ' + 'when doing Skia Gold comparison.' % image_name) + continue + + # Add 'ignore': '1' if a comparison failure would not be surfaced, as + # that implies that we aren't actively maintaining baselines for the + # test. This helps prevent unrelated CLs from getting comments posted to + # them. + should_rewrite = False + with open(json_path) as infile: + # All the key/value pairs in the JSON file are strings, so convert + # to a bool. 
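+        # Roughly the shape of the sidecar JSON written by the test
+        # (hypothetical values):
+        #   {'full_test_name': 'org.chromium.Foo#testBar',
+        #    'optional_keys': {'fail_on_unsupported_configs': 'false'},
+        #    ...Gold key/value pairs...}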
+ json_dict = json.load(infile) + optional_dict = json_dict.get('optional_keys', {}) + if 'optional_keys' in json_dict: + should_rewrite = True + del json_dict['optional_keys'] + fail_on_unsupported = optional_dict.get('fail_on_unsupported_configs', + 'false') + fail_on_unsupported = fail_on_unsupported.lower() == 'true' + # Grab the full test name so we can associate the comparison with a + # particular test, which is necessary if tests are batched together. + # Remove the key/value pair from the JSON since we don't need/want to + # upload it to Gold. + full_test_name = json_dict.get('full_test_name') + if 'full_test_name' in json_dict: + should_rewrite = True + del json_dict['full_test_name'] + + running_on_unsupported = ( + device.build_version_sdk not in RENDER_TEST_MODEL_SDK_CONFIGS.get( + device.product_model, []) and not fail_on_unsupported) + should_ignore_in_gold = running_on_unsupported + # We still want to fail the test even if we're ignoring the image in + # Gold if we're running on a supported configuration, so + # should_ignore_in_gold != should_hide_failure. + should_hide_failure = running_on_unsupported + if should_ignore_in_gold: + # This is put in the regular keys dict instead of the optional one + # because ignore rules do not apply to optional keys. + json_dict['ignore'] = '1' + if should_rewrite: + with open(json_path, 'w') as outfile: + json.dump(json_dict, outfile) + + gold_session = self._skia_gold_session_manager.GetSkiaGoldSession( + keys_input=json_path) + + try: + status, error = gold_session.RunComparison( + name=render_name, + png_file=image_path, + output_manager=self._env.output_manager, + use_luci=use_luci, + optional_keys=optional_dict) + except Exception as e: # pylint: disable=broad-except + _FailTestIfNecessary(results, full_test_name) + _AppendToLog(results, full_test_name, + 'Skia Gold comparison raised exception: %s' % e) + continue + + if not status: + continue + + # Don't fail the test if we ran on an unsupported configuration unless + # the test has explicitly opted in, as it's likely that baselines + # aren't maintained for that configuration. + if should_hide_failure: + if self._test_instance.skia_gold_properties.local_pixel_tests: + _AppendToLog( + results, full_test_name, + 'Gold comparison for %s failed, but model %s with SDK ' + '%d is not a supported configuration. This failure would be ' + 'ignored on the bots, but failing since tests are being run ' + 'locally.' % + (render_name, device.product_model, device.build_version_sdk)) + else: + _AppendToLog( + results, full_test_name, + 'Gold comparison for %s failed, but model %s with SDK ' + '%d is not a supported configuration, so ignoring failure.' % + (render_name, device.product_model, device.build_version_sdk)) + continue + + _FailTestIfNecessary(results, full_test_name) + failure_log = ( + 'Skia Gold reported failure for RenderTest %s. See ' + 'RENDER_TESTS.md for how to fix this failure.' 
% render_name) + status_codes =\ + self._skia_gold_session_manager.GetSessionClass().StatusCodes + if status == status_codes.AUTH_FAILURE: + _AppendToLog(results, full_test_name, + 'Gold authentication failed with output %s' % error) + elif status == status_codes.INIT_FAILURE: + _AppendToLog(results, full_test_name, + 'Gold initialization failed with output %s' % error) + elif status == status_codes.COMPARISON_FAILURE_REMOTE: + public_triage_link, internal_triage_link =\ + gold_session.GetTriageLinks(render_name) + if not public_triage_link: + _AppendToLog( + results, full_test_name, + 'Failed to get triage link for %s, raw output: %s' % + (render_name, error)) + _AppendToLog( + results, full_test_name, 'Reason for no triage link: %s' % + gold_session.GetTriageLinkOmissionReason(render_name)) + continue + if gold_properties.IsTryjobRun(): + _SetLinkOnResults(results, full_test_name, + 'Public Skia Gold triage link for entire CL', + public_triage_link) + _SetLinkOnResults(results, full_test_name, + 'Internal Skia Gold triage link for entire CL', + internal_triage_link) + else: + _SetLinkOnResults( + results, full_test_name, + 'Public Skia Gold triage link for %s' % render_name, + public_triage_link) + _SetLinkOnResults( + results, full_test_name, + 'Internal Skia Gold triage link for %s' % render_name, + internal_triage_link) + _AppendToLog(results, full_test_name, failure_log) + + elif status == status_codes.COMPARISON_FAILURE_LOCAL: + given_link = gold_session.GetGivenImageLink(render_name) + closest_link = gold_session.GetClosestImageLink(render_name) + diff_link = gold_session.GetDiffImageLink(render_name) + + processed_template_output = _GenerateRenderTestHtml( + render_name, given_link, closest_link, diff_link) + with self._env.output_manager.ArchivedTempfile( + '%s.html' % render_name, 'gold_local_diffs', + output_manager.Datatype.HTML) as html_results: + html_results.write(processed_template_output) + _SetLinkOnResults(results, full_test_name, render_name, + html_results.Link()) + _AppendToLog( + results, full_test_name, + 'See %s link for diff image with closest positive.' % render_name) + elif status == status_codes.LOCAL_DIFF_FAILURE: + _AppendToLog(results, full_test_name, + 'Failed to generate diffs from Gold: %s' % error) + else: + logging.error( + 'Given unhandled SkiaGoldSession StatusCode %s with error %s', + status, error) + + #override + def _ShouldRetry(self, test, result): + # We've tried to disable retries in the past with mixed results. + # See crbug.com/619055 for historical context and crbug.com/797002 + # for ongoing efforts. + if 'Batch' in test['annotations'] and test['annotations']['Batch'][ + 'value'] == 'UnitTests': + return False + del test, result + return True + + #override + def _ShouldShardTestsForDevices(self): + """Shard tests across several devices. + + Returns: + True if tests should be sharded across several devices, + False otherwise. + """ + return True + + @classmethod + def _GetTimeoutScaleFromAnnotations(cls, annotations): + try: + return int(annotations.get('TimeoutScale', {}).get('value', 1)) + except ValueError as e: + logging.warning("Non-integer value of TimeoutScale ignored. 
(%s)", str(e)) + return 1 + + @classmethod + def _GetTimeoutFromAnnotations(cls, annotations, test_name): + for k, v in TIMEOUT_ANNOTATIONS: + if k in annotations: + timeout = v + break + else: + logging.warning('Using default 1 minute timeout for %s', test_name) + timeout = 60 + + timeout *= cls._GetTimeoutScaleFromAnnotations(annotations) + + return timeout + + +def _IsWPRRecordReplayTest(test): + """Determines whether a test or a list of tests is a WPR RecordReplay Test.""" + if not isinstance(test, list): + test = [test] + return any(WPR_RECORD_REPLAY_TEST_FEATURE_ANNOTATION in t['annotations'].get( + FEATURE_ANNOTATION, {}).get('value', ()) for t in test) + + +def _GetWPRArchivePath(test): + """Retrieves the archive path from the WPRArchiveDirectory annotation.""" + return test['annotations'].get(WPR_ARCHIVE_FILE_PATH_ANNOTATION, + {}).get('value', ()) + + +def _GetWPRArchiveFileName(test): + """Retrieves the WPRArchiveDirectory.ArchiveName annotation.""" + value = test['annotations'].get(WPR_ARCHIVE_NAME_ANNOTATION, + {}).get('value', None) + return value[0] if value else None + + +def _ReplaceUncommonChars(original): + """Replaces uncommon characters with __.""" + if not original: + raise ValueError('parameter should not be empty') + + uncommon_chars = ['#'] + for char in uncommon_chars: + original = original.replace(char, '__') + return original + + +def _IsRenderTest(test): + """Determines if a test or list of tests has a RenderTest amongst them.""" + if not isinstance(test, list): + test = [test] + return any(RENDER_TEST_FEATURE_ANNOTATION in t['annotations'].get( + FEATURE_ANNOTATION, {}).get('value', ()) for t in test) + + +def _GenerateRenderTestHtml(image_name, failure_link, golden_link, diff_link): + """Generates a RenderTest results page. + + Displays the generated (failure) image, the golden image, and the diff + between them. + + Args: + image_name: The name of the image whose comparison failed. + failure_link: The URL to the generated/failure image. + golden_link: The URL to the golden image. + diff_link: The URL to the diff image between the failure and golden images. + + Returns: + A string containing the generated HTML. + """ + jinja2_env = jinja2.Environment( + loader=jinja2.FileSystemLoader(_JINJA_TEMPLATE_DIR), trim_blocks=True) + template = jinja2_env.get_template(_JINJA_TEMPLATE_FILENAME) + # pylint: disable=no-member + return template.render( + test_name=image_name, + failure_link=failure_link, + golden_link=golden_link, + diff_link=diff_link) + + +def _FailTestIfNecessary(results, full_test_name): + """Marks the given results as failed if it wasn't already. + + Marks the result types as ResultType.FAIL unless they were already some sort + of failure type, e.g. ResultType.CRASH. + + Args: + results: A list of base_test_result.BaseTestResult objects. + full_test_name: A string containing the full name of the test, e.g. + org.chromium.chrome.SomeTestClass#someTestMethod. 
+ """ + found_matching_test = _MatchingTestInResults(results, full_test_name) + if not found_matching_test and _ShouldReportNoMatchingResult(full_test_name): + logging.error( + 'Could not find result specific to %s, failing all tests in the batch.', + full_test_name) + for result in results: + if found_matching_test and result.GetName() != full_test_name: + continue + if result.GetType() not in [ + base_test_result.ResultType.FAIL, base_test_result.ResultType.CRASH, + base_test_result.ResultType.TIMEOUT, base_test_result.ResultType.UNKNOWN + ]: + result.SetType(base_test_result.ResultType.FAIL) + + +def _AppendToLog(results, full_test_name, line): + """Appends the given line to the end of the logs of the given results. + + Args: + results: A list of base_test_result.BaseTestResult objects. + full_test_name: A string containing the full name of the test, e.g. + org.chromium.chrome.SomeTestClass#someTestMethod. + line: A string to be appended as a neww line to the log of |result|. + """ + found_matching_test = _MatchingTestInResults(results, full_test_name) + if not found_matching_test and _ShouldReportNoMatchingResult(full_test_name): + logging.error( + 'Could not find result specific to %s, appending to log of all tests ' + 'in the batch.', full_test_name) + for result in results: + if found_matching_test and result.GetName() != full_test_name: + continue + _AppendToLogForResult(result, line) + + +def _AppendToLogForResult(result, line): + result.SetLog(result.GetLog() + '\n' + line) + + +def _SetLinkOnResults(results, full_test_name, link_name, link): + """Sets the given link on the given results. + + Args: + results: A list of base_test_result.BaseTestResult objects. + full_test_name: A string containing the full name of the test, e.g. + org.chromium.chrome.SomeTestClass#someTestMethod. + link_name: A string containing the name of the link being set. + link: A string containing the lkink being set. + """ + found_matching_test = _MatchingTestInResults(results, full_test_name) + if not found_matching_test and _ShouldReportNoMatchingResult(full_test_name): + logging.error( + 'Could not find result specific to %s, adding link to results of all ' + 'tests in the batch.', full_test_name) + for result in results: + if found_matching_test and result.GetName() != full_test_name: + continue + result.SetLink(link_name, link) + + +def _MatchingTestInResults(results, full_test_name): + """Checks if any tests named |full_test_name| are in |results|. + + Args: + results: A list of base_test_result.BaseTestResult objects. + full_test_name: A string containing the full name of the test, e.g. + org.chromium.chrome.Some + + Returns: + True if one of the results in |results| has the same name as + |full_test_name|, otherwise False. + """ + return any(r for r in results if r.GetName() == full_test_name) + + +def _ShouldReportNoMatchingResult(full_test_name): + """Determines whether a failure to find a matching result is actually bad. + + Args: + full_test_name: A string containing the full name of the test, e.g. + org.chromium.chrome.Some + + Returns: + False if the failure to find a matching result is expected and should not + be reported, otherwise True. + """ + if full_test_name is not None and full_test_name.endswith(_BATCH_SUFFIX): + # Handle batched tests, whose reported name is the first test's name + + # "_batch". 
+ return False + return True diff --git a/android/pylib/local/device/local_device_instrumentation_test_run_test.py b/android/pylib/local/device/local_device_instrumentation_test_run_test.py new file mode 100755 index 000000000000..fb41572d3f30 --- /dev/null +++ b/android/pylib/local/device/local_device_instrumentation_test_run_test.py @@ -0,0 +1,197 @@ +#!/usr/bin/env vpython3 +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Tests for local_device_instrumentation_test_run.""" + +# pylint: disable=protected-access + + +import unittest +import mock # pylint: disable=import-error + +from pylib.base import base_test_result +from pylib.base import mock_environment +from pylib.base import mock_test_instance +from pylib.local.device import local_device_instrumentation_test_run + + +class LocalDeviceInstrumentationTestRunTest(unittest.TestCase): + + def setUp(self): + super().setUp() + self._env = mock_environment.MockEnvironment() + self._ti = mock_test_instance.MockTestInstance() + self._obj = ( + local_device_instrumentation_test_run.LocalDeviceInstrumentationTestRun( + self._env, self._ti)) + + # TODO(crbug.com/797002): Decide whether the _ShouldRetry hook is worth + # retaining and remove these tests if not. + + def testShouldRetry_failure(self): + test = { + 'annotations': {}, + 'class': 'SadTest', + 'method': 'testFailure', + 'is_junit4': True, + } + result = base_test_result.BaseTestResult( + 'SadTest.testFailure', base_test_result.ResultType.FAIL) + self.assertTrue(self._obj._ShouldRetry(test, result)) + + def testShouldRetry_retryOnFailure(self): + test = { + 'annotations': {'RetryOnFailure': None}, + 'class': 'SadTest', + 'method': 'testRetryOnFailure', + 'is_junit4': True, + } + result = base_test_result.BaseTestResult( + 'SadTest.testRetryOnFailure', base_test_result.ResultType.FAIL) + self.assertTrue(self._obj._ShouldRetry(test, result)) + + def testShouldRetry_notRun(self): + test = { + 'annotations': {}, + 'class': 'SadTest', + 'method': 'testNotRun', + 'is_junit4': True, + } + result = base_test_result.BaseTestResult( + 'SadTest.testNotRun', base_test_result.ResultType.NOTRUN) + self.assertTrue(self._obj._ShouldRetry(test, result)) + + def testIsWPRRecordReplayTest_matchedWithKey(self): + test = { + 'annotations': { + 'Feature': { + 'value': ['WPRRecordReplayTest', 'dummy'] + } + }, + 'class': 'WPRDummyTest', + 'method': 'testRun', + 'is_junit4': True, + } + self.assertTrue( + local_device_instrumentation_test_run._IsWPRRecordReplayTest(test)) + + def testIsWPRRecordReplayTest_noMatchedKey(self): + test = { + 'annotations': { + 'Feature': { + 'value': ['abc', 'dummy'] + } + }, + 'class': 'WPRDummyTest', + 'method': 'testRun', + 'is_junit4': True, + } + self.assertFalse( + local_device_instrumentation_test_run._IsWPRRecordReplayTest(test)) + + def testGetWPRArchivePath_matchedWithKey(self): + test = { + 'annotations': { + 'WPRArchiveDirectory': { + 'value': 'abc' + } + }, + 'class': 'WPRDummyTest', + 'method': 'testRun', + 'is_junit4': True, + } + self.assertEqual( + local_device_instrumentation_test_run._GetWPRArchivePath(test), 'abc') + + def testGetWPRArchivePath_noMatchedWithKey(self): + test = { + 'annotations': { + 'Feature': { + 'value': 'abc' + } + }, + 'class': 'WPRDummyTest', + 'method': 'testRun', + 'is_junit4': True, + } + self.assertFalse( + local_device_instrumentation_test_run._GetWPRArchivePath(test)) + + def testIsRenderTest_matchedWithKey(self): + test = { + 
'annotations': { + 'Feature': { + 'value': ['RenderTest', 'dummy'] + } + }, + 'class': 'DummyTest', + 'method': 'testRun', + 'is_junit4': True, + } + self.assertTrue(local_device_instrumentation_test_run._IsRenderTest(test)) + + def testIsRenderTest_noMatchedKey(self): + test = { + 'annotations': { + 'Feature': { + 'value': ['abc', 'dummy'] + } + }, + 'class': 'DummyTest', + 'method': 'testRun', + 'is_junit4': True, + } + self.assertFalse(local_device_instrumentation_test_run._IsRenderTest(test)) + + def testReplaceUncommonChars(self): + original = 'abc#edf' + self.assertEqual( + local_device_instrumentation_test_run._ReplaceUncommonChars(original), + 'abc__edf') + original = 'abc#edf#hhf' + self.assertEqual( + local_device_instrumentation_test_run._ReplaceUncommonChars(original), + 'abc__edf__hhf') + original = 'abcedfhhf' + self.assertEqual( + local_device_instrumentation_test_run._ReplaceUncommonChars(original), + 'abcedfhhf') + original = None + with self.assertRaises(ValueError): + local_device_instrumentation_test_run._ReplaceUncommonChars(original) + original = '' + with self.assertRaises(ValueError): + local_device_instrumentation_test_run._ReplaceUncommonChars(original) + + def testStoreDataInAppDir(self): + env = mock.MagicMock() + test_instance = mock.MagicMock() + test_instance.store_data_in_app_directory = True + device = mock.MagicMock() + + device.GetApplicationDataDirectory.return_value = 'app_dir' + device.GetExternalStoragePath.return_value = 'external_dir' + test_run = ( + local_device_instrumentation_test_run.LocalDeviceInstrumentationTestRun( + env, test_instance)) + self.assertEqual(test_run._GetDataStorageRootDirectory(device), 'app_dir') + + def testStoreDataInExternalDir(self): + env = mock.MagicMock() + test_instance = mock.MagicMock() + test_instance.store_data_in_app_directory = False + device = mock.MagicMock() + + device.GetApplicationDataDirectory.return_value = 'app_dir' + device.GetExternalStoragePath.return_value = 'external_dir' + test_run = ( + local_device_instrumentation_test_run.LocalDeviceInstrumentationTestRun( + env, test_instance)) + self.assertEqual(test_run._GetDataStorageRootDirectory(device), + 'external_dir') + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/android/pylib/local/device/local_device_monkey_test_run.py b/android/pylib/local/device/local_device_monkey_test_run.py new file mode 100644 index 000000000000..e90cbbd27f84 --- /dev/null +++ b/android/pylib/local/device/local_device_monkey_test_run.py @@ -0,0 +1,140 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +import logging + +from six.moves import range # pylint: disable=redefined-builtin +from devil.android import device_errors +from devil.android.sdk import intent +from pylib import constants +from pylib.base import base_test_result +from pylib.local.device import local_device_test_run + + +_CHROME_PACKAGE = constants.PACKAGE_INFO['chrome'].package + +class LocalDeviceMonkeyTestRun(local_device_test_run.LocalDeviceTestRun): + def TestPackage(self): + return 'monkey' + + #override + def SetUp(self): + pass + + #override + def _RunTest(self, device, test): + device.ClearApplicationState(self._test_instance.package) + + # Chrome crashes are not always caught by Monkey test runner. + # Launch Chrome and verify Chrome has the same PID before and after + # the test. 
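+    # Sketch of the check below: if GetPids() maps the package to one pid
+    # before _LaunchMonkeyTest() and to a different pid (or to nothing)
+    # afterwards, the browser crashed or restarted mid-run even though the
+    # monkey tool itself may have exited cleanly.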
+    device.StartActivity(
+        intent.Intent(package=self._test_instance.package,
+                      activity=self._test_instance.activity,
+                      action='android.intent.action.MAIN'),
+        blocking=True, force_stop=True)
+    before_pids = device.GetPids(self._test_instance.package)
+
+    output = ''
+    if before_pids:
+      if len(before_pids.get(self._test_instance.package, [])) > 1:
+        raise Exception(
+            'At most one instance of process %s expected but found pids: '
+            '%s' % (self._test_instance.package, before_pids))
+      output = '\n'.join(self._LaunchMonkeyTest(device))
+    after_pids = device.GetPids(self._test_instance.package)
+
+    crashed = True
+    if self._test_instance.package not in before_pids:
+      logging.error('Failed to start the process.')
+    elif self._test_instance.package not in after_pids:
+      logging.error('Process %s has died.',
+                    before_pids[self._test_instance.package])
+    elif (before_pids[self._test_instance.package] !=
+          after_pids[self._test_instance.package]):
+      logging.error('Detected process restart %s -> %s',
+                    before_pids[self._test_instance.package],
+                    after_pids[self._test_instance.package])
+    else:
+      crashed = False
+
+    success_pattern = 'Events injected: %d' % self._test_instance.event_count
+    if success_pattern in output and not crashed:
+      result = base_test_result.BaseTestResult(
+          test, base_test_result.ResultType.PASS, log=output)
+    else:
+      result = base_test_result.BaseTestResult(
+          test, base_test_result.ResultType.FAIL, log=output)
+      if 'chrome' in self._test_instance.package:
+        logging.warning('Starting MinidumpUploadService...')
+        # TODO(jbudorick): Update this after upstreaming.
+        minidump_intent = intent.Intent(
+            action='%s.crash.ACTION_FIND_ALL' % _CHROME_PACKAGE,
+            package=self._test_instance.package,
+            activity='%s.crash.MinidumpUploadService' % _CHROME_PACKAGE)
+        try:
+          device.RunShellCommand(
+              ['am', 'startservice'] + minidump_intent.am_args,
+              as_root=True, check_return=True)
+        except device_errors.CommandFailedError:
+          logging.exception('Failed to start MinidumpUploadService')
+
+    return result, None
+
+  #override
+  def TearDown(self):
+    pass
+
+  #override
+  def _CreateShardsForDevices(self, tests):
+    """Create shards of tests to run on devices.
+
+    Args:
+      tests: List containing tests or test batches.
+
+    Returns:
+      The list of tests unchanged, since monkey tests are not sharded
+      across devices.
+    """
+    return tests
+
+  #override
+  def _ShouldShardTestsForDevices(self):
+    """Shard tests across several devices.
+
+    Returns:
+      True if tests should be sharded across several devices,
+      False otherwise.
+    """
+    # TODO(mikecase): Run Monkey test concurrently on each attached device.
+    return False
+
+  #override
+  def _GetTests(self):
+    return ['MonkeyTest']
+
+  def _LaunchMonkeyTest(self, device):
+    try:
+      cmd = ['monkey',
+             '-p', self._test_instance.package,
+             '--throttle', str(self._test_instance.throttle),
+             '-s', str(self._test_instance.seed),
+             '--monitor-native-crashes',
+             '--kill-process-after-error']
+      for category in self._test_instance.categories:
+        cmd.extend(['-c', category])
+      for _ in range(self._test_instance.verbose_count):
+        cmd.append('-v')
+      cmd.append(str(self._test_instance.event_count))
+      return device.RunShellCommand(
+          cmd, timeout=self._test_instance.timeout, check_return=True)
+    finally:
+      try:
+        # Kill the monkey test process on the device. If you manually
+        # interrupt the test run, this will prevent the monkey test from
+        # continuing to run.
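+        # ('com.android.commands.monkey' is the process name under which the
+        # platform monkey tool itself runs, so this kills the tool rather
+        # than the app under test.)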
+        device.KillAll('com.android.commands.monkey')
+      except device_errors.CommandFailedError:
+        pass
diff --git a/android/pylib/local/device/local_device_test_run.py b/android/pylib/local/device/local_device_test_run.py
new file mode 100644
index 000000000000..0e0b93033400
--- /dev/null
+++ b/android/pylib/local/device/local_device_test_run.py
@@ -0,0 +1,420 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import fnmatch
+import hashlib
+import logging
+import posixpath
+import signal
+try:
+  import _thread as thread
+except ImportError:
+  import thread
+import threading
+
+from devil import base_error
+from devil.android import crash_handler
+from devil.android import device_errors
+from devil.android.sdk import version_codes
+from devil.android.tools import device_recovery
+from devil.utils import signal_handler
+from pylib import valgrind_tools
+from pylib.base import base_test_result
+from pylib.base import test_run
+from pylib.base import test_collection
+from pylib.local.device import local_device_environment
+
+
+_SIGTERM_TEST_LOG = (
+    '  Suite execution terminated, probably due to swarming timeout.\n'
+    '  Your test may not have run.')
+
+
+def SubstituteDeviceRoot(device_path, device_root):
+  if not device_path:
+    return device_root
+  if isinstance(device_path, list):
+    return posixpath.join(*(p if p else device_root for p in device_path))
+  return device_path
+
+
+class TestsTerminated(Exception):
+  pass
+
+
+class InvalidShardingSettings(Exception):
+  def __init__(self, shard_index, total_shards):
+    super().__init__(
+        'Invalid sharding settings. shard_index: %d total_shards: %d' %
+        (shard_index, total_shards))
+
+
+class LocalDeviceTestRun(test_run.TestRun):
+
+  def __init__(self, env, test_instance):
+    super().__init__(env, test_instance)
+    self._tools = {}
+    # This is intended to be filled by a child class.
+    self._installed_packages = []
+    env.SetPreferredAbis(test_instance.GetPreferredAbis())
+
+  #override
+  def RunTests(self, results, raw_logs_fh=None):
+    tests = self._GetTests()
+
+    exit_now = threading.Event()
+
+    @local_device_environment.handle_shard_failures
+    def run_tests_on_device(dev, tests, results):
+      # This is performed here instead of during setup because restarting the
+      # device clears app compatibility flags, which will happen if a device
+      # needs to be recovered.
+      SetAppCompatibilityFlagsIfNecessary(self._installed_packages, dev)
+      consecutive_device_errors = 0
+      for test in tests:
+        if not test:
+          logging.warning('No tests in shard. Continuing.')
+          tests.test_completed()
+          continue
+        if exit_now.isSet():
+          thread.exit()
+
+        result = None
+        rerun = None
+        try:
+          result, rerun = crash_handler.RetryOnSystemCrash(
+              lambda d, t=test: self._RunTest(d, t),
+              device=dev)
+          consecutive_device_errors = 0
+          if isinstance(result, base_test_result.BaseTestResult):
+            results.AddResult(result)
+          elif isinstance(result, list):
+            results.AddResults(result)
+          else:
+            raise Exception(
+                'Unexpected result type: %s' % type(result).__name__)
+        except device_errors.CommandTimeoutError:
+          # Test timeouts don't count as device errors for the purpose
+          # of bad device detection.
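+          # (Only the infrastructure-level errors handled below feed the
+          # |consecutive_device_errors| counter; a test that merely hangs or
+          # runs slowly should not get the device flagged as bad.)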
+ consecutive_device_errors = 0 + + if isinstance(test, list): + results.AddResults( + base_test_result.BaseTestResult( + self._GetUniqueTestName(t), + base_test_result.ResultType.TIMEOUT) for t in test) + else: + results.AddResult( + base_test_result.BaseTestResult( + self._GetUniqueTestName(test), + base_test_result.ResultType.TIMEOUT)) + except device_errors.DeviceUnreachableError: + # If the device is no longer reachable then terminate this + # run_tests_on_device call. + raise + except base_error.BaseError: + # If we get a device error but believe the device is still + # reachable, attempt to continue using it. + if isinstance(tests, test_collection.TestCollection): + rerun = test + + consecutive_device_errors += 1 + if consecutive_device_errors >= 3: + # We believe the device is still reachable and may still be usable, + # but if it fails repeatedly, we shouldn't attempt to keep using + # it. + logging.error('Repeated failures on device %s. Abandoning.', + str(dev)) + raise + + logging.exception( + 'Attempting to continue using device %s despite failure (%d/3).', + str(dev), consecutive_device_errors) + + finally: + if isinstance(tests, test_collection.TestCollection): + if rerun: + tests.add(rerun) + tests.test_completed() + + logging.info('Finished running tests on this device.') + + def stop_tests(_signum, _frame): + logging.critical('Received SIGTERM. Stopping test execution.') + exit_now.set() + raise TestsTerminated() + + try: + with signal_handler.AddSignalHandler(signal.SIGTERM, stop_tests): + self._env.ResetCurrentTry() + while self._env.current_try < self._env.max_tries and tests: + tries = self._env.current_try + grouped_tests = self._GroupTests(tests) + logging.info('STARTING TRY #%d/%d', tries + 1, self._env.max_tries) + if tries > 0 and self._env.recover_devices: + if any(d.build_version_sdk == version_codes.LOLLIPOP_MR1 + for d in self._env.devices): + logging.info( + 'Attempting to recover devices due to known issue on L MR1. ' + 'See crbug.com/787056 for details.') + self._env.parallel_devices.pMap( + device_recovery.RecoverDevice, None) + elif tries + 1 == self._env.max_tries: + logging.info( + 'Attempting to recover devices prior to last test attempt.') + self._env.parallel_devices.pMap( + device_recovery.RecoverDevice, None) + logging.info('Will run %d tests on %d devices: %s', + len(tests), len(self._env.devices), + ', '.join(str(d) for d in self._env.devices)) + for t in tests: + logging.debug(' %s', t) + + try_results = base_test_result.TestRunResults() + test_names = (self._GetUniqueTestName(t) for t in tests) + try_results.AddResults( + base_test_result.BaseTestResult( + t, base_test_result.ResultType.NOTRUN) + for t in test_names if not t.endswith('*')) + + # As soon as we know the names of the tests, we populate |results|. + # The tests in try_results will have their results updated by + # try_results.AddResult() as they are run. 
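+          # The NOTRUN placeholders also guarantee that every requested test
+          # has an entry in |results| even if execution stops early.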
+ results.append(try_results) + + try: + if self._ShouldShardTestsForDevices(): + tc = test_collection.TestCollection( + self._CreateShardsForDevices(grouped_tests)) + self._env.parallel_devices.pMap( + run_tests_on_device, tc, try_results).pGet(None) + else: + self._env.parallel_devices.pMap(run_tests_on_device, + grouped_tests, + try_results).pGet(None) + except TestsTerminated: + for unknown_result in try_results.GetUnknown(): + try_results.AddResult( + base_test_result.BaseTestResult( + unknown_result.GetName(), + base_test_result.ResultType.TIMEOUT, + log=_SIGTERM_TEST_LOG)) + raise + + self._env.IncrementCurrentTry() + tests = self._GetTestsToRetry(tests, try_results) + + logging.info('FINISHED TRY #%d/%d', tries + 1, self._env.max_tries) + if tests: + logging.info('%d failed tests remain.', len(tests)) + else: + logging.info('All tests completed.') + except TestsTerminated: + pass + + def _GetTestsToRetry(self, tests, try_results): + + def is_failure_result(test_result): + if isinstance(test_result, list): + return any(is_failure_result(r) for r in test_result) + return ( + test_result is None + or test_result.GetType() not in ( + base_test_result.ResultType.PASS, + base_test_result.ResultType.SKIP)) + + all_test_results = {r.GetName(): r for r in try_results.GetAll()} + + tests_and_names = ((t, self._GetUniqueTestName(t)) for t in tests) + + tests_and_results = {} + for test, name in tests_and_names: + if name.endswith('*'): + tests_and_results[name] = (test, [ + r for n, r in all_test_results.items() if fnmatch.fnmatch(n, name) + ]) + else: + tests_and_results[name] = (test, all_test_results.get(name)) + + failed_tests_and_results = ((test, result) + for test, result in tests_and_results.values() + if is_failure_result(result)) + + return [t for t, r in failed_tests_and_results if self._ShouldRetry(t, r)] + + def _ApplyExternalSharding(self, tests, shard_index, total_shards): + logging.info('Using external sharding settings. This is shard %d/%d', + shard_index, total_shards) + + if total_shards < 0 or shard_index < 0 or total_shards <= shard_index: + raise InvalidShardingSettings(shard_index, total_shards) + + sharded_tests = [] + + # Sort tests by hash. + # TODO(crbug.com/1257820): Add sorting logic back to _PartitionTests. + tests = self._SortTests(tests) + + # Group tests by tests that should run in the same test invocation - either + # unit tests or batched tests. + grouped_tests = self._GroupTests(tests) + + # Partition grouped tests approximately evenly across shards. + partitioned_tests = self._PartitionTests(grouped_tests, total_shards, + float('inf')) + if len(partitioned_tests) <= shard_index: + return [] + for t in partitioned_tests[shard_index]: + if isinstance(t, list): + sharded_tests.extend(t) + else: + sharded_tests.append(t) + return sharded_tests + + # Sort by hash so we don't put all tests in a slow suite in the same + # partition. + def _SortTests(self, tests): + return sorted(tests, + key=lambda t: hashlib.sha256( + self._GetUniqueTestName(t[0] if isinstance(t, list) else t + ).encode()).hexdigest()) + + # Partition tests evenly into |num_desired_partitions| partitions where + # possible. However, many constraints make partitioning perfectly impossible. + # If the max_partition_size isn't large enough, extra partitions may be + # created (infinite max size should always return precisely the desired + # number of partitions). 
Even if the |max_partition_size| is technically large
+  # enough to hold all of the tests in |num_desired_partitions|, we attempt to
+  # keep test order relatively stable to minimize flakes, so when tests are
+  # grouped (e.g. batched tests), we cannot perfectly fill all partitions as
+  # that would require breaking up groups.
+  def _PartitionTests(self, tests, num_desired_partitions, max_partition_size):
+    # pylint: disable=no-self-use
+    partitions = []
+
+    num_not_yet_allocated = sum(
+        [len(test) - 1 for test in tests if self._CountTestsIndividually(test)])
+    num_not_yet_allocated += len(tests)
+
+    # Fast linear partition approximation capped by max_partition_size. We
+    # cannot round-robin or otherwise re-order tests dynamically because we
+    # want test order to remain stable.
+    partition_size = min(num_not_yet_allocated // num_desired_partitions,
+                         max_partition_size)
+    partitions.append([])
+    last_partition_size = 0
+    for test in tests:
+      test_count = len(test) if self._CountTestsIndividually(test) else 1
+      # Make a new shard whenever we would overfill the previous one. However,
+      # if the size of the test group is larger than the max partition size on
+      # its own, just put the group in its own shard instead of splitting up
+      # the group.
+      if (last_partition_size + test_count > partition_size
+          and last_partition_size > 0):
+        num_desired_partitions -= 1
+        if num_desired_partitions <= 0:
+          # Too many tests for the number of partitions, just fill all
+          # partitions beyond num_desired_partitions.
+          partition_size = max_partition_size
+        else:
+          # Re-balance the remaining partitions.
+          partition_size = min(num_not_yet_allocated // num_desired_partitions,
+                               max_partition_size)
+        partitions.append([])
+        partitions[-1].append(test)
+        last_partition_size = test_count
+      else:
+        partitions[-1].append(test)
+        last_partition_size += test_count
+
+      num_not_yet_allocated -= test_count
+
+    if not partitions[-1]:
+      partitions.pop()
+    return partitions
+
+  def _CountTestsIndividually(self, test):
+    # pylint: disable=no-self-use
+    if not isinstance(test, list):
+      return False
+    annotations = test[0]['annotations']
+    # UnitTests tests are really fast, so to balance shards better, count
+    # UnitTests batches as single tests.
+    return ('Batch' not in annotations
+            or annotations['Batch']['value'] != 'UnitTests')
+
+  def GetTool(self, device):
+    if str(device) not in self._tools:
+      self._tools[str(device)] = valgrind_tools.CreateTool(
+          self._env.tool, device)
+    return self._tools[str(device)]
+
+  def _CreateShardsForDevices(self, tests):
+    raise NotImplementedError
+
+  def _GetUniqueTestName(self, test):
+    # pylint: disable=no-self-use
+    return test
+
+  def _ShouldRetry(self, test, result):
+    # pylint: disable=no-self-use,unused-argument
+    return True
+
+  #override
+  def GetTestsForListing(self):
+    ret = self._GetTests()
+    ret = FlattenTestList(ret)
+    ret.sort()
+    return ret
+
+  def _GetTests(self):
+    raise NotImplementedError
+
+  def _GroupTests(self, tests):
+    # pylint: disable=no-self-use
+    return tests
+
+  def _RunTest(self, device, test):
+    raise NotImplementedError
+
+  def _ShouldShardTestsForDevices(self):
+    raise NotImplementedError
+
+
+def FlattenTestList(values):
+  """Returns a list with all nested lists (shard groupings) expanded."""
+  ret = []
+  for v in values:
+    if isinstance(v, list):
+      ret += v
+    else:
+      ret.append(v)
+  return ret
+
+
+def SetAppCompatibilityFlagsIfNecessary(packages, device):
+  """Sets app compatibility flags on the given packages and device.
+ + Args: + packages: A list of strings containing package names to apply flags to. + device: A DeviceUtils instance to apply the flags on. + """ + + def set_flag_for_packages(flag, enable): + enable_str = 'enable' if enable else 'disable' + for p in packages: + cmd = ['am', 'compat', enable_str, flag, p] + device.RunShellCommand(cmd) + + sdk_version = device.build_version_sdk + if sdk_version >= version_codes.R: + # These flags are necessary to use the legacy storage permissions on R+. + # See crbug.com/1173699 for more information. + set_flag_for_packages('DEFAULT_SCOPED_STORAGE', False) + set_flag_for_packages('FORCE_ENABLE_SCOPED_STORAGE', False) + + +class NoTestsError(Exception): + """Error for when no tests are found.""" diff --git a/android/pylib/local/device/local_device_test_run_test.py b/android/pylib/local/device/local_device_test_run_test.py new file mode 100755 index 000000000000..5f0068ae03f8 --- /dev/null +++ b/android/pylib/local/device/local_device_test_run_test.py @@ -0,0 +1,174 @@ +#!/usr/bin/env vpython3 +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# pylint: disable=protected-access + + +import unittest + +from pylib.base import base_test_result +from pylib.local.device import local_device_test_run + +import mock # pylint: disable=import-error + + +class SubstituteDeviceRootTest(unittest.TestCase): + + def testNoneDevicePath(self): + self.assertEqual( + '/fake/device/root', + local_device_test_run.SubstituteDeviceRoot(None, '/fake/device/root')) + + def testStringDevicePath(self): + self.assertEqual( + '/another/fake/device/path', + local_device_test_run.SubstituteDeviceRoot('/another/fake/device/path', + '/fake/device/root')) + + def testListWithNoneDevicePath(self): + self.assertEqual( + '/fake/device/root/subpath', + local_device_test_run.SubstituteDeviceRoot([None, 'subpath'], + '/fake/device/root')) + + def testListWithoutNoneDevicePath(self): + self.assertEqual( + '/another/fake/device/path', + local_device_test_run.SubstituteDeviceRoot( + ['/', 'another', 'fake', 'device', 'path'], '/fake/device/root')) + + +class TestLocalDeviceTestRun(local_device_test_run.LocalDeviceTestRun): + + # pylint: disable=abstract-method + + def __init__(self): + super().__init__(mock.MagicMock(), mock.MagicMock()) + + +class TestLocalDeviceNonStringTestRun( + local_device_test_run.LocalDeviceTestRun): + + # pylint: disable=abstract-method + + def __init__(self): + super().__init__(mock.MagicMock(), mock.MagicMock()) + + def _GetUniqueTestName(self, test): + return test['name'] + + +class LocalDeviceTestRunTest(unittest.TestCase): + + def testSortTests(self): + test_run = TestLocalDeviceTestRun() + self.assertEqual(test_run._SortTests(['a', 'b', 'c', 'd', 'e', 'f', 'g']), + ['d', 'f', 'c', 'b', 'e', 'a', 'g']) + + def testGetTestsToRetry_allTestsPassed(self): + results = [ + base_test_result.BaseTestResult( + 'Test1', base_test_result.ResultType.PASS), + base_test_result.BaseTestResult( + 'Test2', base_test_result.ResultType.PASS), + ] + + tests = [r.GetName() for r in results] + try_results = base_test_result.TestRunResults() + try_results.AddResults(results) + + test_run = TestLocalDeviceTestRun() + tests_to_retry = test_run._GetTestsToRetry(tests, try_results) + self.assertEqual(0, len(tests_to_retry)) + + def testGetTestsToRetry_testFailed(self): + results = [ + base_test_result.BaseTestResult( + 'Test1', base_test_result.ResultType.FAIL), + base_test_result.BaseTestResult( + 
'Test2', base_test_result.ResultType.PASS), + ] + + tests = [r.GetName() for r in results] + try_results = base_test_result.TestRunResults() + try_results.AddResults(results) + + test_run = TestLocalDeviceTestRun() + tests_to_retry = test_run._GetTestsToRetry(tests, try_results) + self.assertEqual(1, len(tests_to_retry)) + self.assertIn('Test1', tests_to_retry) + + def testGetTestsToRetry_testUnknown(self): + results = [ + base_test_result.BaseTestResult( + 'Test2', base_test_result.ResultType.PASS), + ] + + tests = ['Test1'] + [r.GetName() for r in results] + try_results = base_test_result.TestRunResults() + try_results.AddResults(results) + + test_run = TestLocalDeviceTestRun() + tests_to_retry = test_run._GetTestsToRetry(tests, try_results) + self.assertEqual(1, len(tests_to_retry)) + self.assertIn('Test1', tests_to_retry) + + def testGetTestsToRetry_wildcardFilter_allPass(self): + results = [ + base_test_result.BaseTestResult( + 'TestCase.Test1', base_test_result.ResultType.PASS), + base_test_result.BaseTestResult( + 'TestCase.Test2', base_test_result.ResultType.PASS), + ] + + tests = ['TestCase.*'] + try_results = base_test_result.TestRunResults() + try_results.AddResults(results) + + test_run = TestLocalDeviceTestRun() + tests_to_retry = test_run._GetTestsToRetry(tests, try_results) + self.assertEqual(0, len(tests_to_retry)) + + def testGetTestsToRetry_wildcardFilter_oneFails(self): + results = [ + base_test_result.BaseTestResult( + 'TestCase.Test1', base_test_result.ResultType.PASS), + base_test_result.BaseTestResult( + 'TestCase.Test2', base_test_result.ResultType.FAIL), + ] + + tests = ['TestCase.*'] + try_results = base_test_result.TestRunResults() + try_results.AddResults(results) + + test_run = TestLocalDeviceTestRun() + tests_to_retry = test_run._GetTestsToRetry(tests, try_results) + self.assertEqual(1, len(tests_to_retry)) + self.assertIn('TestCase.*', tests_to_retry) + + def testGetTestsToRetry_nonStringTests(self): + results = [ + base_test_result.BaseTestResult( + 'TestCase.Test1', base_test_result.ResultType.PASS), + base_test_result.BaseTestResult( + 'TestCase.Test2', base_test_result.ResultType.FAIL), + ] + + tests = [ + {'name': 'TestCase.Test1'}, + {'name': 'TestCase.Test2'}, + ] + try_results = base_test_result.TestRunResults() + try_results.AddResults(results) + + test_run = TestLocalDeviceNonStringTestRun() + tests_to_retry = test_run._GetTestsToRetry(tests, try_results) + self.assertEqual(1, len(tests_to_retry)) + self.assertIsInstance(tests_to_retry[0], dict) + self.assertEqual(tests[1], tests_to_retry[0]) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/android/pylib/local/emulator/OWNERS b/android/pylib/local/emulator/OWNERS new file mode 100644 index 000000000000..36abc1801d80 --- /dev/null +++ b/android/pylib/local/emulator/OWNERS @@ -0,0 +1,3 @@ +bpastene@chromium.org +hypan@google.com +jbudorick@chromium.org diff --git a/android/pylib/local/emulator/__init__.py b/android/pylib/local/emulator/__init__.py new file mode 100644 index 000000000000..401c54b0d9c6 --- /dev/null +++ b/android/pylib/local/emulator/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
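
A minimal, self-contained sketch (not part of the diff) of the hash-based
ordering that LocalDeviceTestRun._SortTests implements and testSortTests above
asserts: sorting by the sha256 hex digest of each test's unique name is
deterministic across machines, so every shard can derive the same global test
order without any coordination.

    import hashlib

    def sort_tests(test_names):
      # Same sort key as _SortTests: the hex digest of the test's name.
      return sorted(
          test_names,
          key=lambda name: hashlib.sha256(name.encode()).hexdigest())

    print(sort_tests(['a', 'b', 'c', 'd', 'e', 'f', 'g']))
    # Per testSortTests above: ['d', 'f', 'c', 'b', 'e', 'a', 'g']
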
diff --git a/android/pylib/local/emulator/avd.py b/android/pylib/local/emulator/avd.py new file mode 100644 index 000000000000..62db9b591aab --- /dev/null +++ b/android/pylib/local/emulator/avd.py @@ -0,0 +1,1161 @@ +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import collections +import contextlib +import glob +import json +import logging +import os +import socket +import stat +import subprocess +import threading +import time + +from google.protobuf import text_format # pylint: disable=import-error + +from devil.android import apk_helper +from devil.android import device_utils +from devil.android import settings +from devil.android.sdk import adb_wrapper +from devil.android.tools import system_app +from devil.utils import cmd_helper +from devil.utils import timeout_retry +from py_utils import tempfile_ext +from pylib import constants +from pylib.local.emulator import ini +from pylib.local.emulator.proto import avd_pb2 + +# A common root directory to store the CIPD packages for creating or starting +# the emulator instance, e.g. emulator binary, system images, AVDs. +COMMON_CIPD_ROOT = os.path.join(constants.DIR_SOURCE_ROOT, '.android_emulator') + +# Packages that are needed for runtime. +_PACKAGES_RUNTIME = object() +# Packages that are needed during AVD creation. +_PACKAGES_CREATION = object() +# All the packages that could exist in the AVD config file. +_PACKAGES_ALL = object() + +# These files are used as backing files for corresponding qcow2 images. +_BACKING_FILES = ('system.img', 'vendor.img') + +_DEFAULT_AVDMANAGER_PATH = os.path.join(constants.ANDROID_SDK_ROOT, + 'cmdline-tools', 'latest', 'bin', + 'avdmanager') +# Default to a 480dp mdpi screen (a relatively large phone). +# See https://developer.android.com/training/multiscreen/screensizes +# and https://developer.android.com/training/multiscreen/screendensities +# for more information. +_DEFAULT_SCREEN_DENSITY = 160 +_DEFAULT_SCREEN_HEIGHT = 960 +_DEFAULT_SCREEN_WIDTH = 480 + +# Default to swiftshader_indirect since it works for most cases. +_DEFAULT_GPU_MODE = 'swiftshader_indirect' + +# The snapshot name to load/save when writable_system=False. +# This is the default name used by the emulator binary. +_DEFAULT_SNAPSHOT_NAME = 'default_boot' + +# crbug.com/1275767: Set long press timeout to 1000ms to reduce the flakiness +# caused by click being incorrectly interpreted as longclick. +_LONG_PRESS_TIMEOUT = '1000' + +# The snapshot name to load/save when writable_system=True +_SYSTEM_SNAPSHOT_NAME = 'boot_with_system' + +_SDCARD_NAME = 'cr-sdcard.img' + + +class AvdException(Exception): + """Raised when this module has a problem interacting with an AVD.""" + + def __init__(self, summary, command=None, stdout=None, stderr=None): + message_parts = [summary] + if command: + message_parts.append(' command: %s' % ' '.join(command)) + if stdout: + message_parts.append(' stdout:') + message_parts.extend(' %s' % line for line in stdout.splitlines()) + if stderr: + message_parts.append(' stderr:') + message_parts.extend(' %s' % line for line in stderr.splitlines()) + + # avd.py is executed with python2. + # pylint: disable=R1725 + super(AvdException, self).__init__('\n'.join(message_parts)) + + +def _Load(avd_proto_path): + """Loads an Avd proto from a textpb file at the given path. + + Should not be called outside of this module. + + Args: + avd_proto_path: path to a textpb file containing an Avd message. 
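+
+  A minimal illustrative Avd textpb (field values here are made up; see
+  //build/android/pylib/local/emulator/proto/avd.proto for the authoritative
+  schema) looks like:
+
+    avd_name: "my_avd"
+    emulator_package {
+      package_name: "path/to/emulator/cipd/package"
+      version: "some-cipd-version"
+      dest_path: "emulator"
+    }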
+ """ + with open(avd_proto_path) as avd_proto_file: + return text_format.Merge(avd_proto_file.read(), avd_pb2.Avd()) + + +def _FindMinSdkFile(apk_dir, min_sdk): + """Finds the apk file associated with the min_sdk file. + + This reads a version.json file located in the apk_dir to find an apk file + that is closest without going over the min_sdk. + + Args: + apk_dir: The directory to look for apk files. + min_sdk: The minimum sdk version supported by the device. + + Returns: + The path to the file that suits the minSdkFile or None + """ + json_file = os.path.join(apk_dir, 'version.json') + if not os.path.exists(json_file): + logging.error('Json version file not found: %s', json_file) + return None + + min_sdk_found = None + curr_min_sdk_version = 0 + with open(json_file) as f: + data = json.loads(f.read()) + # Finds the entry that is closest to min_sdk without going over. + for entry in data: + if (entry['min_sdk'] > curr_min_sdk_version + and entry['min_sdk'] <= min_sdk): + min_sdk_found = entry + curr_min_sdk_version = entry['min_sdk'] + + if not min_sdk_found: + logging.error('No suitable apk file found that suits the minimum sdk %d.', + min_sdk) + return None + + logging.info('Found apk file for mininum sdk %d: %r with version %r', + min_sdk, min_sdk_found['file_name'], + min_sdk_found['version_name']) + return os.path.join(apk_dir, min_sdk_found['file_name']) + + +class _AvdManagerAgent: + """Private utility for interacting with avdmanager.""" + + def __init__(self, avd_home, sdk_root): + """Create an _AvdManagerAgent. + + Args: + avd_home: path to ANDROID_AVD_HOME directory. + Typically something like /path/to/dir/.android/avd + sdk_root: path to SDK root directory. + """ + self._avd_home = avd_home + self._sdk_root = sdk_root + + self._env = dict(os.environ) + + # The avdmanager from cmdline-tools would look two levels + # up from toolsdir to find the SDK root. + # Pass avdmanager a fake directory under the directory in which + # we install the system images s.t. avdmanager can find the + # system images. + fake_tools_dir = os.path.join(self._sdk_root, 'non-existent-tools', + 'non-existent-version') + self._env.update({ + 'ANDROID_AVD_HOME': + self._avd_home, + 'AVDMANAGER_OPTS': + '-Dcom.android.sdkmanager.toolsdir=%s' % fake_tools_dir, + 'JAVA_HOME': + constants.JAVA_HOME, + }) + + def Create(self, avd_name, system_image, force=False): + """Call `avdmanager create`. + + Args: + avd_name: name of the AVD to create. + system_image: system image to use for the AVD. + force: whether to force creation, overwriting any existing + AVD with the same name. + """ + create_cmd = [ + _DEFAULT_AVDMANAGER_PATH, + '-v', + 'create', + 'avd', + '-n', + avd_name, + '-k', + system_image, + ] + if force: + create_cmd += ['--force'] + + create_proc = cmd_helper.Popen(create_cmd, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env=self._env) + output, error = create_proc.communicate(input='\n') + if create_proc.returncode != 0: + raise AvdException('AVD creation failed', + command=create_cmd, + stdout=output, + stderr=error) + + for line in output.splitlines(): + logging.info(' %s', line) + + def Delete(self, avd_name): + """Call `avdmanager delete`. + + Args: + avd_name: name of the AVD to delete. 
+ """ + delete_cmd = [ + _DEFAULT_AVDMANAGER_PATH, + '-v', + 'delete', + 'avd', + '-n', + avd_name, + ] + try: + for line in cmd_helper.IterCmdOutputLines(delete_cmd, env=self._env): + logging.info(' %s', line) + except subprocess.CalledProcessError as e: + # avd.py is executed with python2. + # pylint: disable=W0707 + raise AvdException('AVD deletion failed: %s' % str(e), command=delete_cmd) + + def List(self): + """List existing AVDs by the name.""" + list_cmd = [ + _DEFAULT_AVDMANAGER_PATH, + '-v', + 'list', + 'avd', + '-c', + ] + output = cmd_helper.GetCmdOutput(list_cmd, env=self._env) + return output.splitlines() + + def IsAvailable(self, avd_name): + """Check if an AVD exists or not.""" + return avd_name in self.List() + + +class AvdConfig: + """Represents a particular AVD configuration. + + This class supports creation, installation, and execution of an AVD + from a given Avd proto message, as defined in + //build/android/pylib/local/emulator/proto/avd.proto. + """ + + def __init__(self, avd_proto_path): + """Create an AvdConfig object. + + Args: + avd_proto_path: path to a textpb file containing an Avd message. + """ + self.avd_proto_path = avd_proto_path + self._config = _Load(avd_proto_path) + + self._initialized = False + self._initializer_lock = threading.Lock() + + @property + def emulator_home(self): + """User-specific emulator configuration directory. + + It corresponds to the environment variable $ANDROID_EMULATOR_HOME. + Configs like advancedFeatures.ini are expected to be under this dir. + """ + return os.path.join(COMMON_CIPD_ROOT, self._config.avd_package.dest_path) + + @property + def emulator_sdk_root(self): + """The path to the SDK installation directory. + + It corresponds to the environment variable $ANDROID_HOME. + + To be a valid sdk root, it requires to have the subdirecotries "platforms" + and "platform-tools". See http://bit.ly/2YAkyFE for context. + + Also, it is expected to have subdirecotries "emulator" and "system-images". + """ + emulator_sdk_root = os.path.join(COMMON_CIPD_ROOT, + self._config.emulator_package.dest_path) + # Ensure this is a valid sdk root. + required_dirs = [ + os.path.join(emulator_sdk_root, 'platforms'), + os.path.join(emulator_sdk_root, 'platform-tools'), + ] + for d in required_dirs: + if not os.path.exists(d): + os.makedirs(d) + + return emulator_sdk_root + + @property + def emulator_path(self): + """The path to the emulator binary.""" + return os.path.join(self.emulator_sdk_root, 'emulator', 'emulator') + + @property + def qemu_img_path(self): + """The path to the qemu-img binary. + + This is used to rebase the paths in qcow2 images. + """ + return os.path.join(self.emulator_sdk_root, 'emulator', 'qemu-img') + + @property + def mksdcard_path(self): + """The path to the mksdcard binary. + + This is used to create a sdcard image. + """ + return os.path.join(self.emulator_sdk_root, 'emulator', 'mksdcard') + + @property + def avd_settings(self): + """The AvdSettings in the avd proto file. + + This defines how to configure the AVD at creation. 
+ """ + return self._config.avd_settings + + @property + def avd_name(self): + """The name of the AVD to create or use.""" + return self._config.avd_name + + @property + def avd_home(self): + """The path that contains the files of one or multiple AVDs.""" + avd_home = os.path.join(self.emulator_home, 'avd') + if not os.path.exists(avd_home): + os.makedirs(avd_home) + + return avd_home + + @property + def _avd_dir(self): + """The path that contains the files of the given AVD.""" + return os.path.join(self.avd_home, '%s.avd' % self.avd_name) + + @property + def _system_image_dir(self): + """The path of the directory that directly contains the system images. + + For example, if the system_image_name is + "system-images;android-33;google_apis;x86_64" + + The _system_image_dir will be: + //system-images/android-33/google_apis/x86_64 + + This is used to rebase the paths in qcow2 images. + """ + return os.path.join(COMMON_CIPD_ROOT, + self._config.system_image_package.dest_path, + *self._config.system_image_name.split(';')) + + @property + def _root_ini_path(self): + """The .ini file of the given AVD.""" + return os.path.join(self.avd_home, '%s.ini' % self.avd_name) + + @property + def _config_ini_path(self): + """The config.ini file under _avd_dir.""" + return os.path.join(self._avd_dir, 'config.ini') + + @property + def _features_ini_path(self): + return os.path.join(self.emulator_home, 'advancedFeatures.ini') + + @property + def xdg_config_dir(self): + """The base directory to store qt config file. + + This dir should be added to the env variable $XDG_CONFIG_DIRS so that + _qt_config_path can take effect. See https://bit.ly/3HIQRZ3 for context. + """ + config_dir = os.path.join(self.emulator_home, '.config') + if not os.path.exists(config_dir): + os.makedirs(config_dir) + + return config_dir + + @property + def _qt_config_path(self): + """The qt config file for emulator.""" + qt_config_dir = os.path.join(self.xdg_config_dir, + 'Android Open Source Project') + if not os.path.exists(qt_config_dir): + os.makedirs(qt_config_dir) + + return os.path.join(qt_config_dir, 'Emulator.conf') + + def HasSnapshot(self, snapshot_name): + """Check if a given snapshot exists or not.""" + snapshot_path = os.path.join(self._avd_dir, 'snapshots', snapshot_name) + return os.path.exists(snapshot_path) + + def Create(self, + force=False, + snapshot=False, + keep=False, + additional_apks=None, + privileged_apk_tuples=None, + cipd_json_output=None, + dry_run=False): + """Create an instance of the AVD CIPD package. + + This method: + - installs the requisite system image + - creates the AVD + - modifies the AVD's ini files to support running chromium tests + in chromium infrastructure + - optionally starts, installs additional apks and/or privileged apks, and + stops the AVD for snapshotting (default no) + - By default creates and uploads an instance of the AVD CIPD package + (can be turned off by dry_run flag). + - optionally deletes the AVD (default yes) + + Args: + force: bool indicating whether to force create the AVD. + snapshot: bool indicating whether to snapshot the AVD before creating + the CIPD package. + keep: bool indicating whether to keep the AVD after creating + the CIPD package. + additional_apks: a list of strings contains the paths to the APKs. These + APKs will be installed after AVD is started. 
+      privileged_apk_tuples: a list of (apk_path, device_partition) tuples
+        where |apk_path| is a string containing the path to the APK, and
+        |device_partition| is a string indicating the system image partition on
+        device that contains the "priv-app" directory, e.g. "/system",
+        "/product".
+      cipd_json_output: string path to pass to `cipd create` via -json-output.
+      dry_run: When set to True, it will skip the CIPD package creation
+        after creating the AVD.
+    """
+    logging.info('Installing required packages.')
+    self._InstallCipdPackages(_PACKAGES_CREATION)
+
+    avd_manager = _AvdManagerAgent(avd_home=self.avd_home,
+                                   sdk_root=self.emulator_sdk_root)
+
+    logging.info('Creating AVD.')
+    avd_manager.Create(avd_name=self.avd_name,
+                       system_image=self._config.system_image_name,
+                       force=force)
+
+    try:
+      logging.info('Modifying AVD configuration.')
+
+      # Clear out any previous configuration or state from this AVD.
+      with ini.update_ini_file(self._root_ini_path) as r_ini_contents:
+        r_ini_contents['path.rel'] = 'avd/%s.avd' % self.avd_name
+
+      with ini.update_ini_file(self._features_ini_path) as f_ini_contents:
+        # The features_ini file will not be refreshed by avdmanager during
+        # creation, so explicitly clear its content to exclude any leftover
+        # from a previous creation.
+        f_ini_contents.clear()
+        f_ini_contents.update(self.avd_settings.advanced_features)
+
+      with ini.update_ini_file(self._config_ini_path) as config_ini_contents:
+        # Update avd_properties first so that they won't override settings
+        # like screen and ram_size.
+        config_ini_contents.update(self.avd_settings.avd_properties)
+
+        height = self.avd_settings.screen.height or _DEFAULT_SCREEN_HEIGHT
+        width = self.avd_settings.screen.width or _DEFAULT_SCREEN_WIDTH
+        density = self.avd_settings.screen.density or _DEFAULT_SCREEN_DENSITY
+
+        config_ini_contents.update({
+            'disk.dataPartition.size': '4G',
+            'hw.keyboard': 'yes',
+            'hw.lcd.density': density,
+            'hw.lcd.height': height,
+            'hw.lcd.width': width,
+            'hw.mainKeys': 'no',  # Show nav buttons on screen.
+        })
+
+        if self.avd_settings.ram_size:
+          config_ini_contents['hw.ramSize'] = self.avd_settings.ram_size
+
+        config_ini_contents['hw.sdCard'] = 'yes'
+        if self.avd_settings.sdcard.size:
+          sdcard_path = os.path.join(self._avd_dir, _SDCARD_NAME)
+          cmd_helper.RunCmd([
+              self.mksdcard_path,
+              self.avd_settings.sdcard.size,
+              sdcard_path,
+          ])
+          config_ini_contents['hw.sdCard.path'] = sdcard_path
+
+      if not additional_apks:
+        additional_apks = []
+      for pkg in self._config.additional_apk:
+        apk_dir = os.path.join(COMMON_CIPD_ROOT, pkg.dest_path)
+        apk_file = _FindMinSdkFile(apk_dir, self._config.min_sdk)
+        # Some of these files come from chrome internal, so may not be
+        # available to non-internal permissioned users.
+        if apk_file and os.path.exists(apk_file):
+          logging.info('Adding additional apk for install: %s', apk_file)
+          additional_apks.append(apk_file)
+
+      if not privileged_apk_tuples:
+        privileged_apk_tuples = []
+      for pkg in self._config.privileged_apk:
+        apk_dir = os.path.join(COMMON_CIPD_ROOT, pkg.dest_path)
+        apk_file = _FindMinSdkFile(apk_dir, self._config.min_sdk)
+        # Some of these files come from chrome internal, so may not be
+        # available to non-internal permissioned users.
+        if apk_file and os.path.exists(apk_file):
+          logging.info('Adding privileged apk for install: %s', apk_file)
+          privileged_apk_tuples.append(
+              (apk_file, self._config.install_privileged_apk_partition))
+
+      # Start & stop the AVD.
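+      # Booting the AVD once and stopping it cleanly lets the system image
+      # finish first-boot setup and, when |snapshot| is set, produces the
+      # boot snapshot that ships in the CIPD package.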
+ self._Initialize() + instance = _AvdInstance(self) + # Enable debug for snapshot when it is set to True + debug_tags = 'time,init,snapshot' if snapshot else None + # Installing privileged apks requires modifying the system + # image. + writable_system = bool(privileged_apk_tuples) + instance.Start(ensure_system_settings=False, + read_only=False, + writable_system=writable_system, + gpu_mode=_DEFAULT_GPU_MODE, + debug_tags=debug_tags) + + assert instance.device is not None, '`instance.device` not initialized.' + # Android devices with full-disk encryption are encrypted on first boot, + # and then get decrypted to continue the boot process (See details in + # https://bit.ly/3agmjcM). + # Wait for this step to complete since it can take a while for old OSs + # like M, otherwise the avd may have "Encryption Unsuccessful" error. + instance.device.WaitUntilFullyBooted(decrypt=True, timeout=180, retries=0) + + if additional_apks: + for apk in additional_apks: + instance.device.Install(apk, allow_downgrade=True, reinstall=True) + package_name = apk_helper.GetPackageName(apk) + package_version = instance.device.GetApplicationVersion(package_name) + logging.info('The version for package %r on the device is %r', + package_name, package_version) + + if privileged_apk_tuples: + system_app.InstallPrivilegedApps(instance.device, privileged_apk_tuples) + for apk, _ in privileged_apk_tuples: + package_name = apk_helper.GetPackageName(apk) + package_version = instance.device.GetApplicationVersion(package_name) + logging.info('The version for package %r on the device is %r', + package_name, package_version) + + # Always disable the network to prevent built-in system apps from + # updating themselves, which could take over package manager and + # cause shell command timeout. + logging.info('Disabling the network.') + settings.ConfigureContentSettings(instance.device, + settings.NETWORK_DISABLED_SETTINGS) + + if snapshot: + # Reboot so that changes like disabling network can take effect. + instance.device.Reboot() + instance.SaveSnapshot() + + instance.Stop() + + # The multiinstance lock file seems to interfere with the emulator's + # operation in some circumstances (beyond the obvious -read-only ones), + # and there seems to be no mechanism by which it gets closed or deleted. + # See https://bit.ly/2pWQTH7 for context. 
+      multi_instance_lock_file = os.path.join(self._avd_dir,
+                                              'multiinstance.lock')
+      if os.path.exists(multi_instance_lock_file):
+        os.unlink(multi_instance_lock_file)
+
+      package_def_content = {
+          'package':
+              self._config.avd_package.package_name,
+          'root':
+              self.emulator_home,
+          'install_mode':
+              'copy',
+          'data': [{
+              'dir': os.path.relpath(self._avd_dir, self.emulator_home)
+          }, {
+              'file':
+                  os.path.relpath(self._root_ini_path, self.emulator_home)
+          }, {
+              'file':
+                  os.path.relpath(self._features_ini_path, self.emulator_home)
+          }],
+      }
+
+      logging.info('Creating AVD CIPD package.')
+      logging.info('ensure file content: %s',
+                   json.dumps(package_def_content, indent=2))
+
+      with tempfile_ext.TemporaryFileName(suffix='.json') as package_def_path:
+        with open(package_def_path, 'w') as package_def_file:
+          json.dump(package_def_content, package_def_file)
+
+        logging.info('  %s', self._config.avd_package.package_name)
+        cipd_create_cmd = [
+            'cipd',
+            'create',
+            '-pkg-def',
+            package_def_path,
+            '-tag',
+            'emulator_version:%s' % self._config.emulator_package.version,
+            '-tag',
+            'system_image_version:%s' %
+            self._config.system_image_package.version,
+        ]
+        if cipd_json_output:
+          cipd_create_cmd.extend([
+              '-json-output',
+              cipd_json_output,
+          ])
+        logging.info('running %r%s', cipd_create_cmd,
+                     ' (dry_run)' if dry_run else '')
+        if not dry_run:
+          try:
+            for line in cmd_helper.IterCmdOutputLines(cipd_create_cmd):
+              logging.info('  %s', line)
+          except subprocess.CalledProcessError as e:
+            # avd.py is executed with python2.
+            # pylint: disable=W0707
+            raise AvdException('CIPD package creation failed: %s' % str(e),
+                               command=cipd_create_cmd)
+
+    finally:
+      if not keep:
+        logging.info('Deleting AVD.')
+        avd_manager.Delete(avd_name=self.avd_name)
+
+  def IsAvailable(self):
+    """Returns whether the emulator is installed and up-to-date."""
+    if not os.path.exists(self._config_ini_path):
+      return False
+
+    # Skip the version check when no version exists, to prevent IsAvailable()
+    # from returning False for emulators set up using Create() (rather than
+    # Install()).
+    for cipd_root, pkgs in self._IterCipdPackages(_PACKAGES_RUNTIME,
+                                                  check_version=False):
+      stdout = subprocess.run(['cipd', 'installed', '--root', cipd_root],
+                              capture_output=True,
+                              check=False,
+                              encoding='utf8').stdout
+      # Output looks like:
+      # Packages:
+      #   name1:version1
+      #   name2:version2
+      installed = [l.strip().split(':', 1) for l in stdout.splitlines()[1:]]
+
+      if any([p.package_name, p.version] not in installed for p in pkgs):
+        return False
+    return True
+
+  def Uninstall(self):
+    """Uninstall all the artifacts associated with the given config.
+
+    Artifacts include:
+      - CIPD packages specified in the avd config.
+      - The local AVD created by `Create`, if present.
+    """
+    # Delete any existing local AVD. This must occur before deleting CIPD
+    # packages because an AVD needs a system image to be recognized by
+    # avdmanager.
+    avd_manager = _AvdManagerAgent(avd_home=self.avd_home,
+                                   sdk_root=self.emulator_sdk_root)
+    if avd_manager.IsAvailable(self.avd_name):
+      logging.info('Deleting local AVD %s', self.avd_name)
+      avd_manager.Delete(self.avd_name)
+
+    # Delete installed CIPD packages.
+    for cipd_root, _ in self._IterCipdPackages(_PACKAGES_ALL,
+                                               check_version=False):
+      logging.info('Uninstalling packages in %s', cipd_root)
+      if not os.path.exists(cipd_root):
+        continue
+      # Create an empty ensure file to remove any installed CIPD packages.
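+      # (An ensure file normally lists one "<package-name> <version>" line per
+      # package; syncing a root against a file with no package lines makes
+      # `cipd ensure` remove everything it previously installed there.)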
+      ensure_path = os.path.join(cipd_root, '.ensure')
+      with open(ensure_path, 'w') as ensure_file:
+        ensure_file.write('$ParanoidMode CheckIntegrity\n\n')
+      ensure_cmd = [
+          'cipd',
+          'ensure',
+          '-ensure-file',
+          ensure_path,
+          '-root',
+          cipd_root,
+      ]
+      try:
+        for line in cmd_helper.IterCmdOutputLines(ensure_cmd):
+          logging.info('  %s', line)
+      except subprocess.CalledProcessError as e:
+        # avd.py is executed with python2.
+        # pylint: disable=W0707
+        raise AvdException('Failed to uninstall CIPD packages: %s' % str(e),
+                           command=ensure_cmd)
+
+  def Install(self):
+    """Installs the requested CIPD packages and prepares them for use.
+
+    This includes making files writeable and revising some of the
+    emulator's internal config files.
+
+    Returns: None
+    Raises: AvdException on failure to install.
+    """
+    self._InstallCipdPackages(_PACKAGES_RUNTIME)
+    self._MakeWriteable()
+    self._UpdateConfigs()
+    self._RebaseQcow2Images()
+
+  def _RebaseQcow2Images(self):
+    """Rebase the paths in qcow2 images.
+
+    qcow2 files may exist in the AVD directory with hard-coded paths to their
+    backing files, e.g. system.img, vendor.img. Such paths must be rebased if
+    the AVD is moved to a different directory, or the emulator will fail to
+    boot.
+    """
+    for f in _BACKING_FILES:
+      qcow2_image_path = os.path.join(self._avd_dir, '%s.qcow2' % f)
+      if not os.path.exists(qcow2_image_path):
+        continue
+      backing_file_path = os.path.join(self._system_image_dir, f)
+      logging.info('Rebasing the qcow2 image %r with the backing file %r',
+                   qcow2_image_path, backing_file_path)
+      cmd_helper.RunCmd([
+          self.qemu_img_path,
+          'rebase',
+          '-u',
+          '-f',
+          'qcow2',
+          '-b',
+          # The path to the backing file must be relative to the qcow2 image.
+          os.path.relpath(backing_file_path, os.path.dirname(qcow2_image_path)),
+          qcow2_image_path,
+      ])
+
+  def _ListPackages(self, packages):
+    if packages is _PACKAGES_RUNTIME:
+      packages = [
+          self._config.avd_package,
+          self._config.emulator_package,
+          self._config.system_image_package,
+      ]
+    elif packages is _PACKAGES_CREATION:
+      packages = [
+          self._config.emulator_package,
+          self._config.system_image_package,
+          *self._config.privileged_apk,
+          *self._config.additional_apk,
+      ]
+    elif packages is _PACKAGES_ALL:
+      packages = [
+          self._config.avd_package,
+          self._config.emulator_package,
+          self._config.system_image_package,
+          *self._config.privileged_apk,
+          *self._config.additional_apk,
+      ]
+    return packages
+
+  def _IterCipdPackages(self, packages, check_version=True):
+    """Iterate over a list of CIPD packages, grouped by their CIPD roots.
+
+    Args:
+      packages: a list of packages from an AVD config.
+      check_version: if set, raise AvdException when a package has no version.
+    """
+    pkgs_by_dir = collections.defaultdict(list)
+    for pkg in self._ListPackages(packages):
+      if pkg.version:
+        pkgs_by_dir[pkg.dest_path].append(pkg)
+      elif check_version:
+        raise AvdException('Expecting a version for the package %s' %
+                           pkg.package_name)
+
+    for pkg_dir, pkgs in pkgs_by_dir.items():
+      cipd_root = os.path.join(COMMON_CIPD_ROOT, pkg_dir)
+      yield cipd_root, pkgs
+
+  def _InstallCipdPackages(self, packages, check_version=True):
+    for cipd_root, pkgs in self._IterCipdPackages(packages,
+                                                  check_version=check_version):
+      logging.info('Installing packages in %s', cipd_root)
+      if not os.path.exists(cipd_root):
+        os.makedirs(cipd_root)
+      ensure_path = os.path.join(cipd_root, '.ensure')
+      with open(ensure_path, 'w') as ensure_file:
+        # Make CIPD ensure that all files are present and correct,
+        # even if it thinks the package is installed.
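+        # The resulting ensure file looks like (package names illustrative):
+        #   $ParanoidMode CheckIntegrity
+        #
+        #   chromium/third_party/android_sdk/emulator  <version>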
+        ensure_file.write('$ParanoidMode CheckIntegrity\n\n')
+        for pkg in pkgs:
+          ensure_file.write('%s %s\n' % (pkg.package_name, pkg.version))
+          logging.info('  %s %s', pkg.package_name, pkg.version)
+      ensure_cmd = [
+          'cipd',
+          'ensure',
+          '-ensure-file',
+          ensure_path,
+          '-root',
+          cipd_root,
+      ]
+      try:
+        for line in cmd_helper.IterCmdOutputLines(ensure_cmd):
+          logging.info('  %s', line)
+      except subprocess.CalledProcessError as e:
+        # avd.py is executed with python2.
+        # pylint: disable=W0707
+        raise AvdException('Failed to install CIPD packages: %s' % str(e),
+                           command=ensure_cmd)
+
+  def _MakeWriteable(self):
+    # The emulator requires that some files are writable.
+    for dirname, _, filenames in os.walk(self.emulator_home):
+      for f in filenames:
+        path = os.path.join(dirname, f)
+        mode = os.lstat(path).st_mode
+        if mode & stat.S_IRUSR:
+          mode = mode | stat.S_IWUSR
+        os.chmod(path, mode)
+
+  def _UpdateConfigs(self):
+    """Update various properties in config files after installation.
+
+    AVD config files contain some properties which can differ between AVD
+    creation and installation, e.g. hw.sdCard.path, which is an absolute
+    path. Update their values so that:
+    * The emulator instance can be booted correctly.
+    * The snapshot can be loaded successfully.
+    """
+    logging.info('Updating AVD configurations.')
+    # Update the absolute avd path in the root_ini file.
+    with ini.update_ini_file(self._root_ini_path) as r_ini_contents:
+      r_ini_contents['path'] = self._avd_dir
+
+    # Update hardware settings.
+    config_paths = [self._config_ini_path]
+    # The hardware.ini file within each snapshot needs to be updated as well.
+    hw_ini_glob_pattern = os.path.join(self._avd_dir, 'snapshots', '*',
+                                       'hardware.ini')
+    config_paths.extend(glob.glob(hw_ini_glob_pattern))
+
+    properties = {}
+    # Update hw.sdCard.path if applicable.
+    sdcard_path = os.path.join(self._avd_dir, _SDCARD_NAME)
+    if os.path.exists(sdcard_path):
+      properties['hw.sdCard.path'] = sdcard_path
+
+    for config_path in config_paths:
+      with ini.update_ini_file(config_path) as config_contents:
+        config_contents.update(properties)
+
+    # Create a Qt config file to disable the adb warning when the emulator is
+    # launched in window mode.
+    with ini.update_ini_file(self._qt_config_path) as config_contents:
+      config_contents['set'] = {'autoFindAdb': 'false'}
+
+  def _Initialize(self):
+    if self._initialized:
+      return
+
+    with self._initializer_lock:
+      if self._initialized:
+        return
+
+      # Emulator start-up looks for the adb daemon. Make sure it's running.
+      adb_wrapper.AdbWrapper.StartServer()
+
+      # Emulator start-up requires a valid sdk root.
+      assert self.emulator_sdk_root
+
+  def CreateInstance(self, output_manager=None):
+    """Creates an AVD instance without starting it.
+
+    Returns:
+      An _AvdInstance.
+    """
+    self._Initialize()
+    return _AvdInstance(self, output_manager=output_manager)
+
+  def StartInstance(self):
+    """Starts an AVD instance.
+
+    Returns:
+      An _AvdInstance.
+    """
+    instance = self.CreateInstance()
+    instance.Start()
+    return instance
+
+
+class _AvdInstance:
+  """Represents a single running instance of an AVD.
+
+  This class should only be created directly by AvdConfig.StartInstance,
+  but its other methods can be freely called.
+  """
+
+  def __init__(self, avd_config, output_manager=None):
+    """Create an _AvdInstance object.
+
+    Args:
+      avd_config: an AvdConfig instance.
+      output_manager: a pylib.base.output_manager.OutputManager instance.
+ """ + self._avd_config = avd_config + self._avd_name = avd_config.avd_name + self._emulator_home = avd_config.emulator_home + self._emulator_path = avd_config.emulator_path + self._emulator_proc = None + self._emulator_serial = None + self._emulator_device = None + + self._output_manager = output_manager + self._output_file = None + + self._writable_system = False + self._debug_tags = None + + def __str__(self): + return '%s|%s' % (self._avd_name, (self._emulator_serial or id(self))) + + def Start(self, + ensure_system_settings=True, + read_only=True, + window=False, + writable_system=False, + gpu_mode=_DEFAULT_GPU_MODE, + wipe_data=False, + debug_tags=None, + require_fast_start=False): + """Starts the emulator running an instance of the given AVD. + + Note when ensure_system_settings is True, the program will wait until the + emulator is fully booted, and then update system settings. + """ + is_slow_start = not require_fast_start + # Force to load system snapshot if detected. + if self.HasSystemSnapshot(): + if not writable_system: + logging.info('System snapshot found. Set "writable_system=True" ' + 'to load it properly.') + writable_system = True + if read_only: + logging.info('System snapshot found. Set "read_only=False" ' + 'to load it properly.') + read_only = False + elif writable_system: + is_slow_start = True + logging.warning('Emulator will be slow to start, as ' + '"writable_system=True" but system snapshot not found.') + + self._writable_system = writable_system + + with tempfile_ext.TemporaryFileName() as socket_path, (contextlib.closing( + socket.socket(socket.AF_UNIX))) as sock: + sock.bind(socket_path) + emulator_cmd = [ + self._emulator_path, + '-avd', + self._avd_name, + '-report-console', + 'unix:%s' % socket_path, + '-no-boot-anim', + # Explicitly prevent emulator from auto-saving to snapshot on exit. + '-no-snapshot-save', + # Explicitly set the snapshot name for auto-load + '-snapshot', + self.GetSnapshotName(), + ] + + if wipe_data: + emulator_cmd.append('-wipe-data') + if read_only: + emulator_cmd.append('-read-only') + if writable_system: + emulator_cmd.append('-writable-system') + # Note when "--gpu-mode" is set to "host": + # * It needs a valid DISPLAY env, even if "--emulator-window" is false. + # Otherwise it may throw errors like "Failed to initialize backend + # EGL display". See the code in https://bit.ly/3ruiMlB as an example + # to setup the DISPLAY env with xvfb. + # * It will not work under remote sessions like chrome remote desktop. + if gpu_mode: + emulator_cmd.extend(['-gpu', gpu_mode]) + if debug_tags: + self._debug_tags = set(debug_tags.split(',')) + # Always print timestamp when debug tags are set. + self._debug_tags.add('time') + emulator_cmd.extend(['-debug', ','.join(self._debug_tags)]) + if 'kernel' in self._debug_tags: + # TODO(crbug.com/1404176): newer API levels need "-virtio-console" + # as well to print kernel log. + emulator_cmd.append('-show-kernel') + + emulator_env = { + # kill immediately when emulator hang. + 'ANDROID_EMULATOR_WAIT_TIME_BEFORE_KILL': '0', + # Sets the emulator configuration directory + 'ANDROID_EMULATOR_HOME': self._emulator_home, + } + if 'DISPLAY' in os.environ: + emulator_env['DISPLAY'] = os.environ.get('DISPLAY') + if window: + if 'DISPLAY' not in emulator_env: + raise AvdException('Emulator failed to start: DISPLAY not defined') + else: + emulator_cmd.append('-no-window') + + # Need this for the qt config file to take effect. 
+      xdg_config_dirs = [self._avd_config.xdg_config_dir]
+      if 'XDG_CONFIG_DIRS' in os.environ:
+        xdg_config_dirs.append(os.environ.get('XDG_CONFIG_DIRS'))
+      emulator_env['XDG_CONFIG_DIRS'] = ':'.join(xdg_config_dirs)
+
+      sock.listen(1)
+
+      logging.info('Starting emulator...')
+      logging.info(
+          '  With environment: %s',
+          ' '.join(['%s=%s' % (k, v) for k, v in emulator_env.items()]))
+      logging.info('  With command: %s', ' '.join(emulator_cmd))
+
+      # Enable the emulator log when debug_tags is set.
+      if self._debug_tags:
+        # Write to an ArchivedFile if the output manager is set, otherwise
+        # discard the output.
+        if self._output_manager:
+          self._output_file = self._output_manager.CreateArchivedFile(
+              'emulator_%s' % time.strftime('%Y%m%dT%H%M%S-UTC',
+                                            time.gmtime()),
+              'emulator')
+        else:
+          self._output_file = open('/dev/null', 'w')
+      self._emulator_proc = cmd_helper.Popen(emulator_cmd,
+                                             stdout=self._output_file,
+                                             stderr=self._output_file,
+                                             env=emulator_env)
+
+      # Wait for the emulator to report its serial as requested via
+      # -report-console. See http://bit.ly/2lK3L18 for more.
+      def listen_for_serial(s):
+        logging.info('Waiting for connection from emulator.')
+        with contextlib.closing(s.accept()[0]) as conn:
+          val = conn.recv(1024)
+          return 'emulator-%d' % int(val)
+
+      try:
+        self._emulator_serial = timeout_retry.Run(
+            listen_for_serial,
+            timeout=120 if is_slow_start else 30,
+            retries=0,
+            args=[sock])
+        logging.info('%s started', self._emulator_serial)
+      except Exception:
+        self.Stop(force=True)
+        raise
+
+    # Set the system settings in "Start" here instead of in "Create" because
+    # "Create" runs during AVD creation, and we want to avoid an extra
+    # turnaround when rolling new AVDs.
+    if ensure_system_settings:
+      assert self.device is not None, '`instance.device` not initialized.'
+      logging.info('Waiting for device to be fully booted.')
+      self.device.WaitUntilFullyBooted(timeout=360 if is_slow_start else 90,
+                                       retries=0)
+      logging.info('Device fully booted, verifying system settings.')
+      _EnsureSystemSettings(self.device)
+
+  def Stop(self, force=False):
+    """Stops the emulator process.
+
+    When "force" is True, "terminate" is called on the emulator process,
+    which is recommended when the emulator is not responding to adb
+    commands.
+    """
+    # Close the output file first in case killing the emulator process goes
+    # wrong.
+    if self._output_file:
+      if self._debug_tags:
+        if self._output_manager:
+          self._output_manager.ArchiveArchivedFile(self._output_file,
+                                                   delete=True)
+          link = self._output_file.Link()
+          if link:
+            logging.critical('Emulator logs saved to %s', link)
+        else:
+          self._output_file.close()
+      self._output_file = None
+
+    if self._emulator_proc:
+      if self._emulator_proc.poll() is None:
+        if force or not self.device:
+          self._emulator_proc.terminate()
+        else:
+          self.device.adb.Emu('kill')
+        self._emulator_proc.wait()
+      self._emulator_proc = None
+      self._emulator_serial = None
+      self._emulator_device = None
+
+  def GetSnapshotName(self):
+    """Return the snapshot name to load/save.
+
+    The emulator uses a different snapshot process when the
+    '-writable-system' flag is set (see
+    https://issuetracker.google.com/issues/135857816#comment8).
+ + """ + if self._writable_system: + return _SYSTEM_SNAPSHOT_NAME + + return _DEFAULT_SNAPSHOT_NAME + + def HasSystemSnapshot(self): + """Check if the instance has the snapshot named _SYSTEM_SNAPSHOT_NAME.""" + return self._avd_config.HasSnapshot(_SYSTEM_SNAPSHOT_NAME) + + def SaveSnapshot(self): + snapshot_name = self.GetSnapshotName() + if self.device: + logging.info('Saving snapshot to %r.', snapshot_name) + self.device.adb.Emu(['avd', 'snapshot', 'save', snapshot_name]) + + @property + def serial(self): + return self._emulator_serial + + @property + def device(self): + if not self._emulator_device and self._emulator_serial: + self._emulator_device = device_utils.DeviceUtils(self._emulator_serial) + return self._emulator_device + + +# TODO(crbug.com/1275767): Refactor it to a dict-based approach. +def _EnsureSystemSettings(device): + set_long_press_timeout_cmd = [ + 'settings', 'put', 'secure', 'long_press_timeout', _LONG_PRESS_TIMEOUT + ] + device.RunShellCommand(set_long_press_timeout_cmd, check_return=True) + + # Verify if long_press_timeout is set correctly. + get_long_press_timeout_cmd = [ + 'settings', 'get', 'secure', 'long_press_timeout' + ] + adb_output = device.RunShellCommand(get_long_press_timeout_cmd, + check_return=True) + if _LONG_PRESS_TIMEOUT in adb_output: + logging.info('long_press_timeout set to %r', _LONG_PRESS_TIMEOUT) + else: + logging.warning('long_press_timeout is not set correctly') diff --git a/android/pylib/local/emulator/ini.py b/android/pylib/local/emulator/ini.py new file mode 100644 index 000000000000..79eb01580d3b --- /dev/null +++ b/android/pylib/local/emulator/ini.py @@ -0,0 +1,104 @@ +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Basic .ini encoding and decoding. + +The basic element in an ini file is the key. Every key is constructed by a name +and a value, delimited by an equals sign (=). + +Keys may be grouped into sections. The secetion name will be a line by itself, +in square brackets ([ and ]). All keys after the section are associated with +that section until another section occurs. + +Keys that are not under any section are considered at the top level. + +Section and key names are case sensitive. +""" + + +import contextlib +import os + + +def add_key(line, config, strict=True): + key, val = line.split('=', 1) + key = key.strip() + val = val.strip() + if strict and key in config: + raise ValueError('Multiple entries present for key "%s"' % key) + config[key] = val + + +def loads(ini_str, strict=True): + """Deserialize int_str to a dict (nested dict when has sections) object. + + Duplicated sections will merge their keys. + + When there are multiple entries for a key, at the top level, or under the + same section: + - If strict is true, ValueError will be raised. + - If strict is false, only the last occurrence will be stored. 
+ """ + ret = {} + section = None + for line in ini_str.splitlines(): + # Empty line + if not line: + continue + # Section line + if line[0] == '[' and line[-1] == ']': + section = line[1:-1] + if section not in ret: + ret[section] = {} + # Key line + else: + config = ret if section is None else ret[section] + add_key(line, config, strict=strict) + + return ret + + +def load(fp): + return loads(fp.read()) + + +def dumps(obj): + results = [] + key_str = '' + + for k, v in sorted(obj.items()): + if isinstance(v, dict): + results.append('[%s]\n' % k + dumps(v)) + else: + key_str += '%s = %s\n' % (k, str(v)) + + # Insert key_str at the first position, before any sections + if key_str: + results.insert(0, key_str) + + return '\n'.join(results) + + +def dump(obj, fp): + fp.write(dumps(obj)) + + +@contextlib.contextmanager +def update_ini_file(ini_file_path): + """Load and update the contents of an ini file. + + Args: + ini_file_path: A string containing the absolute path of the ini file. + Yields: + The contents of the file, as a dict + """ + ini_contents = {} + if os.path.exists(ini_file_path): + with open(ini_file_path) as ini_file: + ini_contents = load(ini_file) + + yield ini_contents + + with open(ini_file_path, 'w') as ini_file: + dump(ini_contents, ini_file) diff --git a/android/pylib/local/emulator/ini_test.py b/android/pylib/local/emulator/ini_test.py new file mode 100755 index 000000000000..327d6bf72731 --- /dev/null +++ b/android/pylib/local/emulator/ini_test.py @@ -0,0 +1,155 @@ +#! /usr/bin/env vpython3 +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Tests for ini.py.""" + + +import os +import sys +import textwrap +import unittest + +sys.path.append( + os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))) +from pylib.local.emulator import ini + + +class IniTest(unittest.TestCase): + def testLoadsBasic(self): + ini_str = textwrap.dedent("""\ + foo.bar = 1 + foo.baz= example + bar.bad =/path/to/thing + + [section_1] + foo.bar = 1 + foo.baz= example + + [section_2] + foo.baz= example + bar.bad =/path/to/thing + + [section_1] + bar.bad =/path/to/thing + """) + expected = { + 'foo.bar': '1', + 'foo.baz': 'example', + 'bar.bad': '/path/to/thing', + 'section_1': { + 'foo.bar': '1', + 'foo.baz': 'example', + 'bar.bad': '/path/to/thing', + }, + 'section_2': { + 'foo.baz': 'example', + 'bar.bad': '/path/to/thing', + }, + } + self.assertEqual(expected, ini.loads(ini_str)) + + def testLoadsDuplicatedKeysStrictFailure(self): + ini_str = textwrap.dedent("""\ + foo.bar = 1 + foo.baz = example + bar.bad = /path/to/thing + foo.bar = duplicate + """) + with self.assertRaises(ValueError): + ini.loads(ini_str, strict=True) + + def testLoadsDuplicatedKeysInSectionStrictFailure(self): + ini_str = textwrap.dedent("""\ + [section_1] + foo.bar = 1 + foo.baz = example + bar.bad = /path/to/thing + foo.bar = duplicate + """) + with self.assertRaises(ValueError): + ini.loads(ini_str, strict=True) + + def testLoadsPermissive(self): + ini_str = textwrap.dedent("""\ + foo.bar = 1 + foo.baz = example + bar.bad = /path/to/thing + foo.bar = duplicate + + [section_1] + foo.bar = 1 + foo.baz = example + bar.bad = /path/to/thing + foo.bar = duplicate + """) + expected = { + 'foo.bar': 'duplicate', + 'foo.baz': 'example', + 'bar.bad': '/path/to/thing', + 'section_1': { + 'foo.bar': 'duplicate', + 'foo.baz': 'example', + 'bar.bad': '/path/to/thing', + }, + } + self.assertEqual(expected, 
ini.loads(ini_str, strict=False)) + + def testDumpsBasic(self): + ini_contents = { + 'foo.bar': '1', + 'foo.baz': 'example', + 'bar.bad': '/path/to/thing', + 'section_2': { + 'foo.baz': 'example', + 'bar.bad': '/path/to/thing', + }, + 'section_1': { + 'foo.bar': '1', + 'foo.baz': 'example', + }, + } + # ini.dumps is expected to dump to string alphabetically + # by key and section name. + expected = textwrap.dedent("""\ + bar.bad = /path/to/thing + foo.bar = 1 + foo.baz = example + + [section_1] + foo.bar = 1 + foo.baz = example + + [section_2] + bar.bad = /path/to/thing + foo.baz = example + """) + self.assertEqual(expected, ini.dumps(ini_contents)) + + def testDumpsSections(self): + ini_contents = { + 'section_2': { + 'foo.baz': 'example', + 'bar.bad': '/path/to/thing', + }, + 'section_1': { + 'foo.bar': '1', + 'foo.baz': 'example', + }, + } + # ini.dumps is expected to dump to string alphabetically + # by key first, and then by section and the associated keys + expected = textwrap.dedent("""\ + [section_1] + foo.bar = 1 + foo.baz = example + + [section_2] + bar.bad = /path/to/thing + foo.baz = example + """) + self.assertEqual(expected, ini.dumps(ini_contents)) + + +if __name__ == '__main__': + unittest.main() diff --git a/android/pylib/local/emulator/local_emulator_environment.py b/android/pylib/local/emulator/local_emulator_environment.py new file mode 100644 index 000000000000..d71a38277fa2 --- /dev/null +++ b/android/pylib/local/emulator/local_emulator_environment.py @@ -0,0 +1,106 @@ +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +import logging + +from six.moves import range # pylint: disable=redefined-builtin +from devil import base_error +from devil.android import device_errors +from devil.utils import parallelizer +from devil.utils import reraiser_thread +from devil.utils import timeout_retry +from pylib.local.device import local_device_environment +from pylib.local.emulator import avd + +# Mirroring https://bit.ly/2OjuxcS#23 +_MAX_ANDROID_EMULATORS = 16 + + +# TODO(1262303): After Telemetry is supported by python3 we can re-add +# super without arguments in this script. 
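+# (i.e. call super().__init__(...) rather than
+# super(LocalEmulatorEnvironment, self).__init__(...)).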
+# pylint: disable=super-with-arguments
+class LocalEmulatorEnvironment(local_device_environment.LocalDeviceEnvironment):
+
+  def __init__(self, args, output_manager, error_func):
+    super(LocalEmulatorEnvironment, self).__init__(args, output_manager,
+                                                   error_func)
+    self._avd_config = avd.AvdConfig(args.avd_config)
+    if args.emulator_count < 1:
+      error_func('--emulator-count must be >= 1')
+    elif args.emulator_count >= _MAX_ANDROID_EMULATORS:
+      logging.warning('--emulator-count capped at %d.',
+                      _MAX_ANDROID_EMULATORS)
+    self._emulator_count = min(_MAX_ANDROID_EMULATORS, args.emulator_count)
+    self._emulator_window = args.emulator_window
+    self._emulator_debug_tags = args.emulator_debug_tags
+    self._writable_system = ((hasattr(args, 'use_webview_provider')
+                              and args.use_webview_provider)
+                             or (hasattr(args, 'replace_system_package')
+                                 and args.replace_system_package)
+                             or (hasattr(args, 'system_packages_to_remove')
+                                 and args.system_packages_to_remove))
+
+    self._emulator_instances = []
+    self._device_serials = []
+
+  #override
+  def SetUp(self):
+    self._avd_config.Install()
+
+    emulator_instances = [
+        self._avd_config.CreateInstance(output_manager=self.output_manager)
+        for _ in range(self._emulator_count)
+    ]
+
+    def start_emulator_instance(inst):
+      def is_timeout_error(exc):
+        return isinstance(
+            exc,
+            (device_errors.CommandTimeoutError, reraiser_thread.TimeoutError))
+
+      def impl(inst):
+        try:
+          inst.Start(window=self._emulator_window,
+                     writable_system=self._writable_system,
+                     debug_tags=self._emulator_debug_tags,
+                     require_fast_start=True)
+        except avd.AvdException:
+          logging.exception('Failed to start emulator instance.')
+          return None
+        except base_error.BaseError as e:
+          # A timeout error usually indicates the emulator is not responding.
+          # In this case, we should stop it forcibly.
+          inst.Stop(force=is_timeout_error(e))
+          raise
+        return inst
+
+      return timeout_retry.Run(impl,
+                               timeout=120 if self._writable_system else 60,
+                               retries=2,
+                               args=[inst],
+                               retry_if_func=is_timeout_error)
+
+    parallel_emulators = parallelizer.SyncParallelizer(emulator_instances)
+    self._emulator_instances = [
+        emu
+        for emu in parallel_emulators.pMap(start_emulator_instance).pGet(None)
+        if emu is not None
+    ]
+    self._device_serials = [e.serial for e in self._emulator_instances]
+
+    if not self._emulator_instances:
+      raise Exception('Failed to start any instances of the emulator.')
+    if len(self._emulator_instances) < self._emulator_count:
+      logging.warning(
+          'Running with fewer emulator instances than requested (%d vs %d)',
+          len(self._emulator_instances), self._emulator_count)
+
+    super(LocalEmulatorEnvironment, self).SetUp()
+
+  #override
+  def TearDown(self):
+    try:
+      super(LocalEmulatorEnvironment, self).TearDown()
+    finally:
+      parallelizer.SyncParallelizer(self._emulator_instances).Stop()
diff --git a/android/pylib/local/emulator/proto/__init__.py b/android/pylib/local/emulator/proto/__init__.py
new file mode 100644
index 000000000000..401c54b0d9c6
--- /dev/null
+++ b/android/pylib/local/emulator/proto/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2019 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/android/pylib/local/emulator/proto/avd.proto b/android/pylib/local/emulator/proto/avd.proto
new file mode 100644
index 000000000000..957897fdf71c
--- /dev/null
+++ b/android/pylib/local/emulator/proto/avd.proto
@@ -0,0 +1,94 @@
+// Copyright 2019 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+syntax = "proto3";
+
+package tools.android.avd.proto;
+
+message CIPDPackage {
+  // CIPD package name.
+  string package_name = 1;
+  // CIPD package version to use.
+  // Ignored when creating AVD packages.
+  string version = 2;
+  // Path into which the package should be installed,
+  // relative to pylib.local.emulator.avd.COMMON_CIPD_ROOT.
+  string dest_path = 3;
+}
+
+message ScreenSettings {
+  // Screen height in pixels.
+  uint32 height = 1;
+
+  // Screen width in pixels.
+  uint32 width = 2;
+
+  // Screen density in dpi.
+  uint32 density = 3;
+}
+
+message SdcardSettings {
+  // Size of the sdcard that should be created for this AVD.
+  // Can be anything that `mksdcard` or `avdmanager -c` would accept:
+  //   - a number of bytes
+  //   - a number followed by K, M, or G, indicating that many
+  //     KiB, MiB, or GiB, respectively.
+  string size = 1;
+}
+
+message AvdSettings {
+  // Settings pertaining to the AVD's screen.
+  ScreenSettings screen = 1;
+
+  // Settings pertaining to the AVD's sdcard.
+  SdcardSettings sdcard = 2;
+
+  // Advanced Features for AVD. The pairs here will override the
+  // default ones in the given system image.
+  // See https://bit.ly/2P1qK2X for all the available keys.
+  // The values should be on, off, default, or null.
+  map<string, string> advanced_features = 3;
+
+  // The physical RAM size on the device, in megabytes.
+  uint32 ram_size = 4;
+
+  // The properties for AVD. The pairs here will override the
+  // default ones in the given system image.
+  // See https://bit.ly/3052c1V for all the available keys and values.
+  //
+  // Note the screen, sdcard, ram_size above are ultimately translated to
+  // AVD properties and they won't be overwritten by values here.
+  map<string, string> avd_properties = 5;
+}
+
+message Avd {
+  // The emulator to use in running the AVD.
+  CIPDPackage emulator_package = 1;
+
+  // The system image to use.
+  CIPDPackage system_image_package = 2;
+  // The name of the system image to use, as reported by sdkmanager.
+  string system_image_name = 3;
+
+  // The AVD to create or use.
+  // (Only the package_name is used during AVD creation.)
+  CIPDPackage avd_package = 4;
+  // The name of the AVD to create or use.
+  string avd_name = 5;
+
+  // How to configure the AVD at creation.
+  AvdSettings avd_settings = 6;
+
+  // Min SDK level for the emulator.
+  uint32 min_sdk = 7;
+
+  // The partition where privileged apks are installed. For version 27 and
+  // below this is /system; after that it can be /system, /product, or
+  // /vendor.
+  string install_privileged_apk_partition = 8;
+
+  // Needed for gmscore/phonesky support.
+  repeated CIPDPackage privileged_apk = 9;
+  repeated CIPDPackage additional_apk = 10;
+}
diff --git a/android/pylib/local/emulator/proto/avd_pb2.py b/android/pylib/local/emulator/proto/avd_pb2.py
new file mode 100644
index 000000000000..e43534c2bcec
--- /dev/null
+++ b/android/pylib/local/emulator/proto/avd_pb2.py
@@ -0,0 +1,455 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: build/android/pylib/local/emulator/proto/avd.proto + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='build/android/pylib/local/emulator/proto/avd.proto', + package='tools.android.avd.proto', + syntax='proto3', + serialized_options=None, + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n2build/android/pylib/local/emulator/proto/avd.proto\x12\x17tools.android.avd.proto\"G\n\x0b\x43IPDPackage\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x11\n\tdest_path\x18\x03 \x01(\t\"@\n\x0eScreenSettings\x12\x0e\n\x06height\x18\x01 \x01(\r\x12\r\n\x05width\x18\x02 \x01(\r\x12\x0f\n\x07\x64\x65nsity\x18\x03 \x01(\r\"\x1e\n\x0eSdcardSettings\x12\x0c\n\x04size\x18\x01 \x01(\t\"\xa8\x03\n\x0b\x41vdSettings\x12\x37\n\x06screen\x18\x01 \x01(\x0b\x32\'.tools.android.avd.proto.ScreenSettings\x12\x37\n\x06sdcard\x18\x02 \x01(\x0b\x32\'.tools.android.avd.proto.SdcardSettings\x12U\n\x11\x61\x64vanced_features\x18\x03 \x03(\x0b\x32:.tools.android.avd.proto.AvdSettings.AdvancedFeaturesEntry\x12\x10\n\x08ram_size\x18\x04 \x01(\r\x12O\n\x0e\x61vd_properties\x18\x05 \x03(\x0b\x32\x37.tools.android.avd.proto.AvdSettings.AvdPropertiesEntry\x1a\x37\n\x15\x41\x64vancedFeaturesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x34\n\x12\x41vdPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xe4\x03\n\x03\x41vd\x12>\n\x10\x65mulator_package\x18\x01 \x01(\x0b\x32$.tools.android.avd.proto.CIPDPackage\x12\x42\n\x14system_image_package\x18\x02 \x01(\x0b\x32$.tools.android.avd.proto.CIPDPackage\x12\x19\n\x11system_image_name\x18\x03 \x01(\t\x12\x39\n\x0b\x61vd_package\x18\x04 \x01(\x0b\x32$.tools.android.avd.proto.CIPDPackage\x12\x10\n\x08\x61vd_name\x18\x05 \x01(\t\x12:\n\x0c\x61vd_settings\x18\x06 \x01(\x0b\x32$.tools.android.avd.proto.AvdSettings\x12\x0f\n\x07min_sdk\x18\x07 \x01(\r\x12(\n install_privileged_apk_partition\x18\x08 \x01(\t\x12<\n\x0eprivileged_apk\x18\t \x03(\x0b\x32$.tools.android.avd.proto.CIPDPackage\x12<\n\x0e\x61\x64\x64itional_apk\x18\n \x03(\x0b\x32$.tools.android.avd.proto.CIPDPackageb\x06proto3' +) + + + + +_CIPDPACKAGE = _descriptor.Descriptor( + name='CIPDPackage', + full_name='tools.android.avd.proto.CIPDPackage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='package_name', full_name='tools.android.avd.proto.CIPDPackage.package_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='version', full_name='tools.android.avd.proto.CIPDPackage.version', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + 
_descriptor.FieldDescriptor( + name='dest_path', full_name='tools.android.avd.proto.CIPDPackage.dest_path', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=79, + serialized_end=150, +) + + +_SCREENSETTINGS = _descriptor.Descriptor( + name='ScreenSettings', + full_name='tools.android.avd.proto.ScreenSettings', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='height', full_name='tools.android.avd.proto.ScreenSettings.height', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='width', full_name='tools.android.avd.proto.ScreenSettings.width', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='density', full_name='tools.android.avd.proto.ScreenSettings.density', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=152, + serialized_end=216, +) + + +_SDCARDSETTINGS = _descriptor.Descriptor( + name='SdcardSettings', + full_name='tools.android.avd.proto.SdcardSettings', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='size', full_name='tools.android.avd.proto.SdcardSettings.size', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=218, + serialized_end=248, +) + + +_AVDSETTINGS_ADVANCEDFEATURESENTRY = _descriptor.Descriptor( + name='AdvancedFeaturesEntry', + full_name='tools.android.avd.proto.AvdSettings.AdvancedFeaturesEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='key', 
full_name='tools.android.avd.proto.AvdSettings.AdvancedFeaturesEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='value', full_name='tools.android.avd.proto.AvdSettings.AdvancedFeaturesEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=b'8\001', + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=566, + serialized_end=621, +) + +_AVDSETTINGS_AVDPROPERTIESENTRY = _descriptor.Descriptor( + name='AvdPropertiesEntry', + full_name='tools.android.avd.proto.AvdSettings.AvdPropertiesEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='tools.android.avd.proto.AvdSettings.AvdPropertiesEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='value', full_name='tools.android.avd.proto.AvdSettings.AvdPropertiesEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=b'8\001', + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=623, + serialized_end=675, +) + +_AVDSETTINGS = _descriptor.Descriptor( + name='AvdSettings', + full_name='tools.android.avd.proto.AvdSettings', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='screen', full_name='tools.android.avd.proto.AvdSettings.screen', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='sdcard', full_name='tools.android.avd.proto.AvdSettings.sdcard', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='advanced_features', full_name='tools.android.avd.proto.AvdSettings.advanced_features', index=2, + 
number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='ram_size', full_name='tools.android.avd.proto.AvdSettings.ram_size', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='avd_properties', full_name='tools.android.avd.proto.AvdSettings.avd_properties', index=4, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_AVDSETTINGS_ADVANCEDFEATURESENTRY, _AVDSETTINGS_AVDPROPERTIESENTRY, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=251, + serialized_end=675, +) + + +_AVD = _descriptor.Descriptor( + name='Avd', + full_name='tools.android.avd.proto.Avd', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='emulator_package', full_name='tools.android.avd.proto.Avd.emulator_package', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='system_image_package', full_name='tools.android.avd.proto.Avd.system_image_package', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='system_image_name', full_name='tools.android.avd.proto.Avd.system_image_name', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='avd_package', full_name='tools.android.avd.proto.Avd.avd_package', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='avd_name', full_name='tools.android.avd.proto.Avd.avd_name', index=4, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='avd_settings', full_name='tools.android.avd.proto.Avd.avd_settings', index=5, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='min_sdk', full_name='tools.android.avd.proto.Avd.min_sdk', index=6, + number=7, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='install_privileged_apk_partition', full_name='tools.android.avd.proto.Avd.install_privileged_apk_partition', index=7, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='privileged_apk', full_name='tools.android.avd.proto.Avd.privileged_apk', index=8, + number=9, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='additional_apk', full_name='tools.android.avd.proto.Avd.additional_apk', index=9, + number=10, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=678, + serialized_end=1162, +) + +_AVDSETTINGS_ADVANCEDFEATURESENTRY.containing_type = _AVDSETTINGS +_AVDSETTINGS_AVDPROPERTIESENTRY.containing_type = _AVDSETTINGS +_AVDSETTINGS.fields_by_name['screen'].message_type = _SCREENSETTINGS +_AVDSETTINGS.fields_by_name['sdcard'].message_type = _SDCARDSETTINGS +_AVDSETTINGS.fields_by_name['advanced_features'].message_type = _AVDSETTINGS_ADVANCEDFEATURESENTRY +_AVDSETTINGS.fields_by_name['avd_properties'].message_type = _AVDSETTINGS_AVDPROPERTIESENTRY +_AVD.fields_by_name['emulator_package'].message_type = _CIPDPACKAGE +_AVD.fields_by_name['system_image_package'].message_type = _CIPDPACKAGE +_AVD.fields_by_name['avd_package'].message_type = _CIPDPACKAGE +_AVD.fields_by_name['avd_settings'].message_type = _AVDSETTINGS +_AVD.fields_by_name['privileged_apk'].message_type = _CIPDPACKAGE +_AVD.fields_by_name['additional_apk'].message_type = _CIPDPACKAGE +DESCRIPTOR.message_types_by_name['CIPDPackage'] = _CIPDPACKAGE +DESCRIPTOR.message_types_by_name['ScreenSettings'] = _SCREENSETTINGS +DESCRIPTOR.message_types_by_name['SdcardSettings'] = _SDCARDSETTINGS +DESCRIPTOR.message_types_by_name['AvdSettings'] = _AVDSETTINGS +DESCRIPTOR.message_types_by_name['Avd'] = _AVD 
+_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +CIPDPackage = _reflection.GeneratedProtocolMessageType('CIPDPackage', (_message.Message,), { + 'DESCRIPTOR' : _CIPDPACKAGE, + '__module__' : 'build.android.pylib.local.emulator.proto.avd_pb2' + # @@protoc_insertion_point(class_scope:tools.android.avd.proto.CIPDPackage) + }) +_sym_db.RegisterMessage(CIPDPackage) + +ScreenSettings = _reflection.GeneratedProtocolMessageType('ScreenSettings', (_message.Message,), { + 'DESCRIPTOR' : _SCREENSETTINGS, + '__module__' : 'build.android.pylib.local.emulator.proto.avd_pb2' + # @@protoc_insertion_point(class_scope:tools.android.avd.proto.ScreenSettings) + }) +_sym_db.RegisterMessage(ScreenSettings) + +SdcardSettings = _reflection.GeneratedProtocolMessageType('SdcardSettings', (_message.Message,), { + 'DESCRIPTOR' : _SDCARDSETTINGS, + '__module__' : 'build.android.pylib.local.emulator.proto.avd_pb2' + # @@protoc_insertion_point(class_scope:tools.android.avd.proto.SdcardSettings) + }) +_sym_db.RegisterMessage(SdcardSettings) + +AvdSettings = _reflection.GeneratedProtocolMessageType('AvdSettings', (_message.Message,), { + + 'AdvancedFeaturesEntry' : _reflection.GeneratedProtocolMessageType('AdvancedFeaturesEntry', (_message.Message,), { + 'DESCRIPTOR' : _AVDSETTINGS_ADVANCEDFEATURESENTRY, + '__module__' : 'build.android.pylib.local.emulator.proto.avd_pb2' + # @@protoc_insertion_point(class_scope:tools.android.avd.proto.AvdSettings.AdvancedFeaturesEntry) + }) + , + + 'AvdPropertiesEntry' : _reflection.GeneratedProtocolMessageType('AvdPropertiesEntry', (_message.Message,), { + 'DESCRIPTOR' : _AVDSETTINGS_AVDPROPERTIESENTRY, + '__module__' : 'build.android.pylib.local.emulator.proto.avd_pb2' + # @@protoc_insertion_point(class_scope:tools.android.avd.proto.AvdSettings.AvdPropertiesEntry) + }) + , + 'DESCRIPTOR' : _AVDSETTINGS, + '__module__' : 'build.android.pylib.local.emulator.proto.avd_pb2' + # @@protoc_insertion_point(class_scope:tools.android.avd.proto.AvdSettings) + }) +_sym_db.RegisterMessage(AvdSettings) +_sym_db.RegisterMessage(AvdSettings.AdvancedFeaturesEntry) +_sym_db.RegisterMessage(AvdSettings.AvdPropertiesEntry) + +Avd = _reflection.GeneratedProtocolMessageType('Avd', (_message.Message,), { + 'DESCRIPTOR' : _AVD, + '__module__' : 'build.android.pylib.local.emulator.proto.avd_pb2' + # @@protoc_insertion_point(class_scope:tools.android.avd.proto.Avd) + }) +_sym_db.RegisterMessage(Avd) + + +_AVDSETTINGS_ADVANCEDFEATURESENTRY._options = None +_AVDSETTINGS_AVDPROPERTIESENTRY._options = None +# @@protoc_insertion_point(module_scope) diff --git a/android/pylib/local/local_test_server_spawner.py b/android/pylib/local/local_test_server_spawner.py new file mode 100644 index 000000000000..453d9aa4e87f --- /dev/null +++ b/android/pylib/local/local_test_server_spawner.py @@ -0,0 +1,101 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +import json +import time + +from six.moves import range # pylint: disable=redefined-builtin +from devil.android import forwarder +from devil.android import ports +from pylib.base import test_server +from pylib.constants import host_paths + +with host_paths.SysPath(host_paths.BUILD_COMMON_PATH): + import chrome_test_server_spawner + + +# The tests should not need more than one test server instance. +MAX_TEST_SERVER_INSTANCES = 1 + + +def _WaitUntil(predicate, max_attempts=5): + """Blocks until the provided predicate (function) is true. 
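+
+  Polls the predicate up to max_attempts - 1 times, sleeping between checks
+  with exponential backoff (starting at 25 ms, capped at 1 second).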
+ + Returns: + Whether the provided predicate was satisfied once (before the timeout). + """ + sleep_time_sec = 0.025 + for _ in range(1, max_attempts): + if predicate(): + return True + time.sleep(sleep_time_sec) + sleep_time_sec = min(1, sleep_time_sec * 2) # Don't wait more than 1 sec. + return False + + +class PortForwarderAndroid(chrome_test_server_spawner.PortForwarder): + def __init__(self, device, tool): + self.device = device + self.tool = tool + + def Map(self, port_pairs): + forwarder.Forwarder.Map(port_pairs, self.device, self.tool) + + def GetDevicePortForHostPort(self, host_port): + return forwarder.Forwarder.DevicePortForHostPort(host_port) + + def WaitHostPortAvailable(self, port): + return _WaitUntil(lambda: ports.IsHostPortAvailable(port)) + + def WaitPortNotAvailable(self, port): + return _WaitUntil(lambda: not ports.IsHostPortAvailable(port)) + + def WaitDevicePortReady(self, port): + return _WaitUntil(lambda: ports.IsDevicePortUsed(self.device, port)) + + def Unmap(self, device_port): + forwarder.Forwarder.UnmapDevicePort(device_port, self.device) + + +class LocalTestServerSpawner(test_server.TestServer): + + def __init__(self, port, device, tool): + super().__init__() + self._device = device + self._spawning_server = chrome_test_server_spawner.SpawningServer( + port, PortForwarderAndroid(device, tool), MAX_TEST_SERVER_INSTANCES) + self._tool = tool + + @property + def server_address(self): + return self._spawning_server.server.server_address + + @property + def port(self): + return self.server_address[1] + + #override + def SetUp(self): + # See net/test/spawned_test_server/remote_test_server.h for description of + # the fields in the config file. + test_server_config = json.dumps({ + 'spawner_url_base': 'http://localhost:%d' % self.port + }) + self._device.WriteFile( + '%s/net-test-server-config' % self._device.GetExternalStoragePath(), + test_server_config) + forwarder.Forwarder.Map( + [(self.port, self.port)], self._device, self._tool) + self._spawning_server.Start() + + #override + def Reset(self): + self._spawning_server.CleanupState() + + #override + def TearDown(self): + self.Reset() + self._spawning_server.Stop() + forwarder.Forwarder.UnmapDevicePort(self.port, self._device) diff --git a/android/pylib/local/machine/__init__.py b/android/pylib/local/machine/__init__.py new file mode 100644 index 000000000000..68130d5941d9 --- /dev/null +++ b/android/pylib/local/machine/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. diff --git a/android/pylib/local/machine/local_machine_environment.py b/android/pylib/local/machine/local_machine_environment.py new file mode 100644 index 000000000000..d75dc8869d99 --- /dev/null +++ b/android/pylib/local/machine/local_machine_environment.py @@ -0,0 +1,19 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +from pylib.base import environment + + +class LocalMachineEnvironment(environment.Environment): + + def __init__(self, _args, output_manager, _error_func): + super().__init__(output_manager) + + #override + def SetUp(self): + pass + + #override + def TearDown(self): + pass diff --git a/android/pylib/local/machine/local_machine_junit_test_run.py b/android/pylib/local/machine/local_machine_junit_test_run.py new file mode 100644 index 000000000000..a923d6a21ba9 --- /dev/null +++ b/android/pylib/local/machine/local_machine_junit_test_run.py @@ -0,0 +1,443 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import json +import logging +import multiprocessing +import os +import queue +import re +import subprocess +import sys +import tempfile +import threading +import time +import zipfile + +from six.moves import range # pylint: disable=redefined-builtin +from devil.utils import cmd_helper +from py_utils import tempfile_ext +from pylib import constants +from pylib.base import base_test_result +from pylib.base import test_run +from pylib.constants import host_paths +from pylib.results import json_results + + +# These Test classes are used for running tests and are excluded in the test +# runner. See: +# https://android.googlesource.com/platform/frameworks/testing/+/android-support-test/runner/src/main/java/android/support/test/internal/runner/TestRequestBuilder.java +# base/test/android/javatests/src/org/chromium/base/test/BaseChromiumAndroidJUnitRunner.java # pylint: disable=line-too-long +_EXCLUDED_CLASSES_PREFIXES = ('android', 'junit', 'org/bouncycastle/util', + 'org/hamcrest', 'org/junit', 'org/mockito') + +# Suites we shouldn't shard, usually because they don't contain enough test +# cases. +_EXCLUDED_SUITES = { + 'password_check_junit_tests', + 'touch_to_fill_junit_tests', +} + + +# It can actually take longer to run if you shard too much, especially on +# smaller suites. Locally media_base_junit_tests takes 4.3 sec with 1 shard, +# and 6 sec with 2 or more shards. +_MIN_CLASSES_PER_SHARD = 8 + +# Running the largest test suite with a single shard takes about 22 minutes. +_SHARD_TIMEOUT = 30 * 60 + +# RegExp to detect logcat lines, e.g., 'I/AssetManager: not found'. +_LOGCAT_RE = re.compile(r'[A-Z]/[\w\d_-]+:') + + +class LocalMachineJunitTestRun(test_run.TestRun): + # override + def TestPackage(self): + return self._test_instance.suite + + # override + def SetUp(self): + pass + + def _GetFilterArgs(self, shard_test_filter=None): + ret = [] + if shard_test_filter: + ret += ['-gtest-filter', ':'.join(shard_test_filter)] + + for test_filter in self._test_instance.test_filters: + ret += ['-gtest-filter', test_filter] + + if self._test_instance.package_filter: + ret += ['-package-filter', self._test_instance.package_filter] + if self._test_instance.runner_filter: + ret += ['-runner-filter', self._test_instance.runner_filter] + + return ret + + def _CreateJarArgsList(self, json_result_file_paths, group_test_list, shards): + # Creates a list of jar_args. The important thing is each jar_args list + # has a different json_results file for writing test results to and that + # each list of jar_args has its own test to run as specified in the + # -gtest-filter. 
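+    # Illustrative shape for two shards (paths/filters hypothetical):
+    #   [['-json-results-file', '/tmp/r0.json', '-gtest-filter', 'FooTest*:BarTest*'],
+    #    ['-json-results-file', '/tmp/r1.json', '-gtest-filter', 'BazTest*']]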
+    jar_args_list = [['-json-results-file', result_file]
+                     for result_file in json_result_file_paths]
+    for index, jar_arg in enumerate(jar_args_list):
+      shard_test_filter = group_test_list[index] if shards > 1 else None
+      jar_arg += self._GetFilterArgs(shard_test_filter)
+
+    return jar_args_list
+
+  def _CreateJvmArgsList(self, for_listing=False):
+    # Creates a list of jvm_args (robolectric, code coverage, etc.).
+    jvm_args = [
+        '-Drobolectric.dependency.dir=%s' %
+        self._test_instance.robolectric_runtime_deps_dir,
+        '-Ddir.source.root=%s' % constants.DIR_SOURCE_ROOT,
+        # Use locally available sdk jars from 'robolectric.dependency.dir'.
+        '-Drobolectric.offline=true',
+        '-Drobolectric.resourcesMode=binary',
+        '-Drobolectric.logging=stdout',
+        '-Djava.library.path=%s' % self._test_instance.native_libs_dir,
+    ]
+    if self._test_instance.debug_socket and not for_listing:
+      jvm_args += [
+          '-Dchromium.jdwp_active=true',
+          ('-agentlib:jdwp=transport=dt_socket'
+           ',server=y,suspend=y,address=%s' % self._test_instance.debug_socket)
+      ]
+
+    if self._test_instance.coverage_dir and not for_listing:
+      if not os.path.exists(self._test_instance.coverage_dir):
+        os.makedirs(self._test_instance.coverage_dir)
+      elif not os.path.isdir(self._test_instance.coverage_dir):
+        raise Exception('--coverage-dir takes a directory, not a file path.')
+      # Jacoco supports concurrent processes using the same output file:
+      # https://github.com/jacoco/jacoco/blob/6cd3f0bd8e348f8fba7bffec5225407151f1cc91/org.jacoco.agent.rt/src/org/jacoco/agent/rt/internal/output/FileOutput.java#L67
+      # So there is no need to vary the output based on shard number.
+      jacoco_coverage_file = os.path.join(self._test_instance.coverage_dir,
+                                          '%s.exec' % self._test_instance.suite)
+      if self._test_instance.coverage_on_the_fly:
+        jacoco_agent_path = os.path.join(host_paths.DIR_SOURCE_ROOT,
+                                         'third_party', 'jacoco', 'lib',
+                                         'jacocoagent.jar')
+
+        # inclnolocationclasses is false to prevent NoClassDefFoundError.
+        jacoco_args = '-javaagent:{}=destfile={},inclnolocationclasses=false'
+        jvm_args.append(
+            jacoco_args.format(jacoco_agent_path, jacoco_coverage_file))
+      else:
+        jvm_args.append('-Djacoco-agent.destfile=%s' % jacoco_coverage_file)
+
+    return jvm_args
+
+  @property
+  def _wrapper_path(self):
+    return os.path.join(constants.GetOutDirectory(), 'bin', 'helper',
+                        self._test_instance.suite)
+
+  #override
+  def GetTestsForListing(self):
+    with tempfile_ext.NamedTemporaryDirectory() as temp_dir:
+      cmd = [self._wrapper_path, '--list-tests'] + self._GetFilterArgs()
+      jvm_args = self._CreateJvmArgsList(for_listing=True)
+      if jvm_args:
+        cmd += ['--jvm-args', '"%s"' % ' '.join(jvm_args)]
+      AddPropertiesJar([cmd], temp_dir, self._test_instance.resource_apk)
+      lines = subprocess.check_output(cmd, encoding='utf8').splitlines()
+
+    PREFIX = '#TEST# '
+    prefix_len = len(PREFIX)
+    # Filter out log messages other than test names (Robolectric logs to
+    # stdout).
+    return sorted(l[prefix_len:] for l in lines if l.startswith(PREFIX))
+
+  # override
+  def RunTests(self, results, raw_logs_fh=None):
+    # This avoids searching through the classpath jars for test classes,
+    # which takes about 1-2 seconds.
+ if (self._test_instance.shards == 1 + # TODO(crbug.com/1383650): remove this + or self._test_instance.has_literal_filters or + self._test_instance.suite in _EXCLUDED_SUITES): + test_classes = [] + shards = 1 + else: + test_classes = _GetTestClasses(self._wrapper_path) + shards = ChooseNumOfShards(test_classes, self._test_instance.shards) + + logging.info('Running tests on %d shard(s).', shards) + group_test_list = GroupTestsForShard(shards, test_classes) + + with tempfile_ext.NamedTemporaryDirectory() as temp_dir: + cmd_list = [[self._wrapper_path] for _ in range(shards)] + json_result_file_paths = [ + os.path.join(temp_dir, 'results%d.json' % i) for i in range(shards) + ] + jar_args_list = self._CreateJarArgsList(json_result_file_paths, + group_test_list, shards) + if jar_args_list: + for i in range(shards): + cmd_list[i].extend( + ['--jar-args', '"%s"' % ' '.join(jar_args_list[i])]) + + jvm_args = self._CreateJvmArgsList() + if jvm_args: + for cmd in cmd_list: + cmd.extend(['--jvm-args', '"%s"' % ' '.join(jvm_args)]) + + AddPropertiesJar(cmd_list, temp_dir, self._test_instance.resource_apk) + + show_logcat = logging.getLogger().isEnabledFor(logging.INFO) + num_omitted_lines = 0 + for line in _RunCommandsAndSerializeOutput(cmd_list): + if raw_logs_fh: + raw_logs_fh.write(line) + if show_logcat or not _LOGCAT_RE.match(line): + sys.stdout.write(line) + else: + num_omitted_lines += 1 + + if num_omitted_lines > 0: + logging.critical('%d log lines omitted.', num_omitted_lines) + sys.stdout.flush() + if raw_logs_fh: + raw_logs_fh.flush() + + results_list = [] + try: + for json_file_path in json_result_file_paths: + with open(json_file_path, 'r') as f: + results_list += json_results.ParseResultsFromJson( + json.loads(f.read())) + except IOError: + # In the case of a failure in the JUnit or Robolectric test runner + # the output json file may never be written. + results_list = [ + base_test_result.BaseTestResult('Test Runner Failure', + base_test_result.ResultType.UNKNOWN) + ] + + test_run_results = base_test_result.TestRunResults() + test_run_results.AddResults(results_list) + results.append(test_run_results) + + # override + def TearDown(self): + pass + + +def AddPropertiesJar(cmd_list, temp_dir, resource_apk): + # Create properties file for Robolectric test runners so they can find the + # binary resources. + properties_jar_path = os.path.join(temp_dir, 'properties.jar') + with zipfile.ZipFile(properties_jar_path, 'w') as z: + z.writestr('com/android/tools/test_config.properties', + 'android_resource_apk=%s\n' % resource_apk) + props = [ + 'application = android.app.Application', + 'sdk = 28', + ('shadows = org.chromium.testing.local.' + 'CustomShadowApplicationPackageManager'), + ] + z.writestr('robolectric.properties', '\n'.join(props)) + + for cmd in cmd_list: + cmd.extend(['--classpath', properties_jar_path]) + + +def ChooseNumOfShards(test_classes, shards): + # Don't override requests to not shard. + if shards == 1: + return 1 + + # Sharding doesn't reduce runtime on just a few tests. + if shards > (len(test_classes) // _MIN_CLASSES_PER_SHARD) or shards < 1: + shards = max(1, (len(test_classes) // _MIN_CLASSES_PER_SHARD)) + + # Local tests of explicit --shard values show that max speed is achieved + # at cpu_count() / 2. + # Using -XX:TieredStopAtLevel=1 is required for this result. The flag reduces + # CPU time by two-thirds, making sharding more effective. + shards = max(1, min(shards, multiprocessing.cpu_count() // 2)) + # Can have at minimum one test_class per shard. 
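+  # Worked example: 50 test classes with 16 requested shards on a 36-core
+  # machine: 16 > 50 // 8, so shards first becomes 6; min(6, 36 // 2) keeps
+  # it at 6; and the clamp below leaves min(50, 6) == 6. With 20 classes and
+  # 8 requested shards, this works out to 20 // 8 == 2.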
+  shards = min(len(test_classes), shards)
+
+  return shards
+
+
+def GroupTestsForShard(num_of_shards, test_classes):
+  """Groups tests that will be run on each shard.
+
+  Args:
+    num_of_shards: number of shards to split tests between.
+    test_classes: A list of test_class files in the jar.
+
+  Returns:
+    A dictionary mapping each shard index to its list of test classes.
+  """
+  test_dict = {i: [] for i in range(num_of_shards)}
+
+  # Round-robin test distribution to reduce the chance that a sequential group
+  # of classes all have an unusually high number of tests.
+  for count, test_cls in enumerate(test_classes):
+    test_cls = test_cls.replace('.class', '*')
+    test_cls = test_cls.replace('/', '.')
+    test_dict[count % num_of_shards].append(test_cls)
+
+  return test_dict
+
+
+def _DumpJavaStacks(pid):
+  jcmd = os.path.join(constants.JAVA_HOME, 'bin', 'jcmd')
+  cmd = [jcmd, str(pid), 'Thread.print']
+  result = subprocess.run(cmd,
+                          check=False,
+                          stdout=subprocess.PIPE,
+                          encoding='utf8')
+  if result.returncode:
+    return 'Failed to dump stacks\n' + result.stdout
+  return result.stdout
+
+
+def _RunCommandsAndSerializeOutput(cmd_list):
+  """Runs multiple commands in parallel and yields serialized output lines.
+
+  Args:
+    cmd_list: List of commands.
+
+  Yields:
+    Shard 0's output as it is produced, followed by the buffered output of
+    the remaining shards once they finish.
+
+  Raises:
+    TimeoutError: If timeout is exceeded.
+  """
+  num_shards = len(cmd_list)
+  assert num_shards > 0
+  procs = []
+  temp_files = []
+  for i, cmd in enumerate(cmd_list):
+    # Shard 0 yields results immediately, the rest write to files.
+    if i == 0:
+      temp_files.append(None)  # Placeholder.
+      procs.append(
+          cmd_helper.Popen(
+              cmd,
+              stdout=subprocess.PIPE,
+              stderr=subprocess.STDOUT,
+          ))
+    else:
+      temp_file = tempfile.TemporaryFile(mode='w+t', encoding='utf-8')
+      temp_files.append(temp_file)
+      procs.append(cmd_helper.Popen(
+          cmd,
+          stdout=temp_file,
+          stderr=temp_file,
+      ))
+
+  deadline = time.time() + (_SHARD_TIMEOUT / (num_shards // 2 + 1))
+
+  yield '\n'
+  yield 'Shard 0 output:\n'
+
+  # Pump shard 0's output from a helper thread so it can be streamed live
+  # while still honoring the deadline.
+  def pump_stream_to_queue(f, q):
+    for line in f:
+      q.put(line)
+    q.put(None)
+
+  shard_0_q = queue.Queue()
+  shard_0_pump = threading.Thread(target=pump_stream_to_queue,
+                                  args=(procs[0].stdout, shard_0_q))
+  shard_0_pump.start()
+
+  timeout_dumps = {}
+
+  # Print the first process until timeout or completion.
+  while shard_0_pump.is_alive():
+    try:
+      line = shard_0_q.get(timeout=deadline - time.time())
+      if line is None:
+        break
+      yield line
+    except queue.Empty:
+      if time.time() > deadline:
+        break
+
+  # Wait for remaining processes to finish.
+  for i, proc in enumerate(procs):
+    try:
+      proc.wait(timeout=deadline - time.time())
+    except subprocess.TimeoutExpired:
+      timeout_dumps[i] = _DumpJavaStacks(proc.pid)
+      proc.kill()
+
+  # Output any remaining lines from a timed-out first shard.
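+  # The pump thread ends once shard 0's stdout closes (on normal exit, or
+  # after proc.kill() above), so the join() below cannot hang, and draining
+  # the queue afterwards recovers any lines queued after the read loop broke.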
+  shard_0_pump.join()
+  while not shard_0_q.empty():
+    yield shard_0_q.get()
+
+  for i in range(1, num_shards):
+    f = temp_files[i]
+    yield '\n'
+    yield 'Shard %d output:\n' % i
+    f.seek(0)
+    for line in f.readlines():
+      yield line
+    f.close()
+
+  # Output the thread dumps of any timed-out shards.
+  if timeout_dumps:
+    yield '\n'
+    yield ('=' * 80) + '\n'
+    yield '\nOne or more shards timed out.\n'
+    yield ('=' * 80) + '\n'
+    for i, dump in timeout_dumps.items():
+      yield 'Index of timed out shard: %d\n' % i
+      yield 'Thread dump:\n'
+      yield dump
+      yield '\n'
+
+    raise cmd_helper.TimeoutError('JUnit shards timed out.')
+
+
+def _GetTestClasses(file_path):
+  test_jar_paths = subprocess.check_output([file_path,
+                                            '--print-classpath']).decode()
+  test_jar_paths = test_jar_paths.split(':')
+
+  test_classes = []
+  for test_jar_path in test_jar_paths:
+    # Avoid searching through jars that are for the test runner.
+    # TODO(crbug.com/1144077): Use robolectric buildconfig file arg.
+    if 'third_party/robolectric/' in test_jar_path:
+      continue
+
+    test_classes += _GetTestClassesFromJar(test_jar_path)
+
+  logging.info('Found %d test classes in classpath jars.', len(test_classes))
+  return test_classes
+
+
+def _GetTestClassesFromJar(test_jar_path):
+  """Returns a list of test classes from a jar.
+
+  Test files must end in "Test"; this is enforced by:
+  //tools/android/errorprone_plugin/src/org/chromium/tools/errorprone
+  /plugin/TestClassNameCheck.java
+
+  Args:
+    test_jar_path: Path to the jar.
+
+  Returns:
+    A list of test classes that were in the jar.
+  """
+  class_list = []
+  with zipfile.ZipFile(test_jar_path, 'r') as zip_f:
+    for test_class in zip_f.namelist():
+      if test_class.startswith(_EXCLUDED_CLASSES_PREFIXES):
+        continue
+      if test_class.endswith('Test.class') and '$' not in test_class:
+        class_list.append(test_class)
+
+  return class_list
diff --git a/android/pylib/local/machine/local_machine_junit_test_run_test.py b/android/pylib/local/machine/local_machine_junit_test_run_test.py
new file mode 100755
index 000000000000..d8913b44b54e
--- /dev/null
+++ b/android/pylib/local/machine/local_machine_junit_test_run_test.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env vpython3
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+
+import os
+import unittest
+
+from pylib.local.machine import local_machine_junit_test_run
+from py_utils import tempfile_ext
+from mock import patch  # pylint: disable=import-error
+
+
+class LocalMachineJunitTestRunTests(unittest.TestCase):
+  def testAddPropertiesJar(self):
+    with tempfile_ext.NamedTemporaryDirectory() as temp_dir:
+      apk = 'resource_apk'
+      cmd_list = []
+      local_machine_junit_test_run.AddPropertiesJar(cmd_list, temp_dir, apk)
+      self.assertEqual(cmd_list, [])
+      cmd_list = [['test1']]
+      local_machine_junit_test_run.AddPropertiesJar(cmd_list, temp_dir, apk)
+      self.assertEqual(
+          cmd_list[0],
+          ['test1', '--classpath',
+           os.path.join(temp_dir, 'properties.jar')])
+      cmd_list = [['test1'], ['test2']]
+      local_machine_junit_test_run.AddPropertiesJar(cmd_list, temp_dir, apk)
+      self.assertEqual(len(cmd_list[0]), 3)
+      self.assertEqual(
+          cmd_list[1],
+          ['test2', '--classpath',
+           os.path.join(temp_dir, 'properties.jar')])
+
+  @patch('multiprocessing.cpu_count')
+  def testChooseNumOfShards(self, mock_cpu_count):
+    mock_cpu_count.return_value = 36
+    # A request for a single shard (e.g. when a test filter is set) is
+    # honored.
+ test_shards = 1 + test_classes = [1] * 50 + shards = local_machine_junit_test_run.ChooseNumOfShards( + test_classes, test_shards) + self.assertEqual(1, shards) + + # Tests setting shards. + test_shards = 4 + shards = local_machine_junit_test_run.ChooseNumOfShards( + test_classes, test_shards) + self.assertEqual(4, shards) + + # Tests using min_class per shards. + test_classes = [1] * 20 + test_shards = 8 + shards = local_machine_junit_test_run.ChooseNumOfShards( + test_classes, test_shards) + self.assertEqual(2, shards) + + def testGroupTestsForShard(self): + test_classes = [] + results = local_machine_junit_test_run.GroupTestsForShard(1, test_classes) + self.assertDictEqual(results, {0: []}) + + test_classes = ['dir/test.class'] * 5 + results = local_machine_junit_test_run.GroupTestsForShard(1, test_classes) + self.assertDictEqual(results, {0: ['dir.test*'] * 5}) + + test_classes = ['dir/test.class'] * 5 + results = local_machine_junit_test_run.GroupTestsForShard(2, test_classes) + ans_dict = { + 0: ['dir.test*'] * 3, + 1: ['dir.test*'] * 2, + } + self.assertDictEqual(results, ans_dict) + + test_classes = ['a10 warthog', 'b17', 'SR71'] + results = local_machine_junit_test_run.GroupTestsForShard(3, test_classes) + ans_dict = { + 0: ['a10 warthog'], + 1: ['b17'], + 2: ['SR71'], + } + self.assertDictEqual(results, ans_dict) + + +if __name__ == '__main__': + unittest.main() diff --git a/android/pylib/monkey/__init__.py b/android/pylib/monkey/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/android/pylib/monkey/monkey_test_instance.py b/android/pylib/monkey/monkey_test_instance.py new file mode 100644 index 000000000000..d53f5cdeefc8 --- /dev/null +++ b/android/pylib/monkey/monkey_test_instance.py @@ -0,0 +1,73 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+
+
+import random
+
+from pylib import constants
+from pylib.base import test_instance
+
+
+_SINGLE_EVENT_TIMEOUT = 100  # Milliseconds
+
+class MonkeyTestInstance(test_instance.TestInstance):
+
+  def __init__(self, args, _):
+    super().__init__()
+
+    self._categories = args.categories
+    self._event_count = args.event_count
+    self._seed = args.seed or random.randint(1, 100)
+    self._throttle = args.throttle
+    self._verbose_count = args.verbose_count
+
+    self._package = constants.PACKAGE_INFO[args.browser].package
+    self._activity = constants.PACKAGE_INFO[args.browser].activity
+
+    self._timeout_s = (
+        self.event_count * (self.throttle + _SINGLE_EVENT_TIMEOUT)) / 1000
+
+  #override
+  def TestType(self):
+    return 'monkey'
+
+  #override
+  def SetUp(self):
+    pass
+
+  #override
+  def TearDown(self):
+    pass
+
+  @property
+  def activity(self):
+    return self._activity
+
+  @property
+  def categories(self):
+    return self._categories
+
+  @property
+  def event_count(self):
+    return self._event_count
+
+  @property
+  def package(self):
+    return self._package
+
+  @property
+  def seed(self):
+    return self._seed
+
+  @property
+  def throttle(self):
+    return self._throttle
+
+  @property
+  def timeout(self):
+    return self._timeout_s
+
+  @property
+  def verbose_count(self):
+    return self._verbose_count
diff --git a/android/pylib/output/__init__.py b/android/pylib/output/__init__.py
new file mode 100644
index 000000000000..b8e1dbd6e92a
--- /dev/null
+++ b/android/pylib/output/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2017 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/android/pylib/output/local_output_manager.py b/android/pylib/output/local_output_manager.py
new file mode 100644
index 000000000000..74b4b95b8b56
--- /dev/null
+++ b/android/pylib/output/local_output_manager.py
@@ -0,0 +1,48 @@
+# Copyright 2017 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import time
+import os
+import shutil
+
+try:
+  from urllib.parse import quote
+except ImportError:
+  from urllib import quote
+
+from pylib.base import output_manager
+
+
+class LocalOutputManager(output_manager.OutputManager):
+  """Saves and manages test output files locally in the output directory.
+
+  Output files are saved under {output_dir}/TEST_RESULTS_{timestamp}.
+ """ + + def __init__(self, output_dir): + super().__init__() + timestamp = time.strftime( + '%Y_%m_%dT%H_%M_%S', time.localtime()) + self._output_root = os.path.abspath(os.path.join( + output_dir, 'TEST_RESULTS_%s' % timestamp)) + + #override + def _CreateArchivedFile(self, out_filename, out_subdir, datatype): + return LocalArchivedFile( + out_filename, out_subdir, datatype, self._output_root) + + +class LocalArchivedFile(output_manager.ArchivedFile): + + def __init__(self, out_filename, out_subdir, datatype, out_root): + super().__init__(out_filename, out_subdir, datatype) + self._output_path = os.path.join(out_root, out_subdir, out_filename) + + def _Link(self): + return 'file://%s' % quote(self._output_path) + + def _Archive(self): + if not os.path.exists(os.path.dirname(self._output_path)): + os.makedirs(os.path.dirname(self._output_path)) + shutil.copy(self.name, self._output_path) diff --git a/android/pylib/output/local_output_manager_test.py b/android/pylib/output/local_output_manager_test.py new file mode 100755 index 000000000000..d2388140b2f6 --- /dev/null +++ b/android/pylib/output/local_output_manager_test.py @@ -0,0 +1,34 @@ +#! /usr/bin/env vpython3 +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# pylint: disable=protected-access + +import tempfile +import shutil +import unittest + +from pylib.base import output_manager +from pylib.base import output_manager_test_case +from pylib.output import local_output_manager + + +class LocalOutputManagerTest(output_manager_test_case.OutputManagerTestCase): + + def setUp(self): + self._output_dir = tempfile.mkdtemp() + self._output_manager = local_output_manager.LocalOutputManager( + self._output_dir) + + def testUsableTempFile(self): + self.assertUsableTempFile( + self._output_manager._CreateArchivedFile( + 'test_file', 'test_subdir', output_manager.Datatype.TEXT)) + + def tearDown(self): + shutil.rmtree(self._output_dir) + + +if __name__ == '__main__': + unittest.main() diff --git a/android/pylib/output/noop_output_manager.py b/android/pylib/output/noop_output_manager.py new file mode 100644 index 000000000000..acabd30dc18f --- /dev/null +++ b/android/pylib/output/noop_output_manager.py @@ -0,0 +1,38 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +from pylib.base import output_manager + +# TODO(jbudorick): This class is currently mostly unused. +# Add a --bot-mode argument that all bots pass. If --bot-mode and +# --local-output args are both not passed to test runner then use this +# as the output manager impl. + +# pylint: disable=no-self-use + +class NoopOutputManager(output_manager.OutputManager): + + #override + def _CreateArchivedFile(self, out_filename, out_subdir, datatype): + del out_filename, out_subdir, datatype + return NoopArchivedFile() + + +class NoopArchivedFile(output_manager.ArchivedFile): + + def __init__(self): + super().__init__(None, None, None) + + def Link(self): + """NoopArchivedFiles are not retained.""" + return '' + + def _Link(self): + pass + + def Archive(self): + """NoopArchivedFiles are not retained.""" + + def _Archive(self): + pass diff --git a/android/pylib/output/noop_output_manager_test.py b/android/pylib/output/noop_output_manager_test.py new file mode 100755 index 000000000000..ff4c805d9c56 --- /dev/null +++ b/android/pylib/output/noop_output_manager_test.py @@ -0,0 +1,27 @@ +#! 
/usr/bin/env vpython3 +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# pylint: disable=protected-access + +import unittest + +from pylib.base import output_manager +from pylib.base import output_manager_test_case +from pylib.output import noop_output_manager + + +class NoopOutputManagerTest(output_manager_test_case.OutputManagerTestCase): + + def setUp(self): + self._output_manager = noop_output_manager.NoopOutputManager() + + def testUsableTempFile(self): + self.assertUsableTempFile( + self._output_manager._CreateArchivedFile( + 'test_file', 'test_subdir', output_manager.Datatype.TEXT)) + + +if __name__ == '__main__': + unittest.main() diff --git a/android/pylib/output/remote_output_manager.py b/android/pylib/output/remote_output_manager.py new file mode 100644 index 000000000000..bf585bbe5b0f --- /dev/null +++ b/android/pylib/output/remote_output_manager.py @@ -0,0 +1,88 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import hashlib +import os + +from pylib.base import output_manager +from pylib.output import noop_output_manager +from pylib.utils import logdog_helper +from pylib.utils import google_storage_helper + + +class RemoteOutputManager(output_manager.OutputManager): + + def __init__(self, bucket): + """Uploads output files to Google Storage or LogDog. + + Files will either be uploaded directly to Google Storage or LogDog + depending on the datatype. + + Args + bucket: Bucket to use when saving to Google Storage. + """ + super().__init__() + self._bucket = bucket + + #override + def _CreateArchivedFile(self, out_filename, out_subdir, datatype): + if datatype == output_manager.Datatype.TEXT: + try: + logdog_helper.get_logdog_client() + return LogdogArchivedFile(out_filename, out_subdir, datatype) + except RuntimeError: + return noop_output_manager.NoopArchivedFile() + else: + if self._bucket is None: + return noop_output_manager.NoopArchivedFile() + return GoogleStorageArchivedFile( + out_filename, out_subdir, datatype, self._bucket) + + +class LogdogArchivedFile(output_manager.ArchivedFile): + + def __init__(self, out_filename, out_subdir, datatype): + super().__init__(out_filename, out_subdir, datatype) + self._stream_name = '%s_%s' % (out_subdir, out_filename) + + def _Link(self): + return logdog_helper.get_viewer_url(self._stream_name) + + def _Archive(self): + with open(self.name, 'r') as f: + logdog_helper.text(self._stream_name, f.read()) + + +class GoogleStorageArchivedFile(output_manager.ArchivedFile): + + def __init__(self, out_filename, out_subdir, datatype, bucket): + super().__init__(out_filename, out_subdir, datatype) + self._bucket = bucket + self._upload_path = None + self._content_addressed = None + + def _PrepareArchive(self): + self._content_addressed = (self._datatype in ( + output_manager.Datatype.HTML, + output_manager.Datatype.PNG, + output_manager.Datatype.JSON)) + if self._content_addressed: + sha1 = hashlib.sha1() + with open(self.name, 'rb') as f: + sha1.update(f.read()) + self._upload_path = sha1.hexdigest() + else: + self._upload_path = os.path.join(self._out_subdir, self._out_filename) + + def _Link(self): + return google_storage_helper.get_url_link( + self._upload_path, self._bucket) + + def _Archive(self): + if (self._content_addressed and + google_storage_helper.exists(self._upload_path, self._bucket)): + return + + google_storage_helper.upload( + 
self._upload_path, self.name, self._bucket, content_type=self._datatype) diff --git a/android/pylib/output/remote_output_manager_test.py b/android/pylib/output/remote_output_manager_test.py new file mode 100755 index 000000000000..875451c8bde5 --- /dev/null +++ b/android/pylib/output/remote_output_manager_test.py @@ -0,0 +1,32 @@ +#! /usr/bin/env vpython3 +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# pylint: disable=protected-access + +import unittest + +from pylib.base import output_manager +from pylib.base import output_manager_test_case +from pylib.output import remote_output_manager + +import mock # pylint: disable=import-error + + +@mock.patch('pylib.utils.google_storage_helper') +class RemoteOutputManagerTest(output_manager_test_case.OutputManagerTestCase): + + def setUp(self): + self._output_manager = remote_output_manager.RemoteOutputManager( + 'this-is-a-fake-bucket') + + def testUsableTempFile(self, google_storage_helper_mock): + del google_storage_helper_mock + self.assertUsableTempFile( + self._output_manager._CreateArchivedFile( + 'test_file', 'test_subdir', output_manager.Datatype.TEXT)) + + +if __name__ == '__main__': + unittest.main() diff --git a/android/pylib/pexpect.py b/android/pylib/pexpect.py new file mode 100644 index 000000000000..6ed6451b1ab3 --- /dev/null +++ b/android/pylib/pexpect.py @@ -0,0 +1,21 @@ +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +from __future__ import absolute_import + +import os +import sys + +_CHROME_SRC = os.path.join( + os.path.abspath(os.path.dirname(__file__)), '..', '..', '..') + +_PEXPECT_PATH = os.path.join(_CHROME_SRC, 'third_party', 'pexpect') +if _PEXPECT_PATH not in sys.path: + sys.path.append(_PEXPECT_PATH) + +# pexpect is not available on all platforms. We allow this file to be imported +# on platforms without pexpect and only fail when pexpect is actually used. +try: + from pexpect import * # pylint: disable=W0401,W0614 +except ImportError: + pass diff --git a/android/pylib/restart_adbd.sh b/android/pylib/restart_adbd.sh new file mode 100755 index 000000000000..201628629eb6 --- /dev/null +++ b/android/pylib/restart_adbd.sh @@ -0,0 +1,20 @@ +#!/system/bin/sh + +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Android shell script to restart adbd on the device. This has to be run +# atomically as a shell script because stopping adbd prevents further commands +# from running (even if called in the same adb shell). + +trap '' HUP +trap '' TERM +trap '' PIPE + +function restart() { + stop adbd + start adbd +} + +restart & diff --git a/android/pylib/results/__init__.py b/android/pylib/results/__init__.py new file mode 100644 index 000000000000..d46d7b496679 --- /dev/null +++ b/android/pylib/results/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
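
The content-addressed naming in GoogleStorageArchivedFile._PrepareArchive above reduces to hashing the artifact's bytes; a minimal standalone sketch of the same idea (the helper name and sample path are illustrative):

    import hashlib

    def content_address(file_path):
      # Mirrors _PrepareArchive: HTML/PNG/JSON artifacts are named after the
      # SHA-1 of their contents, so identical files map to one object and
      # _Archive can skip re-uploading anything that already exists.
      sha1 = hashlib.sha1()
      with open(file_path, 'rb') as f:
        sha1.update(f.read())
      return sha1.hexdigest()
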
diff --git a/android/pylib/results/flakiness_dashboard/__init__.py b/android/pylib/results/flakiness_dashboard/__init__.py new file mode 100644 index 000000000000..d46d7b496679 --- /dev/null +++ b/android/pylib/results/flakiness_dashboard/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. diff --git a/android/pylib/results/flakiness_dashboard/json_results_generator.py b/android/pylib/results/flakiness_dashboard/json_results_generator.py new file mode 100644 index 000000000000..3e753e55da02 --- /dev/null +++ b/android/pylib/results/flakiness_dashboard/json_results_generator.py @@ -0,0 +1,702 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# +# Most of this file was ported over from Blink's +# tools/blinkpy/web_tests/layout_package/json_results_generator.py +# tools/blinkpy/common/net/file_uploader.py +# + +import json +import logging +import mimetypes +import os +import time +try: + from urllib.request import urlopen, Request + from urllib.error import HTTPError, URLError + from urllib.parse import quote +except ImportError: + from urllib import quote + from urllib2 import urlopen, HTTPError, URLError, Request + +_log = logging.getLogger(__name__) + +_JSON_PREFIX = 'ADD_RESULTS(' +_JSON_SUFFIX = ');' + + +def HasJSONWrapper(string): + return string.startswith(_JSON_PREFIX) and string.endswith(_JSON_SUFFIX) + + +def StripJSONWrapper(json_content): + # FIXME: Kill this code once the server returns json instead of jsonp. + if HasJSONWrapper(json_content): + return json_content[len(_JSON_PREFIX):len(json_content) - len(_JSON_SUFFIX)] + return json_content + + +def WriteJSON(json_object, file_path, callback=None): + # Specify separators in order to get compact encoding. + json_string = json.dumps(json_object, separators=(',', ':')) + if callback: + json_string = callback + '(' + json_string + ');' + with open(file_path, 'w') as fp: + fp.write(json_string) + + +def ConvertTrieToFlatPaths(trie, prefix=None): + """Flattens the trie of paths, prepending a prefix to each.""" + result = {} + for name, data in trie.items(): + if prefix: + name = prefix + '/' + name + + if len(data) != 0 and not 'results' in data: + result.update(ConvertTrieToFlatPaths(data, name)) + else: + result[name] = data + + return result + + +def AddPathToTrie(path, value, trie): + """Inserts a single path and value into a directory trie structure.""" + if not '/' in path: + trie[path] = value + return + + directory, _, rest = path.partition('/') + if not directory in trie: + trie[directory] = {} + AddPathToTrie(rest, value, trie[directory]) + + +def TestTimingsTrie(individual_test_timings): + """Breaks a test name into dicts by directory + + foo/bar/baz.html: 1ms + foo/bar/baz1.html: 3ms + + becomes + foo: { + bar: { + baz.html: 1, + baz1.html: 3 + } + } + """ + trie = {} + for test_result in individual_test_timings: + test = test_result.test_name + + AddPathToTrie(test, int(1000 * test_result.test_run_time), trie) + + return trie + + +class TestResult: + """A simple class that represents a single test result.""" + + # Test modifier constants. 
+ (NONE, FAILS, FLAKY, DISABLED) = list(range(4)) + + def __init__(self, test, failed=False, elapsed_time=0): + self.test_name = test + self.failed = failed + self.test_run_time = elapsed_time + + test_name = test + try: + test_name = test.split('.')[1] + except IndexError: + _log.warning('Invalid test name: %s.', test) + + if test_name.startswith('FAILS_'): + self.modifier = self.FAILS + elif test_name.startswith('FLAKY_'): + self.modifier = self.FLAKY + elif test_name.startswith('DISABLED_'): + self.modifier = self.DISABLED + else: + self.modifier = self.NONE + + def Fixable(self): + return self.failed or self.modifier == self.DISABLED + + +class JSONResultsGeneratorBase: + """A JSON results generator for generic tests.""" + + MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG = 750 + # Min time (seconds) that will be added to the JSON. + MIN_TIME = 1 + + # Note that in non-chromium tests those chars are used to indicate + # test modifiers (FAILS, FLAKY, etc) but not actual test results. + PASS_RESULT = 'P' + SKIP_RESULT = 'X' + FAIL_RESULT = 'F' + FLAKY_RESULT = 'L' + NO_DATA_RESULT = 'N' + + MODIFIER_TO_CHAR = {TestResult.NONE: PASS_RESULT, + TestResult.DISABLED: SKIP_RESULT, + TestResult.FAILS: FAIL_RESULT, + TestResult.FLAKY: FLAKY_RESULT} + + VERSION = 4 + VERSION_KEY = 'version' + RESULTS = 'results' + TIMES = 'times' + BUILD_NUMBERS = 'buildNumbers' + TIME = 'secondsSinceEpoch' + TESTS = 'tests' + + FIXABLE_COUNT = 'fixableCount' + FIXABLE = 'fixableCounts' + ALL_FIXABLE_COUNT = 'allFixableCount' + + RESULTS_FILENAME = 'results.json' + TIMES_MS_FILENAME = 'times_ms.json' + INCREMENTAL_RESULTS_FILENAME = 'incremental_results.json' + + # line too long pylint: disable=line-too-long + URL_FOR_TEST_LIST_JSON = ( + 'https://%s/testfile?builder=%s&name=%s&testlistjson=1&testtype=%s&' + 'master=%s') + # pylint: enable=line-too-long + + def __init__(self, builder_name, build_name, build_number, + results_file_base_path, builder_base_url, + test_results_map, svn_repositories=None, + test_results_server=None, + test_type='', + master_name=''): + """Modifies the results.json file. Grabs it off the archive directory + if it is not found locally. + + Args + builder_name: the builder name (e.g. Webkit). + build_name: the build name (e.g. webkit-rel). + build_number: the build number. + results_file_base_path: Absolute path to the directory containing the + results json file. + builder_base_url: the URL where we have the archived test results. + If this is None no archived results will be retrieved. + test_results_map: A dictionary that maps test_name to TestResult. + svn_repositories: A (json_field_name, svn_path) pair for SVN + repositories that tests rely on. The SVN revision will be + included in the JSON with the given json_field_name. + test_results_server: server that hosts test results json. + test_type: test type string (e.g. 'layout-tests'). + master_name: the name of the buildbot master. 
+ """ + self._builder_name = builder_name + self._build_name = build_name + self._build_number = build_number + self._builder_base_url = builder_base_url + self._results_directory = results_file_base_path + + self._test_results_map = test_results_map + self._test_results = list(test_results_map.values()) + + self._svn_repositories = svn_repositories + if not self._svn_repositories: + self._svn_repositories = {} + + self._test_results_server = test_results_server + self._test_type = test_type + self._master_name = master_name + + self._archived_results = None + + def GenerateJSONOutput(self): + json_object = self.GetJSON() + if json_object: + file_path = ( + os.path.join( + self._results_directory, + self.INCREMENTAL_RESULTS_FILENAME)) + WriteJSON(json_object, file_path) + + def GenerateTimesMSFile(self): + times = TestTimingsTrie(list(self._test_results_map.values())) + file_path = os.path.join(self._results_directory, self.TIMES_MS_FILENAME) + WriteJSON(times, file_path) + + def GetJSON(self): + """Gets the results for the results.json file.""" + results_json = {} + + if not results_json: + results_json, error = self._GetArchivedJSONResults() + if error: + # If there was an error don't write a results.json + # file at all as it would lose all the information on the + # bot. + _log.error( + 'Archive directory is inaccessible. Not ' + 'modifying or clobbering the results.json ' + 'file: %s', error) + return None + + builder_name = self._builder_name + if results_json and builder_name not in results_json: + _log.debug('Builder name (%s) is not in the results.json file.', + builder_name) + + self._ConvertJSONToCurrentVersion(results_json) + + if builder_name not in results_json: + results_json[builder_name] = ( + self._CreateResultsForBuilderJSON()) + + results_for_builder = results_json[builder_name] + + if builder_name: + self._InsertGenericMetaData(results_for_builder) + + self._InsertFailureSummaries(results_for_builder) + + # Update the all failing tests with result type and time. + tests = results_for_builder[self.TESTS] + all_failing_tests = self._GetFailedTestNames() + all_failing_tests.update(ConvertTrieToFlatPaths(tests)) + + for test in all_failing_tests: + self._InsertTestTimeAndResult(test, tests) + + return results_json + + def SetArchivedResults(self, archived_results): + self._archived_results = archived_results + + def UploadJSONFiles(self, json_files): + """Uploads the given json_files to the test_results_server (if the + test_results_server is given).""" + if not self._test_results_server: + return + + if not self._master_name: + _log.error( + '--test-results-server was set, but --master-name was not. Not ' + 'uploading JSON files.') + return + + _log.info('Uploading JSON files for builder: %s', self._builder_name) + attrs = [('builder', self._builder_name), + ('testtype', self._test_type), + ('master', self._master_name)] + + files = [(json_file, os.path.join(self._results_directory, json_file)) + for json_file in json_files] + + url = 'https://%s/testfile/upload' % self._test_results_server + # Set uploading timeout in case appengine server is having problems. + # 120 seconds are more than enough to upload test results. 
+    uploader = _FileUploader(url, 120)
+    try:
+      response = uploader.UploadAsMultipartFormData(files, attrs)
+      if response:
+        if response.code == 200:
+          _log.info('JSON uploaded.')
+        else:
+          _log.debug(
+              "JSON upload failed, %d: '%s'", response.code, response.read())
+      else:
+        _log.error('JSON upload failed; no response returned')
+    except Exception as err:  # pylint: disable=broad-except
+      _log.error('Upload failed: %s', err)
+      return
+
+  def _GetTestTiming(self, test_name):
+    """Returns test timing data (elapsed time) in seconds
+    for the given test_name."""
+    if test_name in self._test_results_map:
+      # Floor for now to get time in seconds.
+      return int(self._test_results_map[test_name].test_run_time)
+    return 0
+
+  def _GetFailedTestNames(self):
+    """Returns a set of failed test names."""
+    return set(r.test_name for r in self._test_results if r.failed)
+
+  def _GetModifierChar(self, test_name):
+    """Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT,
+    PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test modifier
+    for the given test_name.
+    """
+    if test_name not in self._test_results_map:
+      return self.__class__.NO_DATA_RESULT
+
+    test_result = self._test_results_map[test_name]
+    if test_result.modifier in list(self.MODIFIER_TO_CHAR.keys()):
+      return self.MODIFIER_TO_CHAR[test_result.modifier]
+
+    return self.__class__.PASS_RESULT
+
+  def _get_result_char(self, test_name):
+    """Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT,
+    PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test result
+    for the given test_name.
+    """
+    if test_name not in self._test_results_map:
+      return self.__class__.NO_DATA_RESULT
+
+    test_result = self._test_results_map[test_name]
+    if test_result.modifier == TestResult.DISABLED:
+      return self.__class__.SKIP_RESULT
+
+    if test_result.failed:
+      return self.__class__.FAIL_RESULT
+
+    return self.__class__.PASS_RESULT
+
+  def _GetSVNRevision(self, in_directory):
+    """Returns the svn revision for the given directory.
+
+    Args:
+      in_directory: The directory where svn is to be run.
+    """
+    # This is overridden in flakiness_dashboard_results_uploader.py.
+    raise NotImplementedError()
+
+  def _GetArchivedJSONResults(self):
+    """Downloads the test-name-list JSON file from the test-results server.
+
+    This is used to generate incremental JSON, so the generated file keeps
+    info for tests that failed in earlier runs but pass or are skipped in
+    the current run.
+
+    Returns:
+      An (archived_results, error) tuple where error is None if results
+      were successfully read.
+    """
+    results_json = {}
+    old_results = None
+    error = None
+
+    if not self._test_results_server:
+      return {}, None
+
+    results_file_url = (self.URL_FOR_TEST_LIST_JSON %
+                        (quote(self._test_results_server),
+                         quote(self._builder_name), self.RESULTS_FILENAME,
+                         quote(self._test_type), quote(self._master_name)))
+
+    try:
+      # FIXME: We should talk to the network via a Host object.
+      results_file = urlopen(results_file_url)
+      old_results = results_file.read()
+    except HTTPError as http_error:
+      # A non-4xx status code means the bot is hosed for some reason
+      # and we can't grab the results.json file off of it.
+      if http_error.code < 400 or http_error.code >= 500:
+        error = http_error
+    except URLError as url_error:
+      error = url_error
+
+    if old_results:
+      # Strip the prefix and suffix so we can get the actual JSON object.
+      old_results = StripJSONWrapper(old_results)
+
+      try:
+        results_json = json.loads(old_results)
+      except Exception:  # pylint: disable=broad-except
+        _log.debug('results.json was not valid JSON.
Clobbering.') + # The JSON file is not valid JSON. Just clobber the results. + results_json = {} + else: + _log.debug('Old JSON results do not exist. Starting fresh.') + results_json = {} + + return results_json, error + + def _InsertFailureSummaries(self, results_for_builder): + """Inserts aggregate pass/failure statistics into the JSON. + This method reads self._test_results and generates + FIXABLE, FIXABLE_COUNT and ALL_FIXABLE_COUNT entries. + + Args: + results_for_builder: Dictionary containing the test results for a + single builder. + """ + # Insert the number of tests that failed or skipped. + fixable_count = len([r for r in self._test_results if r.Fixable()]) + self._InsertItemIntoRawList(results_for_builder, + fixable_count, self.FIXABLE_COUNT) + + # Create a test modifiers (FAILS, FLAKY etc) summary dictionary. + entry = {} + for test_name in self._test_results_map.keys(): + result_char = self._GetModifierChar(test_name) + entry[result_char] = entry.get(result_char, 0) + 1 + + # Insert the pass/skip/failure summary dictionary. + self._InsertItemIntoRawList(results_for_builder, entry, + self.FIXABLE) + + # Insert the number of all the tests that are supposed to pass. + all_test_count = len(self._test_results) + self._InsertItemIntoRawList(results_for_builder, + all_test_count, self.ALL_FIXABLE_COUNT) + + def _InsertItemIntoRawList(self, results_for_builder, item, key): + """Inserts the item into the list with the given key in the results for + this builder. Creates the list if no such list exists. + + Args: + results_for_builder: Dictionary containing the test results for a + single builder. + item: Number or string to insert into the list. + key: Key in results_for_builder for the list to insert into. + """ + if key in results_for_builder: + raw_list = results_for_builder[key] + else: + raw_list = [] + + raw_list.insert(0, item) + raw_list = raw_list[:self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG] + results_for_builder[key] = raw_list + + def _InsertItemRunLengthEncoded(self, item, encoded_results): + """Inserts the item into the run-length encoded results. + + Args: + item: String or number to insert. + encoded_results: run-length encoded results. An array of arrays, e.g. + [[3,'A'],[1,'Q']] encodes AAAQ. + """ + if len(encoded_results) != 0 and item == encoded_results[0][1]: + num_results = encoded_results[0][0] + if num_results <= self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG: + encoded_results[0][0] = num_results + 1 + else: + # Use a list instead of a class for the run-length encoding since + # we want the serialized form to be concise. + encoded_results.insert(0, [1, item]) + + def _InsertGenericMetaData(self, results_for_builder): + """ Inserts generic metadata (such as version number, current time etc) + into the JSON. + + Args: + results_for_builder: Dictionary containing the test results for + a single builder. + """ + self._InsertItemIntoRawList(results_for_builder, + self._build_number, self.BUILD_NUMBERS) + + # Include SVN revisions for the given repositories. + for (name, path) in self._svn_repositories: + # Note: for JSON file's backward-compatibility we use 'chrome' rather + # than 'chromium' here. 
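+      # E.g. ('webkit', 'third_party/WebKit') is recorded under the key
+      # 'webkitRevision', and a name of 'chromium' would be recorded under
+      # 'chromeRevision'.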
+ lowercase_name = name.lower() + if lowercase_name == 'chromium': + lowercase_name = 'chrome' + self._InsertItemIntoRawList(results_for_builder, + self._GetSVNRevision(path), + lowercase_name + 'Revision') + + self._InsertItemIntoRawList(results_for_builder, + int(time.time()), + self.TIME) + + def _InsertTestTimeAndResult(self, test_name, tests): + """ Insert a test item with its results to the given tests dictionary. + + Args: + tests: Dictionary containing test result entries. + """ + + result = self._get_result_char(test_name) + test_time = self._GetTestTiming(test_name) + + this_test = tests + for segment in test_name.split('/'): + if segment not in this_test: + this_test[segment] = {} + this_test = this_test[segment] + + if len(this_test) == 0: + self._PopulateResultsAndTimesJSON(this_test) + + if self.RESULTS in this_test: + self._InsertItemRunLengthEncoded(result, this_test[self.RESULTS]) + else: + this_test[self.RESULTS] = [[1, result]] + + if self.TIMES in this_test: + self._InsertItemRunLengthEncoded(test_time, this_test[self.TIMES]) + else: + this_test[self.TIMES] = [[1, test_time]] + + def _ConvertJSONToCurrentVersion(self, results_json): + """If the JSON does not match the current version, converts it to the + current version and adds in the new version number. + """ + if self.VERSION_KEY in results_json: + archive_version = results_json[self.VERSION_KEY] + if archive_version == self.VERSION: + return + else: + archive_version = 3 + + # version 3->4 + if archive_version == 3: + for results in list(results_json.values()): + self._ConvertTestsToTrie(results) + + results_json[self.VERSION_KEY] = self.VERSION + + def _ConvertTestsToTrie(self, results): + if not self.TESTS in results: + return + + test_results = results[self.TESTS] + test_results_trie = {} + for test in test_results.keys(): + single_test_result = test_results[test] + AddPathToTrie(test, single_test_result, test_results_trie) + + results[self.TESTS] = test_results_trie + + def _PopulateResultsAndTimesJSON(self, results_and_times): + results_and_times[self.RESULTS] = [] + results_and_times[self.TIMES] = [] + return results_and_times + + def _CreateResultsForBuilderJSON(self): + results_for_builder = {} + results_for_builder[self.TESTS] = {} + return results_for_builder + + def _RemoveItemsOverMaxNumberOfBuilds(self, encoded_list): + """Removes items from the run-length encoded list after the final + item that exceeds the max number of builds to track. + + Args: + encoded_results: run-length encoded results. An array of arrays, e.g. + [[3,'A'],[1,'Q']] encodes AAAQ. + """ + num_builds = 0 + index = 0 + for result in encoded_list: + num_builds = num_builds + result[0] + index = index + 1 + if num_builds > self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG: + return encoded_list[:index] + return encoded_list + + def _NormalizeResultsJSON(self, test, test_name, tests): + """ Prune tests where all runs pass or tests that no longer exist and + truncate all results to maxNumberOfBuilds. + + Args: + test: ResultsAndTimes object for this test. + test_name: Name of the test. + tests: The JSON object with all the test results for this builder. 
+ """ + test[self.RESULTS] = self._RemoveItemsOverMaxNumberOfBuilds( + test[self.RESULTS]) + test[self.TIMES] = self._RemoveItemsOverMaxNumberOfBuilds( + test[self.TIMES]) + + is_all_pass = self._IsResultsAllOfType(test[self.RESULTS], + self.PASS_RESULT) + is_all_no_data = self._IsResultsAllOfType(test[self.RESULTS], + self.NO_DATA_RESULT) + max_time = max([test_time[1] for test_time in test[self.TIMES]]) + + # Remove all passes/no-data from the results to reduce noise and + # filesize. If a test passes every run, but takes > MIN_TIME to run, + # don't throw away the data. + if is_all_no_data or (is_all_pass and max_time <= self.MIN_TIME): + del tests[test_name] + + # method could be a function pylint: disable=R0201 + def _IsResultsAllOfType(self, results, result_type): + """Returns whether all the results are of the given type + (e.g. all passes).""" + return len(results) == 1 and results[0][1] == result_type + + +class _FileUploader: + + def __init__(self, url, timeout_seconds): + self._url = url + self._timeout_seconds = timeout_seconds + + def UploadAsMultipartFormData(self, files, attrs): + file_objs = [] + for filename, path in files: + with open(path, 'rb') as fp: + file_objs.append(('file', filename, fp.read())) + + # FIXME: We should use the same variable names for the formal and actual + # parameters. + content_type, data = _EncodeMultipartFormData(attrs, file_objs) + return self._UploadData(content_type, data) + + def _UploadData(self, content_type, data): + start = time.time() + end = start + self._timeout_seconds + while time.time() < end: + try: + request = Request(self._url, data, {'Content-Type': content_type}) + return urlopen(request) + except HTTPError as e: + _log.warning( + 'Received HTTP status %s loading "%s". ' + 'Retrying in 10 seconds...', e.code, e.filename) + time.sleep(10) + + +def _GetMIMEType(filename): + return mimetypes.guess_type(filename)[0] or 'application/octet-stream' + + +# FIXME: Rather than taking tuples, this function should take more +# structured data. +def _EncodeMultipartFormData(fields, files): + """Encode form fields for multipart/form-data. + + Args: + fields: A sequence of (name, value) elements for regular form fields. + files: A sequence of (name, filename, value) elements for data to be + uploaded as files. + Returns: + (content_type, body) ready for httplib.HTTP instance. 
+ + Source: + http://code.google.com/p/rietveld/source/browse/trunk/upload.py + """ + BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-' + CRLF = '\r\n' + lines = [] + + for key, value in fields: + lines.append('--' + BOUNDARY) + lines.append('Content-Disposition: form-data; name="%s"' % key) + lines.append('') + if isinstance(value, str): + value = value.encode('utf-8') + lines.append(value) + + for key, filename, value in files: + lines.append('--' + BOUNDARY) + lines.append('Content-Disposition: form-data; name="%s"; ' + 'filename="%s"' % (key, filename)) + lines.append('Content-Type: %s' % _GetMIMEType(filename)) + lines.append('') + if isinstance(value, str): + value = value.encode('utf-8') + lines.append(value) + + lines.append('--' + BOUNDARY + '--') + lines.append('') + body = CRLF.join(lines) + content_type = 'multipart/form-data; boundary=%s' % BOUNDARY + return content_type, body diff --git a/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py b/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py new file mode 100644 index 000000000000..b1d8bfdc91a7 --- /dev/null +++ b/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py @@ -0,0 +1,210 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# +# Most of this file was ported over from Blink's +# webkitpy/layout_tests/layout_package/json_results_generator_unittest.py +# + +import unittest +import json + +from pylib.results.flakiness_dashboard import json_results_generator + + +class JSONGeneratorTest(unittest.TestCase): + + def setUp(self): + self.builder_name = 'DUMMY_BUILDER_NAME' + self.build_name = 'DUMMY_BUILD_NAME' + self.build_number = 'DUMMY_BUILDER_NUMBER' + + # For archived results. + self._json = None + self._num_runs = 0 + self._tests_set = set([]) + self._test_timings = {} + self._failed_count_map = {} + + self._PASS_count = 0 + self._DISABLED_count = 0 + self._FLAKY_count = 0 + self._FAILS_count = 0 + self._fixable_count = 0 + + self._orig_write_json = json_results_generator.WriteJSON + + # unused arguments ... 
pylint: disable=W0613 + def _WriteJSONStub(json_object, file_path, callback=None): + pass + + json_results_generator.WriteJSON = _WriteJSONStub + + def tearDown(self): + json_results_generator.WriteJSON = self._orig_write_json + + def _TestJSONGeneration(self, passed_tests_list, failed_tests_list): + tests_set = set(passed_tests_list) | set(failed_tests_list) + + DISABLED_tests = set(t for t in tests_set if t.startswith('DISABLED_')) + FLAKY_tests = set(t for t in tests_set if t.startswith('FLAKY_')) + FAILS_tests = set(t for t in tests_set if t.startswith('FAILS_')) + PASS_tests = tests_set - (DISABLED_tests | FLAKY_tests | FAILS_tests) + + failed_tests = set(failed_tests_list) - DISABLED_tests + failed_count_map = dict((t, 1) for t in failed_tests) + + test_timings = {} + i = 0 + for test in tests_set: + test_timings[test] = float(self._num_runs * 100 + i) + i += 1 + + test_results_map = {} + for test in tests_set: + test_results_map[test] = json_results_generator.TestResult( + test, failed=(test in failed_tests), + elapsed_time=test_timings[test]) + + generator = json_results_generator.JSONResultsGeneratorBase( + self.builder_name, self.build_name, self.build_number, + '', + None, # don't fetch past json results archive + test_results_map) + + failed_count_map = dict((t, 1) for t in failed_tests) + + # Test incremental json results + incremental_json = generator.GetJSON() + self._VerifyJSONResults( + tests_set, + test_timings, + failed_count_map, + len(PASS_tests), + len(DISABLED_tests), + len(FLAKY_tests), + len(DISABLED_tests | failed_tests), + incremental_json, + 1) + + # We don't verify the results here, but at least we make sure the code + # runs without errors. + generator.GenerateJSONOutput() + generator.GenerateTimesMSFile() + + def _VerifyJSONResults(self, tests_set, test_timings, failed_count_map, + PASS_count, DISABLED_count, FLAKY_count, + fixable_count, json_obj, num_runs): + # Aliasing to a short name for better access to its constants. 
+ JRG = json_results_generator.JSONResultsGeneratorBase + + self.assertIn(JRG.VERSION_KEY, json_obj) + self.assertIn(self.builder_name, json_obj) + + buildinfo = json_obj[self.builder_name] + self.assertIn(JRG.FIXABLE, buildinfo) + self.assertIn(JRG.TESTS, buildinfo) + self.assertEqual(len(buildinfo[JRG.BUILD_NUMBERS]), num_runs) + self.assertEqual(buildinfo[JRG.BUILD_NUMBERS][0], self.build_number) + + if tests_set or DISABLED_count: + fixable = {} + for fixable_items in buildinfo[JRG.FIXABLE]: + for (result_type, count) in fixable_items.items(): + if result_type in fixable: + fixable[result_type] = fixable[result_type] + count + else: + fixable[result_type] = count + + if PASS_count: + self.assertEqual(fixable[JRG.PASS_RESULT], PASS_count) + else: + self.assertTrue(JRG.PASS_RESULT not in fixable or + fixable[JRG.PASS_RESULT] == 0) + if DISABLED_count: + self.assertEqual(fixable[JRG.SKIP_RESULT], DISABLED_count) + else: + self.assertTrue(JRG.SKIP_RESULT not in fixable or + fixable[JRG.SKIP_RESULT] == 0) + if FLAKY_count: + self.assertEqual(fixable[JRG.FLAKY_RESULT], FLAKY_count) + else: + self.assertTrue(JRG.FLAKY_RESULT not in fixable or + fixable[JRG.FLAKY_RESULT] == 0) + + if failed_count_map: + tests = buildinfo[JRG.TESTS] + for test_name in failed_count_map.keys(): + test = self._FindTestInTrie(test_name, tests) + + failed = 0 + for result in test[JRG.RESULTS]: + if result[1] == JRG.FAIL_RESULT: + failed += result[0] + self.assertEqual(failed_count_map[test_name], failed) + + timing_count = 0 + for timings in test[JRG.TIMES]: + if timings[1] == test_timings[test_name]: + timing_count = timings[0] + self.assertEqual(1, timing_count) + + if fixable_count: + self.assertEqual(sum(buildinfo[JRG.FIXABLE_COUNT]), fixable_count) + + def _FindTestInTrie(self, path, trie): + nodes = path.split('/') + sub_trie = trie + for node in nodes: + self.assertIn(node, sub_trie) + sub_trie = sub_trie[node] + return sub_trie + + def testJSONGeneration(self): + self._TestJSONGeneration([], []) + self._TestJSONGeneration(['A1', 'B1'], []) + self._TestJSONGeneration([], ['FAILS_A2', 'FAILS_B2']) + self._TestJSONGeneration(['DISABLED_A3', 'DISABLED_B3'], []) + self._TestJSONGeneration(['A4'], ['B4', 'FAILS_C4']) + self._TestJSONGeneration(['DISABLED_C5', 'DISABLED_D5'], ['A5', 'B5']) + self._TestJSONGeneration( + ['A6', 'B6', 'FAILS_C6', 'DISABLED_E6', 'DISABLED_F6'], + ['FAILS_D6']) + + # Generate JSON with the same test sets. (Both incremental results and + # archived results must be updated appropriately.) + self._TestJSONGeneration( + ['A', 'FLAKY_B', 'DISABLED_C'], + ['FAILS_D', 'FLAKY_E']) + self._TestJSONGeneration( + ['A', 'DISABLED_C', 'FLAKY_E'], + ['FLAKY_B', 'FAILS_D']) + self._TestJSONGeneration( + ['FLAKY_B', 'DISABLED_C', 'FAILS_D'], + ['A', 'FLAKY_E']) + + def testHierarchicalJSNGeneration(self): + # FIXME: Re-work tests to be more comprehensible and comprehensive. 
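+    # AddPathToTrie nests these names by path segment, so 'foo/A' and 'foo/B'
+    # end up under a shared 'foo' node while 'bar/C' gets its own subtree.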
+ self._TestJSONGeneration(['foo/A'], ['foo/B', 'bar/C']) + + def testTestTimingsTrie(self): + individual_test_timings = [] + individual_test_timings.append( + json_results_generator.TestResult( + 'foo/bar/baz.html', + elapsed_time=1.2)) + individual_test_timings.append( + json_results_generator.TestResult('bar.html', elapsed_time=0.0001)) + trie = json_results_generator.TestTimingsTrie(individual_test_timings) + + expected_trie = { + 'bar.html': 0, + 'foo': { + 'bar': { + 'baz.html': 1200, + } + } + } + + self.assertEqual(json.dumps(trie), json.dumps(expected_trie)) diff --git a/android/pylib/results/flakiness_dashboard/results_uploader.py b/android/pylib/results/flakiness_dashboard/results_uploader.py new file mode 100644 index 000000000000..e3843358b01b --- /dev/null +++ b/android/pylib/results/flakiness_dashboard/results_uploader.py @@ -0,0 +1,174 @@ +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Uploads the results to the flakiness dashboard server.""" +# pylint: disable=R0201 + +import logging +import os +import shutil +import tempfile +import xml + + +from devil.utils import cmd_helper +from pylib.constants import host_paths +from pylib.results.flakiness_dashboard import json_results_generator +from pylib.utils import repo_utils + + + +class JSONResultsGenerator(json_results_generator.JSONResultsGeneratorBase): + """Writes test results to a JSON file and handles uploading that file to + the test results server. + """ + def __init__(self, builder_name, build_name, build_number, tmp_folder, + test_results_map, test_results_server, test_type, master_name): + super().__init__(builder_name=builder_name, + build_name=build_name, + build_number=build_number, + results_file_base_path=tmp_folder, + builder_base_url=None, + test_results_map=test_results_map, + svn_repositories=(('webkit', 'third_party/WebKit'), + ('chrome', '.')), + test_results_server=test_results_server, + test_type=test_type, + master_name=master_name) + + #override + def _GetModifierChar(self, test_name): + if test_name not in self._test_results_map: + return self.__class__.NO_DATA_RESULT + + return self._test_results_map[test_name].modifier + + #override + def _GetSVNRevision(self, in_directory): + """Returns the git/svn revision for the given directory. + + Args: + in_directory: The directory relative to src. + """ + def _is_git_directory(in_directory): + """Returns true if the given directory is in a git repository. + + Args: + in_directory: The directory path to be tested. 
+ """ + if os.path.exists(os.path.join(in_directory, '.git')): + return True + parent = os.path.dirname(in_directory) + if parent in (host_paths.DIR_SOURCE_ROOT, in_directory): + return False + return _is_git_directory(parent) + + in_directory = os.path.join(host_paths.DIR_SOURCE_ROOT, in_directory) + + if not os.path.exists(os.path.join(in_directory, '.svn')): + if _is_git_directory(in_directory): + return repo_utils.GetGitHeadSHA1(in_directory) + return '' + + output = cmd_helper.GetCmdOutput(['svn', 'info', '--xml'], cwd=in_directory) + try: + dom = xml.dom.minidom.parseString(output) + return dom.getElementsByTagName('entry')[0].getAttribute('revision') + except xml.parsers.expat.ExpatError: + return '' + return '' + + +class ResultsUploader: + """Handles uploading buildbot tests results to the flakiness dashboard.""" + def __init__(self, tests_type): + self._build_number = os.environ.get('BUILDBOT_BUILDNUMBER') + self._master_name = os.environ.get('BUILDBOT_MASTERNAME') + self._builder_name = os.environ.get('BUILDBOT_BUILDERNAME') + self._tests_type = tests_type + self._build_name = None + + if not self._build_number or not self._builder_name: + raise Exception('You should not be uploading tests results to the server' + 'from your local machine.') + + upstream = (tests_type != 'Chromium_Android_Instrumentation') + if not upstream: + self._build_name = 'chromium-android' + buildbot_branch = os.environ.get('BUILDBOT_BRANCH') + if not buildbot_branch: + buildbot_branch = 'master' + else: + # Ensure there's no leading "origin/" + buildbot_branch = buildbot_branch[buildbot_branch.find('/') + 1:] + self._master_name = '%s-%s' % (self._build_name, buildbot_branch) + + self._test_results_map = {} + + def AddResults(self, test_results): + # TODO(frankf): Differentiate between fail/crash/timeouts. + conversion_map = [ + (test_results.GetPass(), False, + json_results_generator.JSONResultsGeneratorBase.PASS_RESULT), + (test_results.GetFail(), True, + json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT), + (test_results.GetCrash(), True, + json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT), + (test_results.GetTimeout(), True, + json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT), + (test_results.GetUnknown(), True, + json_results_generator.JSONResultsGeneratorBase.NO_DATA_RESULT), + ] + + for results_list, failed, modifier in conversion_map: + for single_test_result in results_list: + test_result = json_results_generator.TestResult( + test=single_test_result.GetName(), + failed=failed, + elapsed_time=single_test_result.GetDuration() / 1000) + # The WebKit TestResult object sets the modifier it based on test name. + # Since we don't use the same test naming convention as WebKit the + # modifier will be wrong, so we need to overwrite it. 
+        test_result.modifier = modifier
+
+        self._test_results_map[single_test_result.GetName()] = test_result
+
+  def Upload(self, test_results_server):
+    if not self._test_results_map:
+      return
+
+    tmp_folder = tempfile.mkdtemp()
+
+    try:
+      results_generator = JSONResultsGenerator(
+          builder_name=self._builder_name,
+          build_name=self._build_name,
+          build_number=self._build_number,
+          tmp_folder=tmp_folder,
+          test_results_map=self._test_results_map,
+          test_results_server=test_results_server,
+          test_type=self._tests_type,
+          master_name=self._master_name)
+
+      json_files = ["incremental_results.json", "times_ms.json"]
+      results_generator.GenerateJSONOutput()
+      results_generator.GenerateTimesMSFile()
+      results_generator.UploadJSONFiles(json_files)
+    except Exception as e:  # pylint: disable=broad-except
+      logging.error("Uploading results to test server failed: %s.", e)
+    finally:
+      shutil.rmtree(tmp_folder)
+
+
+def Upload(results, flakiness_dashboard_server, test_type):
+  """Reports test results to the flakiness dashboard for Chrome for Android.
+
+  Args:
+    results: test results.
+    flakiness_dashboard_server: the server to upload the results to.
+    test_type: the type of the tests (as displayed by the flakiness dashboard).
+  """
+  uploader = ResultsUploader(test_type)
+  uploader.AddResults(results)
+  uploader.Upload(flakiness_dashboard_server)
diff --git a/android/pylib/results/json_results.py b/android/pylib/results/json_results.py
new file mode 100644
index 000000000000..c19096ae75c0
--- /dev/null
+++ b/android/pylib/results/json_results.py
@@ -0,0 +1,239 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import collections
+import itertools
+import json
+import logging
+import time
+
+import six
+
+from pylib.base import base_test_result
+
+
+def GenerateResultsDict(test_run_results, global_tags=None):
+  """Create a results dict from |test_run_results| suitable for writing to JSON.
+
+  Args:
+    test_run_results: a list of base_test_result.TestRunResults objects.
+    global_tags: optional list of strings to emit as the dict's
+      'global_tags' entry.
+  Returns:
+    A results dict that mirrors the one generated by
+      base/test/launcher/test_results_tracker.cc:SaveSummaryAsJSON.
+  """
+  # Example json output.
+  # {
+  #   "global_tags": [],
+  #   "all_tests": [
+  #     "test1",
+  #     "test2",
+  #   ],
+  #   "disabled_tests": [],
+  #   "per_iteration_data": [
+  #     {
+  #       "test1": [
+  #         {
+  #           "status": "SUCCESS",
+  #           "elapsed_time_ms": 1,
+  #           "output_snippet": "",
+  #           "output_snippet_base64": "",
+  #           "losless_snippet": "",
+  #         },
+  #         ...
+  #       ],
+  #       "test2": [
+  #         {
+  #           "status": "FAILURE",
+  #           "elapsed_time_ms": 12,
+  #           "output_snippet": "",
+  #           "output_snippet_base64": "",
+  #           "losless_snippet": "",
+  #         },
+  #         ...
+  #       ],
+  #     },
+  #     {
+  #       "test1": [
+  #         {
+  #           "status": "SUCCESS",
+  #           "elapsed_time_ms": 1,
+  #           "output_snippet": "",
+  #           "output_snippet_base64": "",
+  #           "losless_snippet": "",
+  #         },
+  #       ],
+  #       "test2": [
+  #         {
+  #           "status": "FAILURE",
+  #           "elapsed_time_ms": 12,
+  #           "output_snippet": "",
+  #           "output_snippet_base64": "",
+  #           "losless_snippet": "",
+  #         },
+  #       ],
+  #     },
+  #     ...
+  #   ],
+  # }
+
+  all_tests = set()
+  per_iteration_data = []
+  test_run_links = {}
+
+  for test_run_result in test_run_results:
+    iteration_data = collections.defaultdict(list)
+    if isinstance(test_run_result, list):
+      results_iterable = itertools.chain(*(t.GetAll() for t in test_run_result))
+      for tr in test_run_result:
+        test_run_links.update(tr.GetLinks())
+
+    else:
+      results_iterable = test_run_result.GetAll()
+      test_run_links.update(test_run_result.GetLinks())
+
+    for r in results_iterable:
+      result_dict = {
+          'status': r.GetType(),
+          'elapsed_time_ms': r.GetDuration(),
+          'output_snippet': six.ensure_text(r.GetLog(), errors='replace'),
+          'losless_snippet': True,
+          'output_snippet_base64': '',
+          'links': r.GetLinks(),
+      }
+      iteration_data[r.GetName()].append(result_dict)
+
+    all_tests = all_tests.union(set(six.iterkeys(iteration_data)))
+    per_iteration_data.append(iteration_data)
+
+  return {
+      'global_tags': global_tags or [],
+      'all_tests': sorted(list(all_tests)),
+      # TODO(jbudorick): Add support for disabled tests within base_test_result.
+      'disabled_tests': [],
+      'per_iteration_data': per_iteration_data,
+      'links': test_run_links,
+  }
+
+
+def GenerateJsonTestResultFormatDict(test_run_results, interrupted):
+  """Create a results dict from |test_run_results| suitable for writing to JSON.
+
+  Args:
+    test_run_results: a list of base_test_result.TestRunResults objects.
+    interrupted: True if the test run was interrupted, e.g. by a timeout
+      while listing tests.
+  Returns:
+    A results dict that mirrors the standard JSON Test Results Format.
+  """
+
+  tests = {}
+  counts = {'PASS': 0, 'FAIL': 0, 'SKIP': 0, 'CRASH': 0, 'TIMEOUT': 0}
+
+  for test_run_result in test_run_results:
+    if isinstance(test_run_result, list):
+      results_iterable = itertools.chain(*(t.GetAll() for t in test_run_result))
+    else:
+      results_iterable = test_run_result.GetAll()
+
+    for r in results_iterable:
+      element = tests
+      for key in r.GetName().split('.'):
+        if key not in element:
+          element[key] = {}
+        element = element[key]
+
+      element['expected'] = 'PASS'
+
+      if r.GetType() == base_test_result.ResultType.PASS:
+        result = 'PASS'
+      elif r.GetType() == base_test_result.ResultType.SKIP:
+        result = 'SKIP'
+      elif r.GetType() == base_test_result.ResultType.CRASH:
+        result = 'CRASH'
+      elif r.GetType() == base_test_result.ResultType.TIMEOUT:
+        result = 'TIMEOUT'
+      else:
+        result = 'FAIL'
+
+      if 'actual' in element:
+        element['actual'] += ' ' + result
+      else:
+        counts[result] += 1
+        element['actual'] = result
+        if result == 'FAIL':
+          element['is_unexpected'] = True
+
+      if r.GetDuration() != 0:
+        element['time'] = r.GetDuration()
+
+  # Fill in required fields.
+  return {
+      'interrupted': interrupted,
+      'num_failures_by_type': counts,
+      'path_delimiter': '.',
+      'seconds_since_epoch': time.time(),
+      'tests': tests,
+      'version': 3,
+  }
+
+
+def GenerateJsonResultsFile(test_run_result, file_path, global_tags=None,
+                            **kwargs):
+  """Write |test_run_result| to JSON.
+
+  This emulates the format of the JSON emitted by
+  base/test/launcher/test_results_tracker.cc:SaveSummaryAsJSON.
+
+  Args:
+    test_run_result: a base_test_result.TestRunResults object.
+    file_path: The path to the JSON file to write.
+  """
+  with open(file_path, 'w') as json_result_file:
+    json_result_file.write(json.dumps(
+        GenerateResultsDict(test_run_result, global_tags=global_tags),
+        **kwargs))
+  logging.info('Generated json results file at %s', file_path)
+
+
+def GenerateJsonTestResultFormatFile(test_run_result, interrupted, file_path,
+                                     **kwargs):
+  """Write |test_run_result| to JSON.
+
+  This uses the official Chromium Test Results Format.
+
+  Args:
+    test_run_result: a base_test_result.TestRunResults object.
+    interrupted: True if the test run was interrupted, e.g. by a timeout
+      while listing tests.
+    file_path: The path to the JSON file to write.
+  """
+  with open(file_path, 'w') as json_result_file:
+    json_result_file.write(
+        json.dumps(
+            GenerateJsonTestResultFormatDict(test_run_result, interrupted),
+            **kwargs))
+  logging.info('Generated json results file at %s', file_path)
+
+
+def ParseResultsFromJson(json_results):
+  """Creates a list of BaseTestResult objects from JSON.
+
+  Args:
+    json_results: A JSON dict in the format created by
+      GenerateJsonResultsFile.
+  """
+
+  def string_as_status(s):
+    if s in base_test_result.ResultType.GetTypes():
+      return s
+    return base_test_result.ResultType.UNKNOWN
+
+  results_list = []
+  testsuite_runs = json_results['per_iteration_data']
+  for testsuite_run in testsuite_runs:
+    for test, test_runs in six.iteritems(testsuite_run):
+      results_list.extend(
+          [base_test_result.BaseTestResult(test,
+                                           string_as_status(tr['status']),
+                                           duration=tr['elapsed_time_ms'],
+                                           log=tr.get('output_snippet'))
+           for tr in test_runs])
+  return results_list
diff --git a/android/pylib/results/json_results_test.py b/android/pylib/results/json_results_test.py
new file mode 100755
index 000000000000..6cf6487e54f8
--- /dev/null
+++ b/android/pylib/results/json_results_test.py
@@ -0,0 +1,311 @@
+#!/usr/bin/env vpython3
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import unittest
+
+import six
+from pylib.base import base_test_result
+from pylib.results import json_results
+
+
+class JsonResultsTest(unittest.TestCase):
+
+  def testGenerateResultsDict_passedResult(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.PASS)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict([all_results])
+    self.assertEqual(['test.package.TestName'], results_dict['all_tests'])
+    self.assertEqual(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEqual(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('status' in test_iteration_result)
+    self.assertEqual('SUCCESS', test_iteration_result['status'])
+
+  def testGenerateResultsDict_skippedResult(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.SKIP)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict([all_results])
+    self.assertEqual(['test.package.TestName'], results_dict['all_tests'])
+    self.assertEqual(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEqual(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('status' in test_iteration_result)
+    self.assertEqual('SKIPPED', test_iteration_result['status'])
+
+  def testGenerateResultsDict_failedResult(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.FAIL)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict([all_results])
+    self.assertEqual(['test.package.TestName'], results_dict['all_tests'])
+    self.assertEqual(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEqual(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('status' in test_iteration_result)
+    self.assertEqual('FAILURE', test_iteration_result['status'])
+
+  def testGenerateResultsDict_duration(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.PASS, duration=123)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict([all_results])
+    self.assertEqual(['test.package.TestName'], results_dict['all_tests'])
+    self.assertEqual(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEqual(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('elapsed_time_ms' in test_iteration_result)
+    self.assertEqual(123, test_iteration_result['elapsed_time_ms'])
+
+  def testGenerateResultsDict_multipleResults(self):
+    result1 = base_test_result.BaseTestResult(
+        'test.package.TestName1', base_test_result.ResultType.PASS)
+    result2 = base_test_result.BaseTestResult(
+        'test.package.TestName2', base_test_result.ResultType.PASS)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result1)
+    all_results.AddResult(result2)
+
+    results_dict = json_results.GenerateResultsDict([all_results])
+    self.assertEqual(['test.package.TestName1', 'test.package.TestName2'],
+                     results_dict['all_tests'])
+
+    self.assertTrue('per_iteration_data' in results_dict)
+    iterations = results_dict['per_iteration_data']
+    self.assertEqual(1, len(iterations))
+
+    expected_tests = set([
+        'test.package.TestName1',
+        'test.package.TestName2',
+    ])
+
+    for test_name, iteration_result in six.iteritems(iterations[0]):
+      self.assertTrue(test_name in expected_tests)
+      expected_tests.remove(test_name)
+      self.assertEqual(1, len(iteration_result))
+
+      test_iteration_result = iteration_result[0]
+      self.assertTrue('status' in test_iteration_result)
+      self.assertEqual('SUCCESS', test_iteration_result['status'])
+
+  def testGenerateResultsDict_passOnRetry(self):
+    raw_results = []
+
+    result1 = base_test_result.BaseTestResult(
+        'test.package.TestName1', base_test_result.ResultType.FAIL)
+    run_results1 = base_test_result.TestRunResults()
+    run_results1.AddResult(result1)
+    raw_results.append(run_results1)
+
+    result2 = base_test_result.BaseTestResult(
+        'test.package.TestName1', base_test_result.ResultType.PASS)
+    run_results2 = base_test_result.TestRunResults()
+    run_results2.AddResult(result2)
+    raw_results.append(run_results2)
+
+    results_dict = json_results.GenerateResultsDict([raw_results])
+    self.assertEqual(['test.package.TestName1'], results_dict['all_tests'])
+
+    # Check that there's only one iteration.
+    self.assertIn('per_iteration_data', results_dict)
+    iterations = results_dict['per_iteration_data']
+    self.assertEqual(1, len(iterations))
+
+    # Check that test.package.TestName1 is the only test in the iteration.
+    self.assertEqual(1, len(iterations[0]))
+    self.assertIn('test.package.TestName1', iterations[0])
+
+    # Check that there are two results for test.package.TestName1.
+    actual_test_results = iterations[0]['test.package.TestName1']
+    self.assertEqual(2, len(actual_test_results))
+
+    # Check that the first result is a failure.
+    self.assertIn('status', actual_test_results[0])
+    self.assertEqual('FAILURE', actual_test_results[0]['status'])
+
+    # Check that the second result is a success.
+    self.assertIn('status', actual_test_results[1])
+    self.assertEqual('SUCCESS', actual_test_results[1]['status'])
+
+  def testGenerateResultsDict_globalTags(self):
+    raw_results = []
+    global_tags = ['UNRELIABLE_RESULTS']
+
+    results_dict = json_results.GenerateResultsDict(
+        [raw_results], global_tags=global_tags)
+    self.assertEqual(['UNRELIABLE_RESULTS'], results_dict['global_tags'])
+
+  def testGenerateResultsDict_loslessSnippet(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.FAIL)
+    log = 'blah-blah'
+    result.SetLog(log)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict([all_results])
+    self.assertEqual(['test.package.TestName'], results_dict['all_tests'])
+    self.assertEqual(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEqual(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('losless_snippet' in test_iteration_result)
+    self.assertTrue(test_iteration_result['losless_snippet'])
+    self.assertTrue('output_snippet' in test_iteration_result)
+    self.assertEqual(log, test_iteration_result['output_snippet'])
+    self.assertTrue('output_snippet_base64' in test_iteration_result)
+    self.assertEqual('', test_iteration_result['output_snippet_base64'])
+
+  def testGenerateJsonTestResultFormatDict_passedResult(self):
+    result = base_test_result.BaseTestResult('test.package.TestName',
+                                             base_test_result.ResultType.PASS)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateJsonTestResultFormatDict([all_results],
+                                                                 False)
+    self.assertEqual(1, len(results_dict['tests']))
+    self.assertEqual(1, len(results_dict['tests']['test']))
+    self.assertEqual(1, len(results_dict['tests']['test']['package']))
+    self.assertEqual(
+        'PASS',
+        results_dict['tests']['test']['package']['TestName']['expected'])
+    self.assertEqual(
+        'PASS', results_dict['tests']['test']['package']['TestName']['actual'])
+
+    self.assertTrue('FAIL' not in results_dict['num_failures_by_type']
+                    or results_dict['num_failures_by_type']['FAIL'] == 0)
+    self.assertIn('PASS', results_dict['num_failures_by_type'])
+    self.assertEqual(1, results_dict['num_failures_by_type']['PASS'])
+
+  def testGenerateJsonTestResultFormatDict_failedResult(self):
+    result = base_test_result.BaseTestResult('test.package.TestName',
+                                             base_test_result.ResultType.FAIL)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateJsonTestResultFormatDict([all_results],
+                                                                 False)
+    self.assertEqual(1, len(results_dict['tests']))
+    self.assertEqual(1, len(results_dict['tests']['test']))
+    self.assertEqual(1, len(results_dict['tests']['test']['package']))
+    self.assertEqual(
+        'PASS',
+        results_dict['tests']['test']['package']['TestName']['expected'])
+    self.assertEqual(
+        'FAIL', results_dict['tests']['test']['package']['TestName']['actual'])
+    self.assertEqual(
+        True,
+        results_dict['tests']['test']['package']['TestName']['is_unexpected'])
+
+    self.assertTrue('PASS' not in results_dict['num_failures_by_type']
+                    or results_dict['num_failures_by_type']['PASS'] == 0)
+    self.assertIn('FAIL', results_dict['num_failures_by_type'])
+    self.assertEqual(1, results_dict['num_failures_by_type']['FAIL'])
+
+  def testGenerateJsonTestResultFormatDict_skippedResult(self):
+    result = base_test_result.BaseTestResult('test.package.TestName',
+                                             base_test_result.ResultType.SKIP)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateJsonTestResultFormatDict([all_results],
+                                                                 False)
+    self.assertEqual(1, len(results_dict['tests']))
+    self.assertEqual(1, len(results_dict['tests']['test']))
+    self.assertEqual(1, len(results_dict['tests']['test']['package']))
+    self.assertEqual(
+        'PASS',
+        results_dict['tests']['test']['package']['TestName']['expected'])
+    self.assertEqual(
+        'SKIP', results_dict['tests']['test']['package']['TestName']['actual'])
+    # Should only be set if the test fails.
+    self.assertNotIn('is_unexpected',
+                     results_dict['tests']['test']['package']['TestName'])
+
+    self.assertTrue('FAIL' not in results_dict['num_failures_by_type']
+                    or results_dict['num_failures_by_type']['FAIL'] == 0)
+    self.assertTrue('PASS' not in results_dict['num_failures_by_type']
+                    or results_dict['num_failures_by_type']['PASS'] == 0)
+    self.assertIn('SKIP', results_dict['num_failures_by_type'])
+    self.assertEqual(1, results_dict['num_failures_by_type']['SKIP'])
+
+  def testGenerateJsonTestResultFormatDict_failedResultWithRetry(self):
+    result_1 = base_test_result.BaseTestResult('test.package.TestName',
+                                               base_test_result.ResultType.FAIL)
+    run_results_1 = base_test_result.TestRunResults()
+    run_results_1.AddResult(result_1)
+
+    # Simulate a second retry with failure.
+    result_2 = base_test_result.BaseTestResult('test.package.TestName',
+                                               base_test_result.ResultType.FAIL)
+    run_results_2 = base_test_result.TestRunResults()
+    run_results_2.AddResult(result_2)
+
+    all_results = [run_results_1, run_results_2]
+
+    results_dict = json_results.GenerateJsonTestResultFormatDict(
+        all_results, False)
+    self.assertEqual(1, len(results_dict['tests']))
+    self.assertEqual(1, len(results_dict['tests']['test']))
+    self.assertEqual(1, len(results_dict['tests']['test']['package']))
+    self.assertEqual(
+        'PASS',
+        results_dict['tests']['test']['package']['TestName']['expected'])
+    self.assertEqual(
+        'FAIL FAIL',
+        results_dict['tests']['test']['package']['TestName']['actual'])
+    self.assertEqual(
+        True,
+        results_dict['tests']['test']['package']['TestName']['is_unexpected'])
+
+    self.assertTrue('PASS' not in results_dict['num_failures_by_type']
+                    or results_dict['num_failures_by_type']['PASS'] == 0)
+    # According to the spec: If a test was run more than once, only the first
+    # invocation's result is included in the totals.
+    self.assertIn('FAIL', results_dict['num_failures_by_type'])
+    self.assertEqual(1, results_dict['num_failures_by_type']['FAIL'])
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/android/pylib/results/presentation/__init__.py b/android/pylib/results/presentation/__init__.py
new file mode 100644
index 000000000000..b8e1dbd6e92a
--- /dev/null
+++ b/android/pylib/results/presentation/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2017 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/android/pylib/results/presentation/javascript/main_html.js b/android/pylib/results/presentation/javascript/main_html.js
new file mode 100644
index 000000000000..e4bf2cc3fd9b
--- /dev/null
+++ b/android/pylib/results/presentation/javascript/main_html.js
@@ -0,0 +1,193 @@
+// Copyright 2017 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+function getArguments() {
+  // Returns the URL arguments as a dictionary.
+  var args = {};
+  var s = location.search;
+  if (s) {
+    var vals = s.substring(1).split('&');
+    for (var i = 0; i < vals.length; i++) {
+      var pair = vals[i].split('=');
+      args[pair[0]] = pair[1];
+    }
+  }
+  return args;
+}
+
+function showSuiteTable(show_the_table) {
+  document.getElementById('suite-table').style.display = (
+      show_the_table ? 'table' : 'none');
+}
+
+function showTestTable(show_the_table) {
+  document.getElementById('test-table').style.display = (
+      show_the_table ? 'table' : 'none');
+}
+
+function showTestsOfOneSuiteOnly(suite_name) {
+  setTitle('Test Results of Suite: ' + suite_name);
+  var show_all = (suite_name == 'TOTAL');
+  var testTableBlocks = document.getElementById('test-table')
+      .getElementsByClassName('row_block');
+  Array.prototype.slice.call(testTableBlocks)
+      .forEach(function(testTableBlock) {
+        if (!show_all) {
+          var table_block_in_suite = (testTableBlock.firstElementChild
+              .firstElementChild.firstElementChild.innerHTML)
+              .startsWith(suite_name);
+          if (!table_block_in_suite) {
+            testTableBlock.style.display = 'none';
+            return;
+          }
+        }
+        testTableBlock.style.display = 'table-row-group';
+      });
+  showTestTable(true);
+  showSuiteTable(false);
+  window.scrollTo(0, 0);
+}
+
+function showTestsOfOneSuiteOnlyWithNewState(suite_name) {
+  showTestsOfOneSuiteOnly(suite_name);
+  history.pushState({suite: suite_name}, suite_name, '');
+}
+
+function showSuiteTableOnly() {
+  setTitle('Suites Summary');
+  showTestTable(false);
+  showSuiteTable(true);
+  window.scrollTo(0, 0);
+}
+
+function showSuiteTableOnlyWithReplaceState() {
+  showSuiteTableOnly();
+  history.replaceState({}, 'suite_table', '');
+}
+
+function setBrowserBackButtonLogic() {
+  window.onpopstate = function(event) {
+    if (!event.state || !event.state.suite) {
+      showSuiteTableOnly();
+    } else {
+      showTestsOfOneSuiteOnly(event.state.suite);
+    }
+  };
+}
+
+function setTitle(title) {
+  document.getElementById('summary-header').textContent = title;
+}
+
+function sortByColumn(head) {
+  var table = head.parentNode.parentNode.parentNode;
+  var rowBlocks = Array.prototype.slice.call(
+      table.getElementsByTagName('tbody'));
+
+  // Determine whether to sort ascending or descending, and set the arrows.
+  var headers = head.parentNode.getElementsByTagName('th');
+  var headIndex = Array.prototype.slice.call(headers).indexOf(head);
+  var asc = -1;
+  for (var i = 0; i < headers.length; i++) {
+    if (headers[i].dataset.ascSorted != 0) {
+      if (headers[i].dataset.ascSorted == 1) {
+        headers[i].getElementsByClassName('up')[0]
+            .style.display = 'none';
+      } else {
+        headers[i].getElementsByClassName('down')[0]
+            .style.display = 'none';
+      }
+      if (headers[i] == head) {
+        asc = headers[i].dataset.ascSorted * -1;
+      } else {
+        headers[i].dataset.ascSorted = 0;
+      }
+      break;
+    }
+  }
+  headers[headIndex].dataset.ascSorted = asc;
+  if (asc == 1) {
+    headers[headIndex].getElementsByClassName('up')[0]
+        .style.display = 'inline';
+  } else {
+    headers[headIndex].getElementsByClassName('down')[0]
+        .style.display = 'inline';
+  }
+
+  // Sort the row blocks by the selected column and order (asc).
+  rowBlocks.sort(function (a, b) {
+    if (a.style.display == 'none') {
+      return -1;
+    } else if (b.style.display == 'none') {
+      return 1;
+    }
+    var a_rows = Array.prototype.slice.call(a.children);
+    var b_rows = Array.prototype.slice.call(b.children);
+    var avalue = 0;
+    var bvalue = 0;
+    if (head.className == "text") {
+      // If sorting by text, we only compare the entry on the first row.
+      var aInnerHTML = a_rows[0].children[headIndex].innerHTML;
+      var bInnerHTML = b_rows[0].children[headIndex].innerHTML;
+      return (aInnerHTML == bInnerHTML) ? 0 : (
+          (aInnerHTML > bInnerHTML) ? asc : -1 * asc);
+    } else if (head.className == "number") {
+      // If sorting by number (for example, duration), sum up the values of
+      // the different test runs of one test case and sort by the sum.
+      a_rows.forEach(function (row, i) {
+        var index = (i > 0) ? headIndex - 1 : headIndex;
+        avalue += Number(row.children[index].innerHTML);
+      });
+      b_rows.forEach(function (row, i) {
+        var index = (i > 0) ? headIndex - 1 : headIndex;
+        bvalue += Number(row.children[index].innerHTML);
+      });
+    } else if (head.className == "flaky") {
+      // Flakiness = (#total - #success - #skipped) / (#total - #skipped)
+      var a_success_or_skipped = 0;
+      var a_skipped = 0;
+      var b_success_or_skipped = 0;
+      var b_skipped = 0;
+      a_rows.forEach(function (row, i) {
+        var index = (i > 0) ? headIndex - 1 : headIndex;
+        var status = row.children[index].innerHTML.trim();
+        if (status == 'SUCCESS') {
+          a_success_or_skipped += 1;
+        }
+        if (status == 'SKIPPED') {
+          a_success_or_skipped += 1;
+          a_skipped += 1;
+        }
+      });
+      b_rows.forEach(function (row, i) {
+        var index = (i > 0) ? headIndex - 1 : headIndex;
+        var status = row.children[index].innerHTML.trim();
+        if (status == 'SUCCESS') {
+          b_success_or_skipped += 1;
+        }
+        if (status == 'SKIPPED') {
+          b_success_or_skipped += 1;
+          b_skipped += 1;
+        }
+      });
+      var atotal_minus_skipped = a_rows.length - a_skipped;
+      var btotal_minus_skipped = b_rows.length - b_skipped;
+
+      avalue = ((atotal_minus_skipped == 0) ? -1 :
+          (a_rows.length - a_success_or_skipped) / atotal_minus_skipped);
+      bvalue = ((btotal_minus_skipped == 0) ? -1 :
+          (b_rows.length - b_success_or_skipped) / btotal_minus_skipped);
+    }
+    return asc * (avalue - bvalue);
+  });
+
+  for (var i = 0; i < rowBlocks.length; i++) {
+    table.appendChild(rowBlocks[i]);
+  }
+}
+
+function sortSuiteTableByFailedTestCases() {
+  sortByColumn(document.getElementById('number_fail_tests'));
+}
diff --git a/android/pylib/results/presentation/standard_gtest_merge.py b/android/pylib/results/presentation/standard_gtest_merge.py
new file mode 100755
index 000000000000..ab1074e268f3
--- /dev/null
+++ b/android/pylib/results/presentation/standard_gtest_merge.py
@@ -0,0 +1,175 @@
+#! /usr/bin/env python3
+#
+# Copyright 2017 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import argparse
+import json
+import os
+import sys
+
+
+def merge_shard_results(summary_json, jsons_to_merge):
+  """Reads JSON test output from all shards and combines them into one.
+
+  Returns dict with merged test output on success or None on failure. Emits
+  annotations.
+  """
+  try:
+    with open(summary_json) as f:
+      summary = json.load(f)
+  except (IOError, ValueError):
+    # TODO(crbug.com/1245494): Re-enable this check after the recipe module
+    # chromium_swarming can run it with py3.
+    # pylint: disable=raise-missing-from
+    raise Exception('Summary json cannot be loaded.')
+
+  # Merge all JSON files together. Keep track of missing shards.
+  merged = {
+      'all_tests': set(),
+      'disabled_tests': set(),
+      'global_tags': set(),
+      'missing_shards': [],
+      'per_iteration_data': [],
+      'swarming_summary': summary,
+      'links': set()
+  }
+  for index, result in enumerate(summary['shards']):
+    if result is None:
+      merged['missing_shards'].append(index)
+      continue
+
+    # Author note: this code path doesn't trigger convert_to_old_format() in
+    # client/swarming.py, which means the state enum is saved in its string
+    # name form, not in the number form.
+    state = result.get('state')
+    if state == 'BOT_DIED':
+      print(
+          'Shard #%d had a Swarming internal failure' % index, file=sys.stderr)
+    elif state == 'EXPIRED':
+      print('There wasn\'t enough capacity to run your test', file=sys.stderr)
+    elif state == 'TIMED_OUT':
+      print('Test runtime exceeded allocated time. '
+            'Either it ran for too long (hard timeout) or it didn\'t produce '
+            'I/O for an extended period of time (I/O timeout).',
+            file=sys.stderr)
+    elif state != 'COMPLETED':
+      print('Invalid Swarming task state: %s' % state, file=sys.stderr)
+
+    json_data, err_msg = load_shard_json(index, result.get('task_id'),
+                                         jsons_to_merge)
+    if json_data:
+      # Set-like fields.
+      for key in ('all_tests', 'disabled_tests', 'global_tags', 'links'):
+        merged[key].update(json_data.get(key, []))
+
+      # 'per_iteration_data' is a list of dicts. Dicts should be merged
+      # together, not the 'per_iteration_data' list itself.
+      merged['per_iteration_data'] = merge_list_of_dicts(
+          merged['per_iteration_data'], json_data.get('per_iteration_data', []))
+    else:
+      merged['missing_shards'].append(index)
+      print('No result was found: %s' % err_msg, file=sys.stderr)
+
+  # If some shards are missing, make it known. Continue parsing anyway. Step
+  # should be red anyway, since swarming.py returns a non-zero exit code in
+  # that case.
+  if merged['missing_shards']:
+    as_str = ', '.join([str(shard) for shard in merged['missing_shards']])
+    print('some shards did not complete: %s' % as_str, file=sys.stderr)
+    # Not all tests ran; the combined JSON summary cannot be trusted.
+    merged['global_tags'].add('UNRELIABLE_RESULTS')
+
+  # Convert to jsonish dict.
+  for key in ('all_tests', 'disabled_tests', 'global_tags', 'links'):
+    merged[key] = sorted(merged[key])
+  return merged
+
+
+OUTPUT_JSON_SIZE_LIMIT = 100 * 1024 * 1024  # 100 MB
+
+
+def load_shard_json(index, task_id, jsons_to_merge):
+  """Reads JSON output of the specified shard.
+
+  Args:
+    index: The index of the shard to load data for; the old API names the
+      shard's output directory after it.
+    task_id: The task id of the shard to load data for; the new API names the
+      shard's output directory after it.
+    jsons_to_merge: The candidate output.json paths to search.
+
+  Returns: A tuple containing:
+    * The contents of path, deserialized into a python object.
+    * An error string.
+    (exactly one of the tuple elements will be non-None).
+  """
+  matching_json_files = [
+      j for j in jsons_to_merge
+      if (os.path.basename(j) == 'output.json' and
+          (os.path.basename(os.path.dirname(j)) == str(index) or
+           os.path.basename(os.path.dirname(j)) == task_id))]
+
+  if not matching_json_files:
+    print('shard %s test output missing' % index, file=sys.stderr)
+    return (None, 'shard %s test output was missing' % index)
+  if len(matching_json_files) > 1:
+    print('duplicate test output for shard %s' % index, file=sys.stderr)
+    return (None, 'shard %s test output was duplicated' % index)
+
+  path = matching_json_files[0]
+
+  try:
+    filesize = os.stat(path).st_size
+    if filesize > OUTPUT_JSON_SIZE_LIMIT:
+      print(
+          'output.json is %d bytes. Max size is %d' % (filesize,
+                                                       OUTPUT_JSON_SIZE_LIMIT),
+          file=sys.stderr)
+      return (None, 'shard %s test output exceeded the size limit' % index)
+
+    with open(path) as f:
+      return (json.load(f), None)
+  except (IOError, ValueError, OSError) as e:
+    print('Missing or invalid gtest JSON file: %s' % path, file=sys.stderr)
+    print('%s: %s' % (type(e).__name__, e), file=sys.stderr)
+
+  return (None, 'shard %s test output was missing or invalid' % index)
+
+
+def merge_list_of_dicts(left, right):
+  """Merges dicts left[0] with right[0], left[1] with right[1], etc."""
+  output = []
+  for i in range(max(len(left), len(right))):
+    left_dict = left[i] if i < len(left) else {}
+    right_dict = right[i] if i < len(right) else {}
+    merged_dict = left_dict.copy()
+    merged_dict.update(right_dict)
+    output.append(merged_dict)
+  return output
+
+
+def standard_gtest_merge(
+    output_json, summary_json, jsons_to_merge):
+
+  output = merge_shard_results(summary_json, jsons_to_merge)
+  with open(output_json, 'w') as f:
+    json.dump(output, f)
+
+  return 0
+
+
+def main(raw_args):
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--summary-json')
+  parser.add_argument('-o', '--output-json', required=True)
+  parser.add_argument('jsons_to_merge', nargs='*')
+
+  args = parser.parse_args(raw_args)
+
+  return standard_gtest_merge(
+      args.output_json, args.summary_json, args.jsons_to_merge)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/android/pylib/results/presentation/template/main.html b/android/pylib/results/presentation/template/main.html
new file mode 100644
index 000000000000..e30d7d3f239b
--- /dev/null
+++ b/android/pylib/results/presentation/template/main.html
@@ -0,0 +1,93 @@
+
+
+
+
+
+
+
+

    +

+    {% for tb_value in tb_values %}
+      {% include 'template/table.html' %}
+    {% endfor %}
+
+    {% if feedback_url %}
+
+      Feedback
+
+    {%- endif %}
+
+
diff --git a/android/pylib/results/presentation/template/table.html b/android/pylib/results/presentation/template/table.html
new file mode 100644
index 000000000000..4240043490dc
--- /dev/null
+++ b/android/pylib/results/presentation/template/table.html
@@ -0,0 +1,60 @@
+
+
+    {% for cell in tb_value.table_headers -%}
+
+    {%- endfor %}
+
+
+  {% for block in tb_value.table_row_blocks -%}
+
+    {% for row in block -%}
+
+      {% for cell in row -%}
+        {% if cell.rowspan -%}
+
+      {%- endfor %}
+
+    {%- endfor %}
+
+  {%- endfor %}
+
+
+    {% for cell in tb_value.table_footer -%}
+
+    {%- endfor %}
+
+
diff --git a/android/pylib/results/presentation/test_results_presentation.py b/android/pylib/results/presentation/test_results_presentation.py
new file mode 100755
index 000000000000..9e8b2804156a
--- /dev/null
+++ b/android/pylib/results/presentation/test_results_presentation.py
@@ -0,0 +1,544 @@
+#!/usr/bin/env python3
+#
+# Copyright 2017 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+
+import argparse
+import collections
+import contextlib
+import json
+import logging
+import tempfile
+import os
+import sys
+try:
+  from urllib.parse import urlencode
+  from urllib.request import urlopen
+except ImportError:
+  from urllib import urlencode
+  from urllib2 import urlopen
+
+
+CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
+BASE_DIR = os.path.abspath(os.path.join(
+    CURRENT_DIR, '..', '..', '..', '..', '..'))
+
+sys.path.append(os.path.join(BASE_DIR, 'build', 'android'))
+from pylib.results.presentation import standard_gtest_merge
+from pylib.utils import google_storage_helper  # pylint: disable=import-error
+
+sys.path.append(os.path.join(BASE_DIR, 'third_party'))
+import jinja2  # pylint: disable=import-error
+JINJA_ENVIRONMENT = jinja2.Environment(
+    loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
+    autoescape=True)
+
+
+def cell(data, html_class='center'):
+  """Formats table cell data for processing in jinja template."""
+  return {
+    'data': data,
+    'class': html_class,
+  }
+
+
+def pre_cell(data, html_class='center'):
+  """Formats table <pre> cell data for processing in jinja template."""
    +  return {
    +    'cell_type': 'pre',
    +    'data': data,
    +    'class': html_class,
    +  }
    +
    +
    +class LinkTarget:
    +  # Opens the linked document in a new window or tab.
    +  NEW_TAB = '_blank'
    +  # Opens the linked document in the same frame as it was clicked.
    +  CURRENT_TAB = '_self'
    +
    +
    +def link(data, href, target=LinkTarget.CURRENT_TAB):
    +  """Formats  tag data for processing in jinja template.
    +
    +  Args:
    +    data: String link appears as on HTML page.
    +    href: URL where link goes.
    +    target: Where link should be opened (e.g. current tab or new tab).
    +  """
    +  return {
    +    'data': data,
    +    'href': href,
    +    'target': target,
    +  }
    +
    +
    +def links_cell(links, html_class='center', rowspan=None):
    +  """Formats table cell with links for processing in jinja template.
    +
    +  Args:
    +    links: List of link dictionaries. Use |link| function to generate them.
    +    html_class: Class for table cell.
    +    rowspan: Rowspan HTML attribute.
    +  """
    +  return {
    +    'cell_type': 'links',
    +    'class': html_class,
    +    'links': links,
    +    'rowspan': rowspan,
    +  }
    +
    +
    +def action_cell(action, data, html_class):
    +  """Formats table cell with javascript actions.
    +
    +  Args:
+    action: JavaScript action.
+    data: Data in cell.
+    html_class: Class for table cell.
    +  """
    +  return {
    +    'cell_type': 'action',
    +    'action': action,
    +    'data': data,
    +    'class': html_class,
    +  }
    +
    +
+def flakiness_dashboard_link(test_name, suite_name, bucket):
+  # Assume the bucket is named like "foo-bar-baz" and take "foo" as the
+  # test_project. Fall back to "chromium" if no bucket was passed
+  # (e.g. local_output=True).
    +  test_project = bucket.split('-')[0] if bucket else 'chromium'
    +  query = '%s/%s' % (suite_name, test_name)
    +  url_args = urlencode([('t', 'TESTS'), ('q', query), ('tp', test_project)])
    +  return 'https://ci.chromium.org/ui/search?%s' % url_args
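+# A sketch of the resulting URL, with hypothetical inputs: for
+# flakiness_dashboard_link('testAddBookmark', 'chrome_public_test_apk',
+# 'chromium-result-details'), the project is the first dash-separated token
+# of the bucket, giving:
+#   https://ci.chromium.org/ui/search?t=TESTS&q=chrome_public_test_apk%2FtestAddBookmark&tp=chromium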
    +
    +
    +def logs_cell(result, test_name, suite_name, bucket):
    +  """Formats result logs data for processing in jinja template."""
    +  link_list = []
    +  result_link_dict = result.get('links', {})
+  result_link_dict['flakiness'] = flakiness_dashboard_link(
    +      test_name, suite_name, bucket)
    +  for name, href in sorted(result_link_dict.items()):
    +    link_list.append(link(
    +        data=name,
    +        href=href,
    +        target=LinkTarget.NEW_TAB))
    +  if link_list:
    +    return links_cell(link_list)
    +  return cell('(no logs)')
    +
    +
    +def code_search(test, cs_base_url):
    +  """Returns URL for test on codesearch."""
    +  search = test.replace('#', '.')
    +  return '%s/search/?q=%s&type=cs' % (cs_base_url, search)
    +
    +
    +def status_class(status):
    +  """Returns HTML class for test status."""
    +  if not status:
    +    return 'failure unknown'
    +  status = status.lower()
    +  if status not in ('success', 'skipped'):
    +    return 'failure %s' % status
    +  return status
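+# Sketch of the resulting CSS classes: 'SUCCESS' -> 'success',
+# 'SKIPPED' -> 'skipped', 'FAILURE' -> 'failure failure' (the generic
+# 'failure' class plus the lowercased status), and None -> 'failure unknown'.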
    +
    +
    +def create_test_table(results_dict, cs_base_url, suite_name, bucket):
    +  """Format test data for injecting into HTML table."""
    +
    +  header_row = [
    +    cell(data='test_name', html_class='text'),
    +    cell(data='status', html_class='flaky'),
    +    cell(data='elapsed_time_ms', html_class='number'),
    +    cell(data='logs', html_class='text'),
    +    cell(data='output_snippet', html_class='text'),
    +  ]
    +
    +  test_row_blocks = []
    +  for test_name, test_results in results_dict.items():
    +    test_runs = []
    +    for index, result in enumerate(test_results):
    +      if index == 0:
    +        test_run = [links_cell(
    +            links=[
    +                link(href=code_search(test_name, cs_base_url),
    +                     target=LinkTarget.NEW_TAB,
    +                     data=test_name)],
    +            rowspan=len(test_results),
    +            html_class='left %s' % test_name
    +        )]                                          # test_name
    +      else:
    +        test_run = []
    +
    +      test_run.extend([
    +          cell(data=result['status'] or 'UNKNOWN',
    +                                                    # status
    +               html_class=('center %s' %
    +                  status_class(result['status']))),
    +          cell(data=result['elapsed_time_ms']),     # elapsed_time_ms
    +          logs_cell(result, test_name, suite_name, bucket),
    +                                                    # logs
    +          pre_cell(data=result['output_snippet'],   # output_snippet
    +                   html_class='left'),
    +      ])
    +      test_runs.append(test_run)
    +    test_row_blocks.append(test_runs)
    +  return header_row, test_row_blocks
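+# Illustrative input, with made-up names (the per-test lists come from merging
+# 'per_iteration_data', as done in result_details below):
+#   create_test_table(
+#       {'org.chromium.Foo#testBar': [
+#           {'status': 'SUCCESS', 'elapsed_time_ms': 12,
+#            'output_snippet': '', 'links': {}}]},
+#       'http://cs.chromium.org', 'chrome_public_test_apk', None)
+# returns the header row plus one row block per test; only a block's first row
+# carries the rowspan'd test_name cell.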
    +
    +
    +def create_suite_table(results_dict):
    +  """Format test suite data for injecting into HTML table."""
    +
    +  SUCCESS_COUNT_INDEX = 1
    +  FAIL_COUNT_INDEX = 2
    +  ALL_COUNT_INDEX = 3
    +  TIME_INDEX = 4
    +
    +  header_row = [
    +    cell(data='suite_name', html_class='text'),
    +    cell(data='number_success_tests', html_class='number'),
    +    cell(data='number_fail_tests', html_class='number'),
    +    cell(data='all_tests', html_class='number'),
    +    cell(data='elapsed_time_ms', html_class='number'),
    +  ]
    +
    +  footer_row = [
    +    action_cell(
    +          'showTestsOfOneSuiteOnlyWithNewState("TOTAL")',
    +          'TOTAL',
    +          'center'
    +        ),         # TOTAL
    +    cell(data=0),  # number_success_tests
    +    cell(data=0),  # number_fail_tests
    +    cell(data=0),  # all_tests
    +    cell(data=0),  # elapsed_time_ms
    +  ]
    +
    +  suite_row_dict = collections.defaultdict(lambda: [
    +      # Note: |suite_name| will be given in the following for loop.
    +      # It is not assigned yet here.
    +      action_cell('showTestsOfOneSuiteOnlyWithNewState("%s")' % suite_name,
    +                  suite_name, 'left'),  # suite_name
    +      cell(data=0),  # number_success_tests
    +      cell(data=0),  # number_fail_tests
    +      cell(data=0),  # all_tests
    +      cell(data=0),  # elapsed_time_ms
    +  ])
    +  for test_name, test_results in results_dict.items():
+    # TODO(mikecase): This logic doesn't work if there are multiple test runs,
+    # i.e. if 'per_iteration_data' has multiple entries, since we only look
+    # at the result of the last test run.
    +    result = test_results[-1]
    +
    +    suite_name = (test_name.split('#')[0]
    +                  if '#' in test_name else test_name.split('.')[0])
    +    suite_row = suite_row_dict[suite_name]
    +
    +    suite_row[ALL_COUNT_INDEX]['data'] += 1
    +    footer_row[ALL_COUNT_INDEX]['data'] += 1
    +
    +    if result['status'] == 'SUCCESS':
    +      suite_row[SUCCESS_COUNT_INDEX]['data'] += 1
    +      footer_row[SUCCESS_COUNT_INDEX]['data'] += 1
    +    elif result['status'] != 'SKIPPED':
    +      suite_row[FAIL_COUNT_INDEX]['data'] += 1
    +      footer_row[FAIL_COUNT_INDEX]['data'] += 1
    +
    +    # Some types of crashes can have 'null' values for elapsed_time_ms.
    +    if result['elapsed_time_ms'] is not None:
    +      suite_row[TIME_INDEX]['data'] += result['elapsed_time_ms']
    +      footer_row[TIME_INDEX]['data'] += result['elapsed_time_ms']
    +
    +  for suite in list(suite_row_dict.values()):
    +    if suite[FAIL_COUNT_INDEX]['data'] > 0:
    +      suite[FAIL_COUNT_INDEX]['class'] += ' failure'
    +    else:
    +      suite[FAIL_COUNT_INDEX]['class'] += ' success'
    +
    +  if footer_row[FAIL_COUNT_INDEX]['data'] > 0:
    +    footer_row[FAIL_COUNT_INDEX]['class'] += ' failure'
    +  else:
    +    footer_row[FAIL_COUNT_INDEX]['class'] += ' success'
    +
    +  return (header_row, [[suite_row]
    +                       for suite_row in list(suite_row_dict.values())],
    +          footer_row)
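+# A sketch with hypothetical results. Only each test's last run counts, and
+# the suite name is the part before '#' (or the first '.'-token otherwise):
+#   create_suite_table(
+#       {'org.chromium.Foo#testBar': [{'status': 'FAILURE',
+#                                      'elapsed_time_ms': 34}],
+#        'org.chromium.Foo#testBaz': [{'status': 'SUCCESS',
+#                                      'elapsed_time_ms': 5}]})
+# yields one row for suite 'org.chromium.Foo' with 1 success, 1 failure,
+# 2 tests and 39 ms, plus a TOTAL footer with the same counts.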
    +
    +
    +def feedback_url(result_details_link):
    +  url_args = [
    +      ('labels', 'Pri-2,Type-Bug,Restrict-View-Google'),
    +      ('summary', 'Result Details Feedback:'),
    +      ('components', 'Test>Android'),
    +  ]
    +  if result_details_link:
    +    url_args.append(('comment', 'Please check out: %s' % result_details_link))
    +  url_args = urlencode(url_args)
    +  return 'https://bugs.chromium.org/p/chromium/issues/entry?%s' % url_args
    +
    +
    +def results_to_html(results_dict, cs_base_url, bucket, test_name,
    +                    builder_name, build_number, local_output):
    +  """Convert list of test results into html format.
    +
    +  Args:
    +    local_output: Whether this results file is uploaded to Google Storage or
    +        just a local file.
    +  """
    +  test_rows_header, test_rows = create_test_table(
    +      results_dict, cs_base_url, test_name, bucket)
    +  suite_rows_header, suite_rows, suite_row_footer = create_suite_table(
    +      results_dict)
    +
    +  suite_table_values = {
    +    'table_id': 'suite-table',
    +    'table_headers': suite_rows_header,
    +    'table_row_blocks': suite_rows,
    +    'table_footer': suite_row_footer,
    +  }
    +
    +  test_table_values = {
    +    'table_id': 'test-table',
    +    'table_headers': test_rows_header,
    +    'table_row_blocks': test_rows,
    +  }
    +
    +  main_template = JINJA_ENVIRONMENT.get_template(
    +      os.path.join('template', 'main.html'))
    +
    +  if local_output:
    +    html_render = main_template.render(  #  pylint: disable=no-member
    +        {
    +          'tb_values': [suite_table_values, test_table_values],
    +          'feedback_url': feedback_url(None),
    +        })
    +    return (html_render, None, None)
    +  dest = google_storage_helper.unique_name(
    +      '%s_%s_%s' % (test_name, builder_name, build_number))
    +  result_details_link = google_storage_helper.get_url_link(
    +      dest, '%s/html' % bucket)
    +  html_render = main_template.render(  #  pylint: disable=no-member
    +      {
    +        'tb_values': [suite_table_values, test_table_values],
    +        'feedback_url': feedback_url(result_details_link),
    +      })
    +  return (html_render, dest, result_details_link)
    +
    +
    +def result_details(json_path, test_name, cs_base_url, bucket=None,
    +                   builder_name=None, build_number=None, local_output=False):
    +  """Get result details from json path and then convert results to html.
    +
    +  Args:
    +    local_output: Whether this results file is uploaded to Google Storage or
    +        just a local file.
    +  """
    +
    +  with open(json_path) as json_file:
    +    json_object = json.loads(json_file.read())
    +
+  if 'per_iteration_data' not in json_object:
    +    return 'Error: json file missing per_iteration_data.'
    +
    +  results_dict = collections.defaultdict(list)
    +  for testsuite_run in json_object['per_iteration_data']:
    +    for test, test_runs in testsuite_run.items():
    +      results_dict[test].extend(test_runs)
    +  return results_to_html(results_dict, cs_base_url, bucket, test_name,
    +                         builder_name, build_number, local_output)
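+# Local runs can skip Google Storage entirely; a minimal sketch with
+# hypothetical paths:
+#   html, dest, link = result_details('out/Debug/output.json',
+#                                     'chrome_public_test_apk',
+#                                     'http://cs.chromium.org',
+#                                     local_output=True)
+# dest and link are None in that case; write html out yourself to view it.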
    +
    +
    +def upload_to_google_bucket(html, bucket, dest):
    +  with tempfile.NamedTemporaryFile(suffix='.html') as temp_file:
    +    temp_file.write(html)
    +    temp_file.flush()
    +    return google_storage_helper.upload(
    +        name=dest,
    +        filepath=temp_file.name,
    +        bucket='%s/html' % bucket,
    +        content_type='text/html',
    +        authenticated_link=True)
    +
    +
    +def ui_screenshot_set(json_path):
    +  with open(json_path) as json_file:
    +    json_object = json.loads(json_file.read())
+  if 'per_iteration_data' not in json_object:
+    # result_details will already report this as an error; no need to
+    # duplicate it here.
    +    return None
    +  ui_screenshots = []
    +  # pylint: disable=too-many-nested-blocks
    +  for testsuite_run in json_object['per_iteration_data']:
    +    for _, test_runs in testsuite_run.items():
    +      for test_run in test_runs:
    +        if 'ui screenshot' in test_run['links']:
    +          screenshot_link = test_run['links']['ui screenshot']
    +          if screenshot_link.startswith('file:'):
    +            with contextlib.closing(urlopen(screenshot_link)) as f:
    +              test_screenshots = json.load(f)
    +          else:
    +            # Assume anything that isn't a file link is a google storage link
    +            screenshot_string = google_storage_helper.read_from_link(
    +                screenshot_link)
    +            if not screenshot_string:
    +              logging.error('Bad screenshot link %s', screenshot_link)
    +              continue
    +            test_screenshots = json.loads(
    +                screenshot_string)
    +          ui_screenshots.extend(test_screenshots)
    +  # pylint: enable=too-many-nested-blocks
    +
    +  if ui_screenshots:
    +    return json.dumps(ui_screenshots)
    +  return None
    +
    +
    +def upload_screenshot_set(json_path, test_name, bucket, builder_name,
    +                          build_number):
    +  screenshot_set = ui_screenshot_set(json_path)
    +  if not screenshot_set:
    +    return None
    +  dest = google_storage_helper.unique_name(
    +    'screenshots_%s_%s_%s' % (test_name, builder_name, build_number),
    +    suffix='.json')
    +  with tempfile.NamedTemporaryFile(mode='w', suffix='.json') as temp_file:
    +    temp_file.write(screenshot_set)
    +    temp_file.flush()
    +    return google_storage_helper.upload(
    +        name=dest,
    +        filepath=temp_file.name,
    +        bucket='%s/json' % bucket,
    +        content_type='application/json',
    +        authenticated_link=True)
    +
    +
    +def main():
    +  parser = argparse.ArgumentParser()
    +  parser.add_argument('--json-file', help='Path of json file.')
    +  parser.add_argument('--cs-base-url', help='Base url for code search.',
    +                      default='http://cs.chromium.org')
    +  parser.add_argument('--bucket', help='Google storage bucket.', required=True)
    +  parser.add_argument('--builder-name', help='Builder name.')
    +  parser.add_argument('--build-number', help='Build number.')
    +  parser.add_argument('--test-name', help='The name of the test.',
    +                      required=True)
    +  parser.add_argument(
    +      '-o', '--output-json',
    +      help='(Swarming Merge Script API) '
    +           'Output JSON file to create.')
    +  parser.add_argument(
    +      '--build-properties',
    +      help='(Swarming Merge Script API) '
    +           'Build property JSON file provided by recipes.')
    +  parser.add_argument(
    +      '--summary-json',
    +      help='(Swarming Merge Script API) '
    +           'Summary of shard state running on swarming. '
    +           '(Output of the swarming.py collect '
    +           '--task-summary-json=XXX command.)')
    +  parser.add_argument(
    +      '--task-output-dir',
    +      help='(Swarming Merge Script API) '
    +           'Directory containing all swarming task results.')
    +  parser.add_argument(
    +      'positional', nargs='*',
    +      help='output.json from shards.')
    +
    +  args = parser.parse_args()
    +
    +  if ((args.build_properties is None) ==
    +         (args.build_number is None or args.builder_name is None)):
+    raise parser.error('Exactly one of build_properties or '
+                       '(build_number and builder_name) should be given.')
    +
    +  if (args.build_number is None) != (args.builder_name is None):
+    raise parser.error('args.build_number and args.builder_name '
+                       'have to be given together '
+                       'or not given at all.')
    +
    +  if len(args.positional) == 0 and args.json_file is None:
    +    if args.output_json:
    +      with open(args.output_json, 'w') as f:
    +        json.dump({}, f)
    +    return
    +  if len(args.positional) != 0 and args.json_file:
    +    raise parser.error('Exactly one of args.positional and '
    +                       'args.json_file should be given.')
    +
    +  if args.build_properties:
    +    build_properties = json.loads(args.build_properties)
+    if ('buildnumber' not in build_properties or
+        'buildername' not in build_properties):
    +      raise parser.error('Build number/builder name not specified.')
    +    build_number = build_properties['buildnumber']
    +    builder_name = build_properties['buildername']
    +  elif args.build_number and args.builder_name:
    +    build_number = args.build_number
    +    builder_name = args.builder_name
    +
    +  if args.positional:
    +    if len(args.positional) == 1:
    +      json_file = args.positional[0]
    +    else:
    +      if args.output_json and args.summary_json:
    +        standard_gtest_merge.standard_gtest_merge(
    +            args.output_json, args.summary_json, args.positional)
    +        json_file = args.output_json
    +      elif not args.output_json:
    +        raise Exception('output_json required by merge API is missing.')
    +      else:
    +        raise Exception('summary_json required by merge API is missing.')
    +  elif args.json_file:
    +    json_file = args.json_file
    +
    +  if not os.path.exists(json_file):
    +    raise IOError('--json-file %s not found.' % json_file)
    +
    +  # Link to result details presentation page is a part of the page.
    +  result_html_string, dest, result_details_link = result_details(
    +      json_file, args.test_name, args.cs_base_url, args.bucket,
    +      builder_name, build_number)
    +
    +  result_details_link_2 = upload_to_google_bucket(
    +      result_html_string.encode('UTF-8'),
    +      args.bucket, dest)
    +  assert result_details_link == result_details_link_2, (
+      'Result details links do not match. The link returned by get_url_link'
    +      ' should be the same as that returned by upload.')
    +
    +  ui_screenshot_set_link = upload_screenshot_set(json_file, args.test_name,
    +      args.bucket, builder_name, build_number)
    +
    +  if ui_screenshot_set_link:
    +    ui_catalog_url = 'https://chrome-ui-catalog.appspot.com/'
    +    ui_catalog_query = urlencode({'screenshot_source': ui_screenshot_set_link})
    +    ui_screenshot_link = '%s?%s' % (ui_catalog_url, ui_catalog_query)
    +
    +  if args.output_json:
    +    with open(json_file) as original_json_file:
    +      json_object = json.load(original_json_file)
    +      json_object['links'] = {
    +          'result_details (logcats, flakiness links)': result_details_link
    +      }
    +
    +      if ui_screenshot_set_link:
    +        json_object['links']['ui screenshots'] = ui_screenshot_link
    +
    +      with open(args.output_json, 'w') as f:
    +        json.dump(json_object, f)
    +  else:
    +    print('Result Details: %s' % result_details_link)
    +
    +    if ui_screenshot_set_link:
    +      print('UI Screenshots %s' % ui_screenshot_link)
    +
    +
    +if __name__ == '__main__':
    +  sys.exit(main())
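+# A typical local invocation, sketched with hypothetical values:
+#   vpython3 test_results_presentation.py \
+#       --json-file out/Debug/output.json \
+#       --test-name chrome_public_test_apk \
+#       --bucket chromium-result-details \
+#       --builder-name android-builder --build-number 123
+# As a Swarming merge script, the shard output.json paths arrive as
+# positionals, together with --output-json and --summary-json.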
    diff --git a/android/pylib/results/presentation/test_results_presentation.pydeps b/android/pylib/results/presentation/test_results_presentation.pydeps
    new file mode 100644
    index 000000000000..031e1793bf52
    --- /dev/null
    +++ b/android/pylib/results/presentation/test_results_presentation.pydeps
    @@ -0,0 +1,46 @@
    +# Generated by running:
    +#   build/print_python_deps.py --root build/android/pylib/results/presentation --output build/android/pylib/results/presentation/test_results_presentation.pydeps build/android/pylib/results/presentation/test_results_presentation.py
    +../../../../../third_party/catapult/devil/devil/__init__.py
    +../../../../../third_party/catapult/devil/devil/android/__init__.py
    +../../../../../third_party/catapult/devil/devil/android/constants/__init__.py
    +../../../../../third_party/catapult/devil/devil/android/constants/chrome.py
    +../../../../../third_party/catapult/devil/devil/android/sdk/__init__.py
    +../../../../../third_party/catapult/devil/devil/android/sdk/keyevent.py
    +../../../../../third_party/catapult/devil/devil/android/sdk/version_codes.py
    +../../../../../third_party/catapult/devil/devil/base_error.py
    +../../../../../third_party/catapult/devil/devil/constants/__init__.py
    +../../../../../third_party/catapult/devil/devil/constants/exit_codes.py
    +../../../../../third_party/catapult/devil/devil/utils/__init__.py
    +../../../../../third_party/catapult/devil/devil/utils/cmd_helper.py
    +../../../../../third_party/jinja2/__init__.py
    +../../../../../third_party/jinja2/_identifier.py
    +../../../../../third_party/jinja2/async_utils.py
    +../../../../../third_party/jinja2/bccache.py
    +../../../../../third_party/jinja2/compiler.py
    +../../../../../third_party/jinja2/defaults.py
    +../../../../../third_party/jinja2/environment.py
    +../../../../../third_party/jinja2/exceptions.py
    +../../../../../third_party/jinja2/filters.py
    +../../../../../third_party/jinja2/idtracking.py
    +../../../../../third_party/jinja2/lexer.py
    +../../../../../third_party/jinja2/loaders.py
    +../../../../../third_party/jinja2/nodes.py
    +../../../../../third_party/jinja2/optimizer.py
    +../../../../../third_party/jinja2/parser.py
    +../../../../../third_party/jinja2/runtime.py
    +../../../../../third_party/jinja2/tests.py
    +../../../../../third_party/jinja2/utils.py
    +../../../../../third_party/jinja2/visitor.py
    +../../../../../third_party/markupsafe/__init__.py
    +../../../../../third_party/markupsafe/_compat.py
    +../../../../../third_party/markupsafe/_native.py
    +../../__init__.py
    +../../constants/__init__.py
    +../../constants/host_paths.py
    +../../utils/__init__.py
    +../../utils/decorators.py
    +../../utils/google_storage_helper.py
    +../__init__.py
    +__init__.py
    +standard_gtest_merge.py
    +test_results_presentation.py
    diff --git a/android/pylib/results/report_results.py b/android/pylib/results/report_results.py
    new file mode 100644
    index 000000000000..de19860bd7f3
    --- /dev/null
    +++ b/android/pylib/results/report_results.py
    @@ -0,0 +1,135 @@
    +# Copyright 2013 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +"""Module containing utility functions for reporting results."""
    +
    +
    +import logging
    +import os
    +import re
    +
    +from pylib import constants
    +from pylib.results.flakiness_dashboard import results_uploader
    +from pylib.utils import logging_utils
    +
    +
    +def _LogToFile(results, test_type, suite_name):
    +  """Log results to local files which can be used for aggregation later."""
    +  log_file_path = os.path.join(constants.GetOutDirectory(), 'test_logs')
    +  if not os.path.exists(log_file_path):
    +    os.mkdir(log_file_path)
    +  full_file_name = os.path.join(
    +      log_file_path, re.sub(r'\W', '_', test_type).lower() + '.log')
    +  if not os.path.exists(full_file_name):
    +    with open(full_file_name, 'w') as log_file:
    +      print(
    +          '\n%s results for %s build %s:' %
    +          (test_type, os.environ.get('BUILDBOT_BUILDERNAME'),
    +           os.environ.get('BUILDBOT_BUILDNUMBER')),
    +          file=log_file)
+
+  logging.info('Writing results to %s.', full_file_name)
    +  with open(full_file_name, 'a') as log_file:
    +    shortened_suite_name = suite_name[:25] + (suite_name[25:] and '...')
    +    print(
    +        '%s%s' % (shortened_suite_name.ljust(30), results.GetShortForm()),
    +        file=log_file)
    +
    +
    +def _LogToFlakinessDashboard(results, test_type, test_package,
    +                             flakiness_server):
    +  """Upload results to the flakiness dashboard"""
    +  logging.info('Upload results for test type "%s", test package "%s" to %s',
    +               test_type, test_package, flakiness_server)
    +
    +  try:
    +    # TODO(jbudorick): remove Instrumentation once instrumentation tests
    +    # switch to platform mode.
    +    if test_type in ('instrumentation', 'Instrumentation'):
    +      if flakiness_server == constants.UPSTREAM_FLAKINESS_SERVER:
    +        assert test_package in ['ContentShellTest',
    +                                'ChromePublicTest',
    +                                'ChromeSyncShellTest',
    +                                'SystemWebViewShellLayoutTest',
    +                                'WebViewInstrumentationTest']
+        # Note: removesuffix() (Python 3.9+) is used rather than
+        # rstrip('test'), which strips trailing 't'/'e'/'s' characters
+        # instead of the literal suffix.
+        dashboard_test_type = ('%s_instrumentation_tests' %
+                               test_package.lower().removesuffix('test'))
    +      # Downstream server.
    +      else:
    +        dashboard_test_type = 'Chromium_Android_Instrumentation'
    +
    +    elif test_type == 'gtest':
    +      dashboard_test_type = test_package
    +
    +    else:
+      logging.warning('Invalid test type: %s', test_type)
    +      return
    +
    +    results_uploader.Upload(
    +        results, flakiness_server, dashboard_test_type)
    +
    +  except Exception: # pylint: disable=broad-except
    +    logging.exception('Failure while logging to %s', flakiness_server)
    +
    +
    +def LogFull(results, test_type, test_package, annotation=None,
    +            flakiness_server=None):
    +  """Log the tests results for the test suite.
    +
    +  The results will be logged three different ways:
    +    1. Log to stdout.
    +    2. Log to local files for aggregating multiple test steps
    +       (on buildbots only).
    +    3. Log to flakiness dashboard (on buildbots only).
    +
    +  Args:
    +    results: An instance of TestRunResults object.
    +    test_type: Type of the test (e.g. 'Instrumentation', 'Unit test', etc.).
    +    test_package: Test package name (e.g. 'ipc_tests' for gtests,
    +                  'ContentShellTest' for instrumentation tests)
+    annotation: If instrumentation test type, this is a list of annotations
+                (e.g. ['Feature', 'SmallTest']).
+    flakiness_server: If provided, upload the results to the flakiness
+                      dashboard with this URL.
+  """
    +  # pylint doesn't like how colorama set up its color enums.
    +  # pylint: disable=no-member
    +  black_on_white = (logging_utils.BACK.WHITE, logging_utils.FORE.BLACK)
    +  with logging_utils.OverrideColor(logging.CRITICAL, black_on_white):
    +    if not results.DidRunPass():
    +      logging.critical('*' * 80)
    +      logging.critical('Detailed Logs')
    +      logging.critical('*' * 80)
    +      for line in results.GetLogs().splitlines():
    +        logging.critical(line)
    +    logging.critical('*' * 80)
    +    logging.critical('Summary')
    +    logging.critical('*' * 80)
    +    # Assign uniform color, depending on presence of 'FAILED' over lines.
    +    if any('FAILED' in line for line in results.GetGtestForm().splitlines()):
    +      # Red on white, dim.
    +      color = (logging_utils.BACK.WHITE, logging_utils.FORE.RED,
    +               logging_utils.STYLE.DIM)
    +    else:
    +      # Green on white, dim.
    +      color = (logging_utils.BACK.WHITE, logging_utils.FORE.GREEN,
    +               logging_utils.STYLE.DIM)
    +    with logging_utils.OverrideColor(logging.CRITICAL, color):
    +      for line in results.GetGtestForm().splitlines():
    +        logging.critical(line)
    +    logging.critical('*' * 80)
    +
    +  if os.environ.get('BUILDBOT_BUILDERNAME'):
+    # It is possible to have multiple buildbot steps for the same
+    # instrumentation test package using different annotations.
    +    if annotation and len(annotation) == 1:
    +      suite_name = annotation[0]
    +    else:
    +      suite_name = test_package
    +    _LogToFile(results, test_type, suite_name)
    +
    +    if flakiness_server:
    +      _LogToFlakinessDashboard(results, test_type, test_package,
    +                               flakiness_server)
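+
+
+# Example call (illustrative only; |results| is a TestRunResults instance):
+#
+#   LogFull(results,
+#           test_type='Instrumentation',
+#           test_package='ChromePublicTest',
+#           annotation=['SmallTest'],
+#           flakiness_server=constants.UPSTREAM_FLAKINESS_SERVER)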
    diff --git a/android/pylib/symbols/__init__.py b/android/pylib/symbols/__init__.py
    new file mode 100644
    index 000000000000..e69de29bb2d1
    diff --git a/android/pylib/symbols/deobfuscator.py b/android/pylib/symbols/deobfuscator.py
    new file mode 100644
    index 000000000000..710609854f2b
    --- /dev/null
    +++ b/android/pylib/symbols/deobfuscator.py
    @@ -0,0 +1,50 @@
    +# Copyright 2017 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +import os
    +
    +from pylib import constants
    +from .expensive_line_transformer import ExpensiveLineTransformer
    +from .expensive_line_transformer import ExpensiveLineTransformerPool
    +
    +_MINIMUM_TIMEOUT = 10.0
    +_PER_LINE_TIMEOUT = .005  # Should be able to process 200 lines per second.
    +_PROCESS_START_TIMEOUT = 20.0
    +_MAX_RESTARTS = 4  # Should be plenty unless tool is crashing on start-up.
    +_POOL_SIZE = 4
+_PASSTHROUGH_ON_FAILURE = False
    +
    +
    +class Deobfuscator(ExpensiveLineTransformer):
    +  def __init__(self, mapping_path):
    +    super().__init__(_PROCESS_START_TIMEOUT, _MINIMUM_TIMEOUT,
    +                     _PER_LINE_TIMEOUT)
    +    script_path = os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'android',
    +                               'stacktrace', 'java_deobfuscate.py')
    +    self._command = [script_path, mapping_path]
    +    self.start()
    +
    +  @property
    +  def name(self):
    +    return "deobfuscator"
    +
    +  @property
    +  def command(self):
    +    return self._command
    +
    +
    +class DeobfuscatorPool(ExpensiveLineTransformerPool):
    +  def __init__(self, mapping_path):
    +    # As of Sep 2017, each instance requires about 500MB of RAM, as measured by:
    +    # /usr/bin/time -v build/android/stacktrace/java_deobfuscate.py \
    +    #     out/Release/apks/ChromePublic.apk.mapping
    +    self.mapping_path = mapping_path
+    super().__init__(_MAX_RESTARTS, _POOL_SIZE, _PASSTHROUGH_ON_FAILURE)
    +
    +  @property
    +  def name(self):
    +    return "deobfuscator-pool"
    +
    +  def CreateTransformer(self):
    +    return Deobfuscator(self.mapping_path)
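+
+
+# Typical usage (a sketch; the mapping path is illustrative):
+#
+#   pool = DeobfuscatorPool('out/Release/apks/ChromePublic.apk.mapping')
+#   readable_lines = pool.TransformLines(obfuscated_logcat_lines)
+#   pool.Close()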
    diff --git a/android/pylib/symbols/expensive_line_transformer.py b/android/pylib/symbols/expensive_line_transformer.py
    new file mode 100644
    index 000000000000..08cbe52a673e
    --- /dev/null
    +++ b/android/pylib/symbols/expensive_line_transformer.py
    @@ -0,0 +1,233 @@
    +# Copyright 2023 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +from abc import ABC, abstractmethod
    +import logging
    +import subprocess
    +import threading
    +import time
    +import uuid
    +
    +from devil.utils import reraiser_thread
    +
    +
    +class ExpensiveLineTransformer(ABC):
    +  def __init__(self, process_start_timeout, minimum_timeout, per_line_timeout):
    +    self._process_start_timeout = process_start_timeout
    +    self._minimum_timeout = minimum_timeout
    +    self._per_line_timeout = per_line_timeout
    +    self._started = False
    +    # Allow only one thread to call TransformLines() at a time.
    +    self._lock = threading.Lock()
    +    # Ensure that only one thread attempts to kill self._proc in Close().
    +    self._close_lock = threading.Lock()
    +    self._closed_called = False
    +    # Assign to None so that attribute exists if Popen() throws.
    +    self._proc = None
+    # Set in start(); records when the child process was launched.
+    self._proc_start_time = None
    +
    +  def start(self):
+    # Starting the process is deferred to this method so that subclasses can
+    # finish their own initialization first.
    +    if self._started:
    +      logging.error('%s: Trying to start an already started command', self.name)
    +      return
    +
+    # Record the start time; TransformLines() extends its timeout while the
+    # process is still starting up.
+    self._proc_start_time = time.time()
    +
    +    if not self.command:
    +      logging.error('%s: No command available', self.name)
    +      return
    +
    +    self._proc = subprocess.Popen(self.command,
    +                                  bufsize=1,
    +                                  stdin=subprocess.PIPE,
    +                                  stdout=subprocess.PIPE,
    +                                  universal_newlines=True,
    +                                  close_fds=True)
    +    self._started = True
    +
    +  def IsClosed(self):
    +    return (not self._started or self._closed_called
    +            or self._proc.returncode is not None)
    +
    +  def IsBusy(self):
    +    return self._lock.locked()
    +
    +  def IsReady(self):
    +    return self._started and not self.IsClosed() and not self.IsBusy()
    +
    +  def TransformLines(self, lines):
    +    """Symbolizes names found in the given lines.
    +
    +    If anything goes wrong (process crashes, timeout, etc), returns |lines|.
    +
    +    Args:
    +      lines: A list of strings without trailing newlines.
    +
    +    Returns:
    +      A list of strings without trailing newlines.
    +    """
    +    if not lines:
    +      return []
    +
+    # Symbolized output contains more lines than the input, since symbolized
+    # stack traces are added. To account for the extra output lines, keep
+    # reading until the eof_line token is reached. The token uses a format
+    # that third_party/android_platform/development/scripts/stack_core.py
+    # passes through as a "useful line" without modifying it.
    +    eof_line = self.getEofLine()
    +    out_lines = []
    +
    +    def _reader():
    +      while True:
    +        line = self._proc.stdout.readline()
+        # readline() returns an empty string at EOF (when the pipe closes).
    +        if not line:
    +          break
    +        line = line[:-1]
    +        if line == eof_line:
    +          break
    +        out_lines.append(line)
    +
    +    if self.IsBusy():
    +      logging.warning('%s: Having to wait for transformation.', self.name)
    +
    +    # Allow only one thread to operate at a time.
    +    with self._lock:
    +      if self.IsClosed():
    +        if self._started and not self._closed_called:
    +          logging.warning('%s: Process exited with code=%d.', self.name,
    +                          self._proc.returncode)
    +          self.Close()
    +        return lines
    +
    +      reader_thread = reraiser_thread.ReraiserThread(_reader)
    +      reader_thread.start()
    +
    +      try:
    +        self._proc.stdin.write('\n'.join(lines))
    +        self._proc.stdin.write('\n{}\n'.format(eof_line))
    +        self._proc.stdin.flush()
    +        time_since_proc_start = time.time() - self._proc_start_time
    +        timeout = (max(0, self._process_start_timeout - time_since_proc_start) +
    +                   max(self._minimum_timeout,
    +                       len(lines) * self._per_line_timeout))
    +        reader_thread.join(timeout)
    +        if self.IsClosed():
    +          logging.warning('%s: Close() called by another thread during join().',
    +                          self.name)
    +          return lines
    +        if reader_thread.is_alive():
    +          logging.error('%s: Timed out after %f seconds with input:', self.name,
    +                        timeout)
    +          for l in lines:
    +            logging.error(l)
    +          logging.error(eof_line)
    +          logging.error('%s: End of timed out input.', self.name)
    +          logging.error('%s: Timed out output was:', self.name)
    +          for l in out_lines:
    +            logging.error(l)
    +          logging.error('%s: End of timed out output.', self.name)
    +          self.Close()
    +          return lines
    +        return out_lines
    +      except IOError:
    +        logging.exception('%s: Exception during transformation', self.name)
    +        self.Close()
    +        return lines
    +
    +  def Close(self):
    +    with self._close_lock:
    +      needs_closing = not self.IsClosed()
    +      self._closed_called = True
    +
    +    if needs_closing:
    +      self._proc.stdin.close()
    +      self._proc.kill()
    +      self._proc.wait()
    +
    +  def __del__(self):
    +    # self._proc is None when Popen() fails.
    +    if not self._closed_called and self._proc:
    +      logging.error('%s: Forgot to Close()', self.name)
    +      self.Close()
    +
    +  @property
    +  @abstractmethod
    +  def name(self):
    +    ...
    +
    +  @property
    +  @abstractmethod
    +  def command(self):
    +    ...
    +
    +  @staticmethod
    +  def getEofLine():
    +    # Use a format that will be considered a "useful line" without modifying its
    +    # output by third_party/android_platform/development/scripts/stack_core.py
    +    return "Generic useful log header: \'{}\'".format(uuid.uuid4().hex)
    +
    +
    +class ExpensiveLineTransformerPool(ABC):
    +  def __init__(self, max_restarts, pool_size, passthrough_on_failure):
    +    self._max_restarts = max_restarts
    +    self._pool = [self.CreateTransformer() for _ in range(pool_size)]
    +    self._passthrough_on_failure = passthrough_on_failure
    +    # Allow only one thread to select from the pool at a time.
    +    self._lock = threading.Lock()
    +    self._num_restarts = 0
    +
+  def __enter__(self):
+    return self
    +
    +  def __exit__(self, *args):
    +    self.Close()
    +
    +  def TransformLines(self, lines):
    +    with self._lock:
    +      assert self._pool, 'TransformLines() called on a closed Pool.'
    +
+      # The transformer keeps failing; treat it as broken.
    +      if self._num_restarts == self._max_restarts:
    +        if self._passthrough_on_failure:
    +          return lines
    +        raise Exception('%s is broken.' % self.name)
    +
    +      # Restart any closed transformer.
    +      for i, d in enumerate(self._pool):
    +        if d.IsClosed():
    +          logging.warning('%s: Restarting closed instance.', self.name)
    +          self._pool[i] = self.CreateTransformer()
    +          self._num_restarts += 1
    +          if self._num_restarts == self._max_restarts:
    +            logging.warning('%s: MAX_RESTARTS reached.', self.name)
    +            if self._passthrough_on_failure:
    +              return lines
    +            raise Exception('%s is broken.' % self.name)
    +
    +      selected = next((x for x in self._pool if x.IsReady()), self._pool[0])
    +      # Rotate the order so that next caller will not choose the same one.
    +      self._pool.remove(selected)
    +      self._pool.append(selected)
    +
    +    return selected.TransformLines(lines)
    +
    +  def Close(self):
    +    with self._lock:
    +      for d in self._pool:
    +        d.Close()
    +      self._pool = None
    +
    +  @abstractmethod
    +  def CreateTransformer(self):
    +    ...
    +
    +  @property
    +  @abstractmethod
    +  def name(self):
    +    ...
    diff --git a/android/pylib/symbols/mock_addr2line/__init__.py b/android/pylib/symbols/mock_addr2line/__init__.py
    new file mode 100644
    index 000000000000..e69de29bb2d1
    diff --git a/android/pylib/symbols/mock_addr2line/mock_addr2line b/android/pylib/symbols/mock_addr2line/mock_addr2line
    new file mode 100755
    index 000000000000..431f387f8f7c
    --- /dev/null
    +++ b/android/pylib/symbols/mock_addr2line/mock_addr2line
    @@ -0,0 +1,80 @@
    +#!/usr/bin/env python3
    +# Copyright 2014 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +"""Simple mock for addr2line.
    +
    +Outputs mock symbol information, with each symbol being a function of the
    +original address (so it is easy to double-check consistency in unittests).
    +"""
    +
    +
    +import optparse
    +import os
    +import posixpath
    +import sys
    +import time
    +
    +
    +def main(argv):
    +  parser = optparse.OptionParser()
    +  parser.add_option('-e', '--exe', dest='exe')  # Path of the debug-library.so.
    +  # Silently swallow the other unnecessary arguments.
    +  parser.add_option('-C', '--demangle', action='store_true')
    +  parser.add_option('-f', '--functions', action='store_true')
    +  parser.add_option('-i', '--inlines', action='store_true')
    +  options, _ = parser.parse_args(argv[1:])
    +  lib_file_name = posixpath.basename(options.exe)
    +  processed_sym_count = 0
    +  crash_every = int(os.environ.get('MOCK_A2L_CRASH_EVERY', 0))
    +  hang_every = int(os.environ.get('MOCK_A2L_HANG_EVERY', 0))
    +
+  while True:
    +    line = sys.stdin.readline().rstrip('\r')
    +    if not line:
    +      break
    +
+    # An empty line should generate '??,??:0' (used as a marker for inlines).
    +    if line == '\n':
    +      print('??')
    +      print('??:0')
    +      sys.stdout.flush()
    +      continue
    +
    +    addr = int(line, 16)
    +    processed_sym_count += 1
    +    if crash_every and processed_sym_count % crash_every == 0:
    +      sys.exit(1)
    +    if hang_every and processed_sym_count % hang_every == 0:
    +      time.sleep(1)
    +
    +    # Addresses < 1M will return good mock symbol information.
    +    if addr < 1024 * 1024:
    +      print('mock_sym_for_addr_%d' % addr)
    +      print('mock_src/%s.c:%d' % (lib_file_name, addr))
    +
    +    # Addresses 1M <= x < 2M will return symbols with a name but a missing path.
    +    elif addr < 2 * 1024 * 1024:
    +      print('mock_sym_for_addr_%d' % addr)
    +      print('??:0')
    +
    +    # Addresses 2M <= x < 3M will return unknown symbol information.
    +    elif addr < 3 * 1024 * 1024:
    +      print('??')
    +      print('??')
    +
    +    # Addresses 3M <= x < 4M will return inlines.
    +    elif addr < 4 * 1024 * 1024:
    +      print('mock_sym_for_addr_%d_inner' % addr)
    +      print('mock_src/%s.c:%d' % (lib_file_name, addr))
    +      print('mock_sym_for_addr_%d_middle' % addr)
    +      print('mock_src/%s.c:%d' % (lib_file_name, addr))
    +      print('mock_sym_for_addr_%d_outer' % addr)
    +      print('mock_src/%s.c:%d' % (lib_file_name, addr))
    +
    +    sys.stdout.flush()
    +
    +
    +if __name__ == '__main__':
    +  main(sys.argv)
    \ No newline at end of file
    diff --git a/android/pylib/symbols/stack_symbolizer.py b/android/pylib/symbols/stack_symbolizer.py
    new file mode 100644
    index 000000000000..e3203bfca501
    --- /dev/null
    +++ b/android/pylib/symbols/stack_symbolizer.py
    @@ -0,0 +1,137 @@
    +# Copyright 2017 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +import logging
    +import os
    +import re
    +import tempfile
    +import time
    +
    +from devil.utils import cmd_helper
    +from pylib import constants
    +from pylib.constants import host_paths
    +from .expensive_line_transformer import ExpensiveLineTransformer
    +from .expensive_line_transformer import ExpensiveLineTransformerPool
    +
    +_STACK_TOOL = os.path.join(host_paths.ANDROID_PLATFORM_DEVELOPMENT_SCRIPTS_PATH,
    +                           'stack')
    +_MINIMUM_TIMEOUT = 10.0
    +_PER_LINE_TIMEOUT = .005  # Should be able to process 200 lines per second.
    +_PROCESS_START_TIMEOUT = 20.0
    +_MAX_RESTARTS = 4  # Should be plenty unless tool is crashing on start-up.
    +_POOL_SIZE = 1
+_PASSTHROUGH_ON_FAILURE = True
    +ABI_REG = re.compile('ABI: \'(.+?)\'')
    +
    +
    +def _DeviceAbiToArch(device_abi):
+  # The order of this list is significant: the more specific match
+  # (e.g., arm64) must be found before the less specific (e.g., arm).
+  arches = ['arm64', 'arm', 'x86_64', 'x86', 'mips']
    +  for arch in arches:
    +    if arch in device_abi:
    +      return arch
    +  raise RuntimeError('Unknown device ABI: %s' % device_abi)
    +
    +
    +class Symbolizer:
    +  """A helper class to symbolize stack."""
    +
    +  def __init__(self, apk_under_test=None):
    +    self._apk_under_test = apk_under_test
    +    self._time_spent_symbolizing = 0
    +
    +
    +  def __del__(self):
    +    self.CleanUp()
    +
    +
    +  def CleanUp(self):
    +    """Clean up the temporary directory of apk libs."""
    +    if self._time_spent_symbolizing > 0:
    +      logging.info(
    +          'Total time spent symbolizing: %.2fs', self._time_spent_symbolizing)
    +
    +
    +  def ExtractAndResolveNativeStackTraces(self, data_to_symbolize,
    +                                         device_abi, include_stack=True):
    +    """Run the stack tool for given input.
    +
+    Args:
+      data_to_symbolize: a list of strings to symbolize.
+      device_abi: the default ABI of the device which generated the tombstone.
+      include_stack: boolean whether to include stack data in output.
    +
    +    Yields:
    +      A string for each line of resolved stack output.
    +    """
    +    if not os.path.exists(_STACK_TOOL):
    +      logging.warning('%s missing. Unable to resolve native stack traces.',
    +                      _STACK_TOOL)
    +      return
    +
    +    arch = _DeviceAbiToArch(device_abi)
    +    if not arch:
    +      logging.warning('No device_abi can be found.')
    +      return
    +
    +    cmd = [_STACK_TOOL, '--arch', arch, '--output-directory',
    +           constants.GetOutDirectory(), '--more-info']
    +    env = dict(os.environ)
    +    env['PYTHONDONTWRITEBYTECODE'] = '1'
    +    with tempfile.NamedTemporaryFile(mode='w') as f:
    +      f.write('\n'.join(data_to_symbolize))
    +      f.flush()
    +      start = time.time()
    +      try:
    +        _, output = cmd_helper.GetCmdStatusAndOutput(cmd + [f.name], env=env)
    +      finally:
    +        self._time_spent_symbolizing += time.time() - start
    +    for line in output.splitlines():
    +      if not include_stack and 'Stack Data:' in line:
    +        break
    +      yield line
    +
    +
    +class PassThroughSymbolizer(ExpensiveLineTransformer):
    +  def __init__(self, device_abi):
    +    self._command = None
    +    super().__init__(_PROCESS_START_TIMEOUT, _MINIMUM_TIMEOUT,
    +                     _PER_LINE_TIMEOUT)
    +    if not os.path.exists(_STACK_TOOL):
+      logging.warning('%s: %s missing. Unable to resolve native stack traces.',
+                      self.name, _STACK_TOOL)
    +      return
    +    arch = _DeviceAbiToArch(device_abi)
    +    if not arch:
+      logging.warning('%s: No device_abi can be found.', self.name)
    +      return
    +    self._command = [
    +        _STACK_TOOL, '--arch', arch, '--output-directory',
    +        constants.GetOutDirectory(), '--more-info', '--pass-through', '--flush',
    +        '--quiet', '-'
    +    ]
    +    self.start()
    +
    +  @property
    +  def name(self):
    +    return "symbolizer"
    +
    +  @property
    +  def command(self):
    +    return self._command
    +
    +
    +class PassThroughSymbolizerPool(ExpensiveLineTransformerPool):
    +  def __init__(self, device_abi):
    +    self._device_abi = device_abi
+    super().__init__(_MAX_RESTARTS, _POOL_SIZE, _PASSTHROUGH_ON_FAILURE)
    +
    +  def CreateTransformer(self):
    +    return PassThroughSymbolizer(self._device_abi)
    +
    +  @property
    +  def name(self):
    +    return "symbolizer-pool"
    diff --git a/android/pylib/utils/__init__.py b/android/pylib/utils/__init__.py
    new file mode 100644
    index 000000000000..e69de29bb2d1
    diff --git a/android/pylib/utils/app_bundle_utils.py b/android/pylib/utils/app_bundle_utils.py
    new file mode 100644
    index 000000000000..9a52d852358c
    --- /dev/null
    +++ b/android/pylib/utils/app_bundle_utils.py
    @@ -0,0 +1,195 @@
    +# Copyright 2018 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +import json
    +import logging
    +import os
    +import pathlib
    +import re
    +import shutil
    +import sys
    +import zipfile
    +
    +sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'gyp'))
    +
    +from util import build_utils
    +from util import md5_check
    +from util import resource_utils
    +import bundletool
    +
    +# "system_apks" is "default", but with locale list and compressed dex.
    +_SYSTEM_MODES = ('system', 'system_apks')
    +BUILD_APKS_MODES = _SYSTEM_MODES + ('default', 'universal')
    +OPTIMIZE_FOR_OPTIONS = ('ABI', 'SCREEN_DENSITY', 'LANGUAGE',
    +                        'TEXTURE_COMPRESSION_FORMAT')
    +
    +_ALL_ABIS = ['armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64']
    +
    +
    +def _BundleMinSdkVersion(bundle_path):
    +  manifest_data = bundletool.RunBundleTool(
    +      ['dump', 'manifest', '--bundle', bundle_path])
    +  return int(re.search(r'minSdkVersion.*?(\d+)', manifest_data).group(1))
    +
    +
    +def _CreateDeviceSpec(bundle_path, sdk_version, locales):
    +  if not sdk_version:
    +    sdk_version = _BundleMinSdkVersion(bundle_path)
    +
    +  # Setting sdkVersion=minSdkVersion prevents multiple per-minSdkVersion .apk
    +  # files from being created within the .apks file.
    +  return {
    +      'screenDensity': 1000,  # Ignored since we don't split on density.
    +      'sdkVersion': sdk_version,
    +      'supportedAbis': _ALL_ABIS,  # Our .aab files are already split on abi.
    +      'supportedLocales': locales,
    +  }
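+
+
+# For example, _CreateDeviceSpec(bundle_path, 24, ['en-US', 'hi']) produces
+# (illustrative values):
+#
+#   {
+#     'screenDensity': 1000,
+#     'sdkVersion': 24,
+#     'supportedAbis': ['armeabi-v7a', 'arm64-v8a', 'x86', 'x86_64'],
+#     'supportedLocales': ['en-US', 'hi'],
+#   }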
    +
    +
    +def _FixBundleDexCompressionGlob(src_bundle, dst_bundle):
    +  # Modifies the BundleConfig.pb of the given .aab to add "classes*.dex" to the
    +  # "uncompressedGlob" list.
    +  with zipfile.ZipFile(src_bundle) as src, \
    +      zipfile.ZipFile(dst_bundle, 'w') as dst:
    +    for info in src.infolist():
    +      data = src.read(info)
    +      if info.filename == 'BundleConfig.pb':
    +        # A classesX.dex entry is added by create_app_bundle.py so that we can
    +        # modify it here in order to have it take effect. b/176198991
    +        data = data.replace(b'classesX.dex', b'classes*.dex')
    +      dst.writestr(info, data)
    +
    +
    +def GenerateBundleApks(bundle_path,
    +                       bundle_apks_path,
    +                       aapt2_path,
    +                       keystore_path,
    +                       keystore_password,
    +                       keystore_alias,
    +                       mode=None,
    +                       local_testing=False,
    +                       minimal=False,
    +                       minimal_sdk_version=None,
    +                       check_for_noop=True,
    +                       system_image_locales=None,
    +                       optimize_for=None):
    +  """Generate an .apks archive from a an app bundle if needed.
    +
    +  Args:
    +    bundle_path: Input bundle file path.
    +    bundle_apks_path: Output bundle .apks archive path. Name must end with
    +      '.apks' or this operation will fail.
    +    aapt2_path: Path to aapt2 build tool.
    +    keystore_path: Path to keystore.
    +    keystore_password: Keystore password, as a string.
    +    keystore_alias: Keystore signing key alias.
+    mode: Build mode, which must be either None or one of BUILD_APKS_MODES.
+    local_testing: Whether to pass --local-testing to bundletool.
+    minimal: Create the minimal set of apks possible (english-only).
+    minimal_sdk_version: Use this sdkVersion when |minimal| or
+      |system_image_locales| args are present.
+    check_for_noop: Use md5_check to short-circuit when inputs have not changed.
+    system_image_locales: Locales to package in the APK when mode is one of
+      the modes in _SYSTEM_MODES.
+    optimize_for: Overrides split configuration, which must be None or
+      one of OPTIMIZE_FOR_OPTIONS.
    +  """
    +  device_spec = None
    +  if minimal_sdk_version:
    +    assert minimal or system_image_locales, (
    +        'minimal_sdk_version is only used when minimal or system_image_locales '
    +        'is specified')
    +  if minimal:
    +    # Measure with one language split installed. Use Hindi because it is
    +    # popular. resource_size.py looks for splits/base-hi.apk.
    +    # Note: English is always included since it's in base-master.apk.
    +    device_spec = _CreateDeviceSpec(bundle_path, minimal_sdk_version, ['hi'])
    +  elif mode in _SYSTEM_MODES:
    +    if not system_image_locales:
    +      raise Exception('system modes require system_image_locales')
+    # Bundletool doesn't seem to understand device specs with locales in the
+    # form of "<lang>-r<region>", so just provide the language code instead.
    +    locales = [
    +        resource_utils.ToAndroidLocaleName(l).split('-')[0]
    +        for l in system_image_locales
    +    ]
    +    device_spec = _CreateDeviceSpec(bundle_path, minimal_sdk_version, locales)
    +
    +  def rebuild():
    +    logging.info('Building %s', bundle_apks_path)
    +    with build_utils.TempDir() as tmp_dir:
    +      tmp_apks_file = os.path.join(tmp_dir, 'output.apks')
    +      cmd_args = [
    +          'build-apks',
    +          '--aapt2=%s' % aapt2_path,
    +          '--output=%s' % tmp_apks_file,
    +          '--ks=%s' % keystore_path,
    +          '--ks-pass=pass:%s' % keystore_password,
    +          '--ks-key-alias=%s' % keystore_alias,
    +          '--overwrite',
    +      ]
    +      input_bundle_path = bundle_path
    +      # Work around bundletool not respecting uncompressDexFiles setting.
    +      # b/176198991
    +      if mode not in _SYSTEM_MODES and _BundleMinSdkVersion(bundle_path) >= 27:
    +        input_bundle_path = os.path.join(tmp_dir, 'system.aab')
    +        _FixBundleDexCompressionGlob(bundle_path, input_bundle_path)
    +
    +      cmd_args += ['--bundle=%s' % input_bundle_path]
    +
    +      if local_testing:
    +        cmd_args += ['--local-testing']
    +
    +      if mode is not None:
    +        if mode not in BUILD_APKS_MODES:
    +          raise Exception('Invalid mode parameter %s (should be in %s)' %
    +                          (mode, BUILD_APKS_MODES))
    +        if mode != 'system_apks':
    +          cmd_args += ['--mode=' + mode]
    +        else:
    +          # Specify --optimize-for to prevent language splits being created.
    +          cmd_args += ['--optimize-for=device_tier']
    +
    +      if optimize_for:
    +        if optimize_for not in OPTIMIZE_FOR_OPTIONS:
+          raise Exception('Invalid optimize_for parameter %s '
+                          '(should be in %s)' %
+                          (optimize_for, OPTIMIZE_FOR_OPTIONS))
    +        cmd_args += ['--optimize-for=' + optimize_for]
    +
    +      if device_spec:
    +        data = json.dumps(device_spec)
    +        logging.debug('Device Spec: %s', data)
    +        spec_file = pathlib.Path(tmp_dir) / 'device.json'
    +        spec_file.write_text(data)
    +        cmd_args += ['--device-spec=' + str(spec_file)]
    +
    +      bundletool.RunBundleTool(cmd_args)
    +
    +      shutil.move(tmp_apks_file, bundle_apks_path)
    +
    +  if check_for_noop:
    +    input_paths = [
    +        bundle_path,
    +        bundletool.BUNDLETOOL_JAR_PATH,
    +        aapt2_path,
    +        keystore_path,
    +    ]
    +    input_strings = [
    +        keystore_password,
    +        keystore_alias,
    +        device_spec,
    +    ]
    +    if mode is not None:
    +      input_strings.append(mode)
    +
    +    # Avoid rebuilding (saves ~20s) when the input files have not changed. This
    +    # is essential when calling the apk_operations.py script multiple times with
    +    # the same bundle (e.g. out/Debug/bin/monochrome_public_bundle run).
    +    md5_check.CallAndRecordIfStale(
    +        rebuild,
    +        input_paths=input_paths,
    +        input_strings=input_strings,
    +        output_paths=[bundle_apks_path])
    +  else:
    +    rebuild()
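+
+
+# Example invocation (a sketch; the paths and signing values are
+# illustrative):
+#
+#   GenerateBundleApks(
+#       bundle_path='out/Release/apks/ChromePublic.aab',
+#       bundle_apks_path='out/Release/apks/ChromePublic.apks',
+#       aapt2_path='third_party/android_build_tools/aapt2/aapt2',
+#       keystore_path='build/android/chromium-debug.keystore',
+#       keystore_password='chromium',
+#       keystore_alias='chromiumdebugkey',
+#       mode='universal')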
    diff --git a/android/pylib/utils/argparse_utils.py b/android/pylib/utils/argparse_utils.py
    new file mode 100644
    index 000000000000..698be786310e
    --- /dev/null
    +++ b/android/pylib/utils/argparse_utils.py
    @@ -0,0 +1,52 @@
    +# Copyright 2015 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +
    +
    +import argparse
    +
    +
    +class CustomHelpAction(argparse.Action):
    +  '''Allows defining custom help actions.
    +
    +  Help actions can run even when the parser would otherwise fail on missing
    +  arguments. The first help or custom help command mentioned on the command
    +  line will have its help text displayed.
    +
    +  Usage:
    +      parser = argparse.ArgumentParser(...)
    +      CustomHelpAction.EnableFor(parser)
    +      parser.add_argument('--foo-help',
    +                          action='custom_help',
    +                          custom_help_text='this is the help message',
    +                          help='What this helps with')
    +  '''
    +  # Derived from argparse._HelpAction from
    +  # https://github.com/python/cpython/blob/main/Lib/argparse.py
    +
    +  # pylint: disable=redefined-builtin
    +  # (complains about 'help' being redefined)
    +  def __init__(self,
    +               option_strings,
    +               dest=argparse.SUPPRESS,
    +               default=argparse.SUPPRESS,
    +               custom_help_text=None,
    +               help=None):
    +    super().__init__(option_strings=option_strings,
    +                     dest=dest,
    +                     default=default,
    +                     nargs=0,
    +                     help=help)
    +
    +    if not custom_help_text:
    +      raise ValueError('custom_help_text is required')
    +    self._help_text = custom_help_text
    +
    +  def __call__(self, parser, namespace, values, option_string=None):
    +    print(self._help_text)
    +    parser.exit()
    +
    +  @staticmethod
    +  def EnableFor(parser):
    +    parser.register('action', 'custom_help', CustomHelpAction)
    diff --git a/android/pylib/utils/chrome_proxy_utils.py b/android/pylib/utils/chrome_proxy_utils.py
    new file mode 100644
    index 000000000000..14960f41f797
    --- /dev/null
    +++ b/android/pylib/utils/chrome_proxy_utils.py
    @@ -0,0 +1,171 @@
    +# Copyright 2020 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +"""Utilities for setting up and tear down WPR and TsProxy service."""
    +
    +from py_utils import ts_proxy_server
    +from py_utils import webpagereplay_go_server
    +
    +from devil.android import forwarder
    +
    +PROXY_HOST_IP = '127.0.0.1'
    +# From Catapult/WebPageReplay document.
    +IGNORE_CERT_ERROR_SPKI_LIST = 'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I='
    +PROXY_SERVER = 'socks5://localhost'
    +DEFAULT_DEVICE_PORT = 1080
    +DEFAULT_ROUND_TRIP_LATENCY_MS = 100
    +DEFAULT_DOWNLOAD_BANDWIDTH_KBPS = 72000
    +DEFAULT_UPLOAD_BANDWIDTH_KBPS = 72000
    +
    +
    +class WPRServer:
    +  """Utils to set up a webpagereplay_go_server instance."""
    +
    +  def __init__(self):
    +    self._archive_path = None
    +    self._host_http_port = 0
    +    self._host_https_port = 0
    +    self._record_mode = False
    +    self._server = None
    +
    +  def StartServer(self, wpr_archive_path):
    +    """Starts a webpagereplay_go_server instance."""
    +    if wpr_archive_path == self._archive_path and self._server:
    +      # Reuse existing webpagereplay_go_server instance.
    +      return
    +
    +    if self._server:
    +      self.StopServer()
    +
    +    replay_options = []
    +    if self._record_mode:
    +      replay_options.append('--record')
    +
    +    ports = {}
    +    if not self._server:
    +      self._server = webpagereplay_go_server.ReplayServer(
    +          wpr_archive_path,
    +          PROXY_HOST_IP,
    +          http_port=self._host_http_port,
    +          https_port=self._host_https_port,
    +          replay_options=replay_options)
    +      self._archive_path = wpr_archive_path
    +      ports = self._server.StartServer()
    +
    +    self._host_http_port = ports['http']
    +    self._host_https_port = ports['https']
    +
    +  def StopServer(self):
    +    """Stops the webpagereplay_go_server instance and resets archive."""
    +    self._server.StopServer()
    +    self._server = None
    +    self._host_http_port = 0
    +    self._host_https_port = 0
    +
    +  @staticmethod
    +  def SetServerBinaryPath(go_binary_path):
    +    """Sets the go_binary_path for webpagereplay_go_server.ReplayServer."""
    +    webpagereplay_go_server.ReplayServer.SetGoBinaryPath(go_binary_path)
    +
    +  @property
    +  def record_mode(self):
    +    return self._record_mode
    +
    +  @record_mode.setter
    +  def record_mode(self, value):
    +    self._record_mode = value
    +
    +  @property
    +  def http_port(self):
    +    return self._host_http_port
    +
    +  @property
    +  def https_port(self):
    +    return self._host_https_port
    +
    +  @property
    +  def archive_path(self):
    +    return self._archive_path
    +
    +
    +class ChromeProxySession:
    +  """Utils to help set up a Chrome Proxy."""
    +
    +  def __init__(self, device_proxy_port=DEFAULT_DEVICE_PORT):
    +    self._device_proxy_port = device_proxy_port
    +    self._ts_proxy_server = ts_proxy_server.TsProxyServer(PROXY_HOST_IP)
    +    self._wpr_server = WPRServer()
    +
    +  @property
    +  def wpr_record_mode(self):
    +    """Returns whether this proxy session was running in record mode."""
    +    return self._wpr_server.record_mode
    +
    +  @wpr_record_mode.setter
    +  def wpr_record_mode(self, value):
    +    self._wpr_server.record_mode = value
    +
    +  @property
    +  def wpr_replay_mode(self):
    +    """Returns whether this proxy session was running in replay mode."""
    +    return not self._wpr_server.record_mode
    +
    +  @property
    +  def wpr_archive_path(self):
    +    """Returns the wpr archive file path used in this proxy session."""
    +    return self._wpr_server.archive_path
    +
    +  @property
    +  def device_proxy_port(self):
    +    return self._device_proxy_port
    +
    +  def GetFlags(self):
    +    """Gets the chrome command line flags to be needed by ChromeProxySession."""
    +    extra_flags = []
    +
    +    extra_flags.append('--ignore-certificate-errors-spki-list=%s' %
    +                       IGNORE_CERT_ERROR_SPKI_LIST)
    +    extra_flags.append('--proxy-server=%s:%s' %
    +                       (PROXY_SERVER, self._device_proxy_port))
    +    return extra_flags
    +
    +  @staticmethod
    +  def SetWPRServerBinary(go_binary_path):
    +    """Sets the WPR server go_binary_path."""
    +    WPRServer.SetServerBinaryPath(go_binary_path)
    +
    +  def Start(self, device, wpr_archive_path):
    +    """Starts the wpr_server as well as the ts_proxy server and setups env.
    +
    +    Args:
    +      device: A DeviceUtils instance.
    +      wpr_archive_path: A abs path to the wpr archive file.
    +
    +    """
    +    self._wpr_server.StartServer(wpr_archive_path)
    +    self._ts_proxy_server.StartServer()
    +
    +    # Maps device port to host port
    +    forwarder.Forwarder.Map(
    +        [(self._device_proxy_port, self._ts_proxy_server.port)], device)
    +    # Maps tsProxy port to wpr http/https ports
    +    self._ts_proxy_server.UpdateOutboundPorts(
    +        http_port=self._wpr_server.http_port,
    +        https_port=self._wpr_server.https_port)
    +    self._ts_proxy_server.UpdateTrafficSettings(
    +        round_trip_latency_ms=DEFAULT_ROUND_TRIP_LATENCY_MS,
    +        download_bandwidth_kbps=DEFAULT_DOWNLOAD_BANDWIDTH_KBPS,
    +        upload_bandwidth_kbps=DEFAULT_UPLOAD_BANDWIDTH_KBPS)
    +
    +  def Stop(self, device):
    +    """Stops the wpr_server, and ts_proxy server and tears down env.
    +
    +    Note that Stop does not reset wpr_record_mode, wpr_replay_mode,
    +    wpr_archive_path property.
    +
    +    Args:
    +      device: A DeviceUtils instance.
    +    """
    +    self._wpr_server.StopServer()
    +    self._ts_proxy_server.StopServer()
    +    forwarder.Forwarder.UnmapDevicePort(self._device_proxy_port, device)
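+
+
+# Typical session (a sketch; |device| is a connected DeviceUtils instance and
+# the archive path is illustrative):
+#
+#   session = ChromeProxySession()
+#   session.wpr_record_mode = False  # Replay from the archive.
+#   session.Start(device, '/tmp/archive.wprgo')
+#   # ... launch Chrome with session.GetFlags() added to its command line ...
+#   session.Stop(device)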
    diff --git a/android/pylib/utils/chrome_proxy_utils_test.py b/android/pylib/utils/chrome_proxy_utils_test.py
    new file mode 100755
    index 000000000000..2b8981204333
    --- /dev/null
    +++ b/android/pylib/utils/chrome_proxy_utils_test.py
    @@ -0,0 +1,235 @@
    +#!/usr/bin/env vpython3
    +# Copyright 2020 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +"""Tests for chrome_proxy_utils."""
    +
    +#pylint: disable=protected-access
    +
    +import os
    +import unittest
    +
    +from pylib.utils import chrome_proxy_utils
    +
    +from devil.android import forwarder
    +from devil.android import device_utils
    +from devil.android.sdk import adb_wrapper
    +from py_utils import ts_proxy_server
    +from py_utils import webpagereplay_go_server
    +
    +import mock  # pylint: disable=import-error
    +
    +
    +def _DeviceUtilsMock(test_serial, is_ready=True):
    +  """Returns a DeviceUtils instance based on given serial."""
    +  adb = mock.Mock(spec=adb_wrapper.AdbWrapper)
    +  adb.__str__ = mock.Mock(return_value=test_serial)
    +  adb.GetDeviceSerial.return_value = test_serial
    +  adb.is_ready = is_ready
    +  return device_utils.DeviceUtils(adb)
    +
    +
    +class ChromeProxySessionTest(unittest.TestCase):
    +  """Unittest for ChromeProxySession."""
    +
    +  #pylint: disable=no-self-use
    +
    +  @mock.patch.object(forwarder.Forwarder, 'Map')
    +  @mock.patch.object(chrome_proxy_utils.WPRServer, 'StartServer')
    +  @mock.patch.object(ts_proxy_server.TsProxyServer, 'StartServer')
    +  @mock.patch.object(ts_proxy_server.TsProxyServer, 'UpdateOutboundPorts')
    +  @mock.patch.object(ts_proxy_server.TsProxyServer, 'UpdateTrafficSettings')
    +  @mock.patch('py_utils.ts_proxy_server.TsProxyServer.port',
    +              new_callable=mock.PropertyMock)
    +  def test_Start(self, port_mock, traffic_setting_mock, outboundport_mock,
    +                 start_server_mock, wpr_mock, forwarder_mock):
    +    chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
    +    chrome_proxy._wpr_server._host_http_port = 1
    +    chrome_proxy._wpr_server._host_https_port = 2
    +    port_mock.return_value = 3
    +    device = _DeviceUtilsMock('01234')
    +    chrome_proxy.Start(device, 'abc')
    +
    +    forwarder_mock.assert_called_once_with([(4, 3)], device)
    +    wpr_mock.assert_called_once_with('abc')
    +    start_server_mock.assert_called_once()
    +    outboundport_mock.assert_called_once_with(http_port=1, https_port=2)
    +    traffic_setting_mock.assert_called_once_with(download_bandwidth_kbps=72000,
    +                                                 round_trip_latency_ms=100,
    +                                                 upload_bandwidth_kbps=72000)
    +    port_mock.assert_called_once()
    +
    +  @mock.patch.object(forwarder.Forwarder, 'UnmapDevicePort')
    +  @mock.patch.object(chrome_proxy_utils.WPRServer, 'StopServer')
    +  @mock.patch.object(ts_proxy_server.TsProxyServer, 'StopServer')
    +  def test_Stop(self, ts_proxy_mock, wpr_mock, forwarder_mock):
    +    chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
    +    device = _DeviceUtilsMock('01234')
    +    chrome_proxy.wpr_record_mode = True
    +    chrome_proxy._wpr_server._archive_path = 'abc'
    +    chrome_proxy.Stop(device)
    +
    +    forwarder_mock.assert_called_once_with(4, device)
    +    wpr_mock.assert_called_once_with()
    +    ts_proxy_mock.assert_called_once_with()
    +
    +  #pylint: enable=no-self-use
    +
    +  @mock.patch.object(forwarder.Forwarder, 'UnmapDevicePort')
    +  @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StopServer')
    +  @mock.patch.object(ts_proxy_server.TsProxyServer, 'StopServer')
    +  def test_Stop_WithProperties(self, ts_proxy_mock, wpr_mock, forwarder_mock):
    +    chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
    +    chrome_proxy._wpr_server._server = webpagereplay_go_server.ReplayServer(
    +        os.path.abspath(__file__), chrome_proxy_utils.PROXY_HOST_IP, 0, 0, [])
    +    chrome_proxy._wpr_server._archive_path = os.path.abspath(__file__)
    +    device = _DeviceUtilsMock('01234')
    +    chrome_proxy.wpr_record_mode = True
    +    chrome_proxy.Stop(device)
    +
    +    forwarder_mock.assert_called_once_with(4, device)
    +    wpr_mock.assert_called_once_with()
    +    ts_proxy_mock.assert_called_once_with()
    +    self.assertFalse(chrome_proxy.wpr_replay_mode)
    +    self.assertEqual(chrome_proxy.wpr_archive_path, os.path.abspath(__file__))
    +
    +  def test_SetWPRRecordMode(self):
    +    chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
    +    chrome_proxy.wpr_record_mode = True
    +    self.assertTrue(chrome_proxy._wpr_server.record_mode)
    +    self.assertTrue(chrome_proxy.wpr_record_mode)
    +    self.assertFalse(chrome_proxy.wpr_replay_mode)
    +
    +    chrome_proxy.wpr_record_mode = False
    +    self.assertFalse(chrome_proxy._wpr_server.record_mode)
    +    self.assertFalse(chrome_proxy.wpr_record_mode)
    +    self.assertTrue(chrome_proxy.wpr_replay_mode)
    +
    +  def test_SetWPRArchivePath(self):
    +    chrome_proxy = chrome_proxy_utils.ChromeProxySession(4)
    +    chrome_proxy._wpr_server._archive_path = 'abc'
    +    self.assertEqual(chrome_proxy.wpr_archive_path, 'abc')
    +
    +  def test_UseDefaultDeviceProxyPort(self):
    +    chrome_proxy = chrome_proxy_utils.ChromeProxySession()
    +    expected_flags = [
    +        '--ignore-certificate-errors-spki-list='
    +        'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I=',
    +        '--proxy-server=socks5://localhost:1080'
    +    ]
    +    self.assertEqual(chrome_proxy.device_proxy_port, 1080)
    +    self.assertListEqual(chrome_proxy.GetFlags(), expected_flags)
    +
    +  def test_UseNewDeviceProxyPort(self):
    +    chrome_proxy = chrome_proxy_utils.ChromeProxySession(1)
    +    expected_flags = [
    +        '--ignore-certificate-errors-spki-list='
    +        'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I=',
    +        '--proxy-server=socks5://localhost:1'
    +    ]
    +    self.assertEqual(chrome_proxy.device_proxy_port, 1)
    +    self.assertListEqual(chrome_proxy.GetFlags(), expected_flags)
    +
    +
    +class WPRServerTest(unittest.TestCase):
    +  @mock.patch('py_utils.webpagereplay_go_server.ReplayServer')
+  def test_StartServer_fresh_replaymode(self, wpr_mock):
    +    wpr_server = chrome_proxy_utils.WPRServer()
    +    wpr_archive_file = os.path.abspath(__file__)
    +    wpr_server.StartServer(wpr_archive_file)
    +
    +    wpr_mock.assert_called_once_with(wpr_archive_file,
    +                                     '127.0.0.1',
    +                                     http_port=0,
    +                                     https_port=0,
    +                                     replay_options=[])
    +
    +    self.assertEqual(wpr_server._archive_path, wpr_archive_file)
    +    self.assertTrue(wpr_server._server)
    +
    +  @mock.patch('py_utils.webpagereplay_go_server.ReplayServer')
+  def test_StartServer_fresh_recordmode(self, wpr_mock):
    +    wpr_server = chrome_proxy_utils.WPRServer()
    +    wpr_server.record_mode = True
    +    wpr_server.StartServer(os.path.abspath(__file__))
    +    wpr_archive_file = os.path.abspath(__file__)
    +
    +    wpr_mock.assert_called_once_with(wpr_archive_file,
    +                                     '127.0.0.1',
    +                                     http_port=0,
    +                                     https_port=0,
    +                                     replay_options=['--record'])
    +
    +    self.assertEqual(wpr_server._archive_path, os.path.abspath(__file__))
    +    self.assertTrue(wpr_server._server)
    +
    +  #pylint: disable=no-self-use
    +
    +  @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StartServer')
+  def test_StartServer_recordmode(self, start_server_mock):
    +    wpr_server = chrome_proxy_utils.WPRServer()
    +    start_server_mock.return_value = {'http': 1, 'https': 2}
    +    wpr_server.StartServer(os.path.abspath(__file__))
    +
    +    start_server_mock.assert_called_once()
    +    self.assertEqual(wpr_server._host_http_port, 1)
    +    self.assertEqual(wpr_server._host_https_port, 2)
    +    self.assertEqual(wpr_server._archive_path, os.path.abspath(__file__))
    +    self.assertTrue(wpr_server._server)
    +
    +  @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StartServer')
+  def test_StartServer_reuseServer(self, start_server_mock):
    +    wpr_server = chrome_proxy_utils.WPRServer()
    +    wpr_server._server = webpagereplay_go_server.ReplayServer(
    +        os.path.abspath(__file__),
    +        chrome_proxy_utils.PROXY_HOST_IP,
    +        http_port=0,
    +        https_port=0,
    +        replay_options=[])
    +    wpr_server._archive_path = os.path.abspath(__file__)
    +    wpr_server.StartServer(os.path.abspath(__file__))
    +    start_server_mock.assert_not_called()
    +
    +  @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StartServer')
    +  @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StopServer')
+  def test_StartServer_notReuseServer(self, stop_server_mock, start_server_mock):
    +    wpr_server = chrome_proxy_utils.WPRServer()
    +    wpr_server._server = webpagereplay_go_server.ReplayServer(
    +        os.path.abspath(__file__),
    +        chrome_proxy_utils.PROXY_HOST_IP,
    +        http_port=0,
    +        https_port=0,
    +        replay_options=[])
    +    wpr_server._archive_path = ''
    +    wpr_server.StartServer(os.path.abspath(__file__))
    +    start_server_mock.assert_called_once()
    +    stop_server_mock.assert_called_once()
    +
    +  #pylint: enable=no-self-use
    +
    +  @mock.patch.object(webpagereplay_go_server.ReplayServer, 'StopServer')
    +  def test_StopServer(self, stop_server_mock):
    +    wpr_server = chrome_proxy_utils.WPRServer()
    +    wpr_server._server = webpagereplay_go_server.ReplayServer(
    +        os.path.abspath(__file__),
    +        chrome_proxy_utils.PROXY_HOST_IP,
    +        http_port=0,
    +        https_port=0,
    +        replay_options=[])
    +    wpr_server.StopServer()
    +    stop_server_mock.assert_called_once()
    +    self.assertFalse(wpr_server._server)
    +    self.assertFalse(wpr_server._archive_path)
    +    self.assertFalse(wpr_server.http_port)
    +    self.assertFalse(wpr_server.https_port)
    +
    +  def test_SetWPRRecordMode(self):
    +    wpr_server = chrome_proxy_utils.WPRServer()
    +    wpr_server.record_mode = True
    +    self.assertTrue(wpr_server.record_mode)
    +    wpr_server.record_mode = False
    +    self.assertFalse(wpr_server.record_mode)
    +
    +
    +if __name__ == '__main__':
    +  unittest.main(verbosity=2)
    diff --git a/android/pylib/utils/decorators.py b/android/pylib/utils/decorators.py
    new file mode 100644
    index 000000000000..0cef420b3764
    --- /dev/null
    +++ b/android/pylib/utils/decorators.py
    @@ -0,0 +1,37 @@
    +# Copyright 2017 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +import functools
    +import logging
    +
    +
    +def Memoize(f):
    +  """Decorator to cache return values of function."""
    +  memoize_dict = {}
    +  @functools.wraps(f)
    +  def wrapper(*args, **kwargs):
    +    key = repr((args, kwargs))
    +    if key not in memoize_dict:
    +      memoize_dict[key] = f(*args, **kwargs)
    +    return memoize_dict[key]
    +  return wrapper
    +
    +
    +def NoRaiseException(default_return_value=None, exception_message=''):
    +  """Returns decorator that catches and logs uncaught Exceptions.
    +
    +  Args:
    +    default_return_value: Value to return in the case of uncaught Exception.
    +    exception_message: Message for uncaught exceptions.
    +  """
    +  def decorator(f):
    +    @functools.wraps(f)
    +    def wrapper(*args, **kwargs):
    +      try:
    +        return f(*args, **kwargs)
    +      except Exception:  # pylint: disable=broad-except
    +        logging.exception(exception_message)
    +        return default_return_value
    +    return wrapper
    +  return decorator
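    
    A minimal usage sketch for the two decorators above; the function names and
    values are illustrative:
    
        from pylib.utils import decorators
    
        @decorators.Memoize
        def fetch_config(name):
          # Runs once per distinct repr((args, kwargs)); later calls hit the cache.
          print('computing %s' % name)
          return {'name': name}
    
        @decorators.NoRaiseException(default_return_value={},
                                     exception_message='fetch failed')
        def fetch_or_empty(name):
          raise IOError('boom for %s' % name)  # logged, not propagated
    
        fetch_config('a')    # prints 'computing a'
        fetch_config('a')    # cached; prints nothing
        fetch_or_empty('a')  # returns {} and logs the exception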
    diff --git a/android/pylib/utils/decorators_test.py b/android/pylib/utils/decorators_test.py
    new file mode 100755
    index 000000000000..f8d9075916ab
    --- /dev/null
    +++ b/android/pylib/utils/decorators_test.py
    @@ -0,0 +1,104 @@
    +#!/usr/bin/env vpython3
    +# Copyright 2017 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +"""Unit tests for decorators.py."""
    +
    +import unittest
    +
    +from pylib.utils import decorators
    +
    +
    +class NoRaiseExceptionDecoratorTest(unittest.TestCase):
    +
    +  def testFunctionDoesNotRaiseException(self):
    +    """Tests that the |NoRaiseException| decorator catches exception."""
    +
    +    @decorators.NoRaiseException()
    +    def raiseException():
    +      raise Exception()
    +
    +    try:
    +      raiseException()
    +    except Exception:  # pylint: disable=broad-except
    +      self.fail('Exception was not caught by |NoRaiseException| decorator')
    +
    +  def testFunctionReturnsCorrectValues(self):
    +    """Tests that the |NoRaiseException| decorator returns correct values."""
    +
    +    @decorators.NoRaiseException(default_return_value=111)
    +    def raiseException():
    +      raise Exception()
    +
    +    @decorators.NoRaiseException(default_return_value=111)
    +    def doesNotRaiseException():
    +      return 999
    +
    +    self.assertEqual(raiseException(), 111)
    +    self.assertEqual(doesNotRaiseException(), 999)
    +
    +
    +class MemoizeDecoratorTest(unittest.TestCase):
    +
    +  def testFunctionExceptionNotMemoized(self):
    +    """Tests that |Memoize| decorator does not cache exception results."""
    +
    +    class ExceptionType1(Exception):
    +      pass
    +
    +    class ExceptionType2(Exception):
    +      pass
    +
    +    @decorators.Memoize
    +    def raiseExceptions():
    +      if raiseExceptions.count == 0:
    +        raiseExceptions.count += 1
    +        raise ExceptionType1()
    +
    +      if raiseExceptions.count == 1:
    +        raise ExceptionType2()
    +    raiseExceptions.count = 0
    +
    +    with self.assertRaises(ExceptionType1):
    +      raiseExceptions()
    +    with self.assertRaises(ExceptionType2):
    +      raiseExceptions()
    +
    +  def testFunctionResultMemoized(self):
    +    """Tests that |Memoize| decorator caches results."""
    +
    +    @decorators.Memoize
    +    def memoized():
    +      memoized.count += 1
    +      return memoized.count
    +    memoized.count = 0
    +
    +    def notMemoized():
    +      notMemoized.count += 1
    +      return notMemoized.count
    +    notMemoized.count = 0
    +
    +    self.assertEqual(memoized(), 1)
    +    self.assertEqual(memoized(), 1)
    +    self.assertEqual(memoized(), 1)
    +
    +    self.assertEqual(notMemoized(), 1)
    +    self.assertEqual(notMemoized(), 2)
    +    self.assertEqual(notMemoized(), 3)
    +
    +  def testFunctionMemoizedBasedOnArgs(self):
    +    """Tests that |Memoize| caches results based on args and kwargs."""
    +
    +    @decorators.Memoize
    +    def returnValueBasedOnArgsKwargs(a, k=0):
    +      return a + k
    +
    +    self.assertEqual(returnValueBasedOnArgsKwargs(1, 1), 2)
    +    self.assertEqual(returnValueBasedOnArgsKwargs(1, 2), 3)
    +    self.assertEqual(returnValueBasedOnArgsKwargs(2, 1), 3)
    +    self.assertEqual(returnValueBasedOnArgsKwargs(3, 3), 6)
    +
    +
    +if __name__ == '__main__':
    +  unittest.main(verbosity=2)
    diff --git a/android/pylib/utils/device_dependencies.py b/android/pylib/utils/device_dependencies.py
    new file mode 100644
    index 000000000000..5f3f1edb6f02
    --- /dev/null
    +++ b/android/pylib/utils/device_dependencies.py
    @@ -0,0 +1,145 @@
    +# Copyright 2016 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +import os
    +import re
    +
    +from pylib import constants
    +
    +_EXCLUSIONS = [
    +    # Misc files that exist to document directories
    +    re.compile(r'.*METADATA'),
    +    re.compile(r'.*OWNERS'),
    +    re.compile(r'.*\.md'),
    +    re.compile(r'.*\.crx'),  # Chrome extension zip files.
    +    re.compile(r'.*/\.git.*'),  # Any '.git*' directories/files.
    +    re.compile(r'.*\.so'),  # Libraries packed into .apk.
    +    re.compile(r'.*Mojo.*manifest\.json'),  # Some source_set()s pull these in.
    +    re.compile(r'.*\.py'),  # Some test_support targets include python deps.
    +    re.compile(r'.*\.apk'),  # Should be installed separately.
    +    re.compile(r'.*\.jar'),  # Never need java intermediates.
    +    re.compile(r'.*\.crx'),  # Used by download_from_google_storage.
    +    re.compile(r'.*lib.java/.*'),  # Never need java intermediates.
    +
    +    # Test filter files:
    +    re.compile(r'.*/testing/buildbot/filters/.*'),
    +
    +    # Chrome external extensions config file.
    +    re.compile(r'.*external_extensions\.json'),
    +
    +    # Exists just to test the compile, not to be run.
    +    re.compile(r'.*jni_generator_tests'),
    +
    +    # v8's blobs and icu data get packaged into APKs.
    +    re.compile(r'.*snapshot_blob.*\.bin'),
    +    re.compile(r'.*icudtl\.bin'),
    +
    +    # Scripts that are needed by swarming, but not on devices:
    +    re.compile(r'.*llvm-symbolizer'),
    +    re.compile(r'.*md5sum_(?:bin|dist)'),
    +    re.compile(r'.*/development/scripts/stack'),
    +    re.compile(r'.*/build/android/pylib/symbols'),
    +    re.compile(r'.*/build/android/stacktrace'),
    +
    +    # Required for java deobfuscation on the host:
    +    re.compile(r'.*build/android/stacktrace/.*'),
    +    re.compile(r'.*third_party/jdk/.*'),
    +    re.compile(r'.*third_party/proguard/.*'),
    +
    +    # Our tests don't need these.
    +    re.compile(r'.*/devtools-frontend/src/front_end/.*'),
    +
    +    # Build artifacts:
    +    re.compile(r'.*\.stamp'),
    +    re.compile(r'.*\.pak\.info'),
    +    re.compile(r'.*\.build_config.json'),
    +    re.compile(r'.*\.incremental\.json'),
    +]
    +
    +
    +def _FilterDataDeps(abs_host_files):
    +  exclusions = _EXCLUSIONS + [
    +      re.compile(os.path.join(constants.GetOutDirectory(), 'bin'))
    +  ]
    +  return [p for p in abs_host_files if not any(r.match(p) for r in exclusions)]
    +
    +
    +def DevicePathComponentsFor(host_path, output_directory):
    +  """Returns the device path components for a given host path.
    +
    +  This returns the device path as a list of joinable path components,
    +  with None as the first element to indicate that the path should be
    +  rooted at $EXTERNAL_STORAGE.
    +
    +  e.g., given
    +
    +    '$RUNTIME_DEPS_ROOT_DIR/foo/bar/baz.txt'
    +
    +  this would return
    +
    +    [None, 'foo', 'bar', 'baz.txt']
    +
    +  This handles a couple of classes of paths differently than it otherwise
    +  would:
    +    - All .pak files get mapped to top-level paks/
    +    - All other dependencies get mapped to the top-level directory
    +        - If a file is not in the output directory, then its relative path
    +          to the output directory will start with .. components, so we
    +          remove those and then the path gets mapped to the top-level
    +          directory
    +        - If a file is in the output directory, then the relative path to
    +          the output directory gets mapped to the top-level directory
    +
    +  e.g. given
    +
    +    '$RUNTIME_DEPS_ROOT_DIR/out/Release/icu_fake_dir/icudtl.dat'
    +
    +  this would return
    +
    +    [None, 'icu_fake_dir', 'icudtl.dat']
    +
    +  Args:
    +    host_path: The absolute path to the host file.
    +    output_directory: The absolute path to the build output directory.
    +  Returns:
    +    A list of device path components.
    +  """
    +  if (host_path.startswith(output_directory) and
    +      os.path.splitext(host_path)[1] == '.pak'):
    +    return [None, 'paks', os.path.basename(host_path)]
    +
    +  rel_host_path = os.path.relpath(host_path, output_directory)
    +
    +  device_path_components = [None]
    +  p = rel_host_path
    +  while p:
    +    p, d = os.path.split(p)
    +    # The relative path from the output directory to a file under the runtime
    +    # deps root directory may start with multiple .. strings, so they need to
    +    # be skipped.
    +    if d and d != os.pardir:
    +      device_path_components.insert(1, d)
    +  return device_path_components
    +
    +
    +def GetDataDependencies(runtime_deps_path):
    +  """Returns a list of device data dependencies.
    +
    +  Args:
    +    runtime_deps_path: A str path to the .runtime_deps file.
    +  Returns:
    +    A list of (host_path, device_path) tuples.
    +  """
    +  if not runtime_deps_path:
    +    return []
    +
    +  with open(runtime_deps_path, 'r') as runtime_deps_file:
    +    rel_host_files = [l.strip() for l in runtime_deps_file if l.strip()]
    +
    +  output_directory = constants.GetOutDirectory()
    +  abs_host_files = [
    +      os.path.abspath(os.path.join(output_directory, r))
    +      for r in rel_host_files]
    +  filtered_abs_host_files = _FilterDataDeps(abs_host_files)
    +  # TODO(crbug.com/752610): Filter out host executables, and investigate
    +  # whether other files could be filtered as well.
    +  return [(f, DevicePathComponentsFor(f, output_directory))
    +          for f in filtered_abs_host_files]
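    
    To make the None-rooted component lists above concrete: a caller turns the
    components into an on-device path by substituting the device's external
    storage root for the leading None. The paths and the device root below are
    hypothetical:
    
        import posixpath
    
        from pylib.utils import device_dependencies
    
        components = device_dependencies.DevicePathComponentsFor(
            '/src/out/Release/foo.pak', '/src/out/Release')
        # -> [None, 'paks', 'foo.pak']
        device_path = posixpath.join('/sdcard/chromium_tests_root',
                                     *components[1:])
        # -> '/sdcard/chromium_tests_root/paks/foo.pak'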
    diff --git a/android/pylib/utils/device_dependencies_test.py b/android/pylib/utils/device_dependencies_test.py
    new file mode 100755
    index 000000000000..2ff937ee6c5c
    --- /dev/null
    +++ b/android/pylib/utils/device_dependencies_test.py
    @@ -0,0 +1,52 @@
    +#! /usr/bin/env vpython3
    +# Copyright 2016 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +import os
    +import unittest
    +
    +from pylib import constants
    +from pylib.utils import device_dependencies
    +
    +
    +class DevicePathComponentsForTest(unittest.TestCase):
    +
    +  def testCheckedInFile(self):
    +    test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'foo', 'bar', 'baz.txt')
    +    output_directory = os.path.join(
    +        constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
    +    self.assertEqual([None, 'foo', 'bar', 'baz.txt'],
    +                     device_dependencies.DevicePathComponentsFor(
    +                         test_path, output_directory))
    +
    +  def testOutputDirectoryFile(self):
    +    test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'out-foo', 'Release',
    +                             'icudtl.dat')
    +    output_directory = os.path.join(
    +        constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
    +    self.assertEqual([None, 'icudtl.dat'],
    +                     device_dependencies.DevicePathComponentsFor(
    +                         test_path, output_directory))
    +
    +  def testOutputDirectorySubdirFile(self):
    +    test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'out-foo', 'Release',
    +                             'test_dir', 'icudtl.dat')
    +    output_directory = os.path.join(
    +        constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
    +    self.assertEqual([None, 'test_dir', 'icudtl.dat'],
    +                     device_dependencies.DevicePathComponentsFor(
    +                         test_path, output_directory))
    +
    +  def testOutputDirectoryPakFile(self):
    +    test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'out-foo', 'Release',
    +                             'foo.pak')
    +    output_directory = os.path.join(
    +        constants.DIR_SOURCE_ROOT, 'out-foo', 'Release')
    +    self.assertEqual([None, 'paks', 'foo.pak'],
    +                     device_dependencies.DevicePathComponentsFor(
    +                         test_path, output_directory))
    +
    +
    +if __name__ == '__main__':
    +  unittest.main()
    diff --git a/android/pylib/utils/dexdump.py b/android/pylib/utils/dexdump.py
    new file mode 100644
    index 000000000000..0913aad47129
    --- /dev/null
    +++ b/android/pylib/utils/dexdump.py
    @@ -0,0 +1,313 @@
    +# Copyright 2016 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +import os
    +import re
    +import shutil
    +import sys
    +import tempfile
    +from xml.etree import ElementTree
    +from collections import namedtuple
    +from typing import Dict
    +
    +from devil.utils import cmd_helper
    +from pylib import constants
    +
    +sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'gyp'))
    +from util import build_utils
    +
    +DEXDUMP_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'dexdump')
    +
    +
    +# Annotations dict format:
    +#   {
    +#     'empty-annotation-class-name': None,
    +#     'annotation-class-name': {
    +#       'fieldA': 'primitive-value',
    +#       'fieldB': [ 'array-item-1', 'array-item-2', ... ],
    +#       'fieldC': {  # CURRENTLY UNSUPPORTED.
    +#         /* Object value */
    +#         'field': 'primitive-value',
    +#         'field': [ 'array-item-1', 'array-item-2', ... ],
    +#         'field': { /* Object value */ }
    +#       }
    +#     }
    +#   }
    +Annotations = namedtuple('Annotations',
    +                         ['classAnnotations', 'methodsAnnotations'])
    +
    +# Finds each space-separated "foo=..." (where ... can contain spaces).
    +_ANNOTATION_VALUE_MATCHER = re.compile(r'\w+=.*?(?:$|(?= \w+=))')
    +
    +
    +def Dump(apk_path):
    +  """Dumps class and method information from a APK into a dict via dexdump.
    +
    +  Args:
    +    apk_path: An absolute path to an APK file to dump.
    +  Returns:
    +    A dict in the following format:
    +      {
    +        : {
    +          'classes': {
    +            : {
    +              'methods': [, ],
    +              'superclass': ,
    +              'is_abstract': ,
    +              'annotations': 
    +            }
    +          }
    +        }
    +      }
    +  """
    +  try:
    +    dexfile_dir = tempfile.mkdtemp()
    +    parsed_dex_files = []
    +    for dex_file in build_utils.ExtractAll(apk_path,
    +                                           dexfile_dir,
    +                                           pattern='*classes*.dex'):
    +      output_xml = cmd_helper.GetCmdOutput(
    +          [DEXDUMP_PATH, '-a', '-j', '-l', 'xml', dex_file])
    +      # Dexdump doesn't escape its XML output very well; decode it as utf-8 with
    +      # invalid sequences replaced, then remove forbidden characters and
    +      # re-encode it (as etree expects a byte string as input so it can figure
    +      # out the encoding itself from the XML declaration)
    +      BAD_XML_CHARS = re.compile(
    +          u'[\x00-\x08\x0b-\x0c\x0e-\x1f\x7f-\x84\x86-\x9f' +
    +          u'\ud800-\udfff\ufdd0-\ufddf\ufffe-\uffff]')
    +
    +      # Line duplicated to avoid pylint redefined-variable-type error.
    +      clean_xml = BAD_XML_CHARS.sub(u'\ufffd', output_xml)
    +
    +      # Constructors are referenced as "<init>" in our annotations,
    +      # which will result in the ElementTree failing to parse
    +      # our xml as it won't find a closing tag for this
    +      clean_xml = clean_xml.replace('<init>', 'constructor')
    +
    +      annotations = _ParseAnnotations(clean_xml)
    +
    +      parsed_dex_files.append(
    +          _ParseRootNode(ElementTree.fromstring(clean_xml.encode('utf-8')),
    +                         annotations))
    +    return parsed_dex_files
    +  finally:
    +    shutil.rmtree(dexfile_dir)
    +
    +
    +def _ParseAnnotationValues(values_str):
    +  if not values_str:
    +    return None
    +  ret = {}
    +  for key_value in _ANNOTATION_VALUE_MATCHER.findall(values_str):
    +    key, value_str = key_value.split('=', 1)
    +    # TODO: support for dicts if ever needed.
    +    if value_str.startswith('{ ') and value_str.endswith(' }'):
    +      value = value_str[2:-2].split()
    +    else:
    +      value = value_str
    +    ret[key] = value
    +  return ret
    +
    +
    +def _ParseAnnotations(dexRaw: str) -> Dict[int, Annotations]:
    +  """ Parse XML strings and return a list of Annotations mapped to
    +  classes by index.
    +
    +  Annotations are written to the dex dump as human readable blocks of text
    +  The only prescription is that they appear before the class in our xml file
    +  They are not required to be nested within the package as our classes
    +  It is simpler to parse for all the annotations and then associate them
    +  back to the
    +  classes
    +
    +  Example:
    +  Class #12 annotations:
    +  Annotations on class
    +    VISIBILITY_RUNTIME Ldalvik/annotation/EnclosingClass; value=...
    +  Annotations on method #512 'example'
    +    VISIBILITY_SYSTEM Ldalvik/annotation/Signature; value=...
    +    VISIBILITY_RUNTIME Landroidx/test/filters/SmallTest;
    +    VISIBILITY_RUNTIME Lorg/chromium/base/test/util/Feature; value={ Cronet }
    +    VISIBILITY_RUNTIME LFoo; key1={ A B } key2=4104 key3=null
    +  """
    +
    +  # We want to find the lines matching the annotations header pattern
    +  # Eg: Class #12 annotations -> true
    +  annotationsBlockMatcher = re.compile(u'^Class #.*annotations:$')
    +  # We want to retrieve the index of the class
    +  # Eg: Class #12 annotations -> 12
    +  classIndexMatcher = re.compile(u'(?<=#)[0-9]*')
    +  # We want to retrieve the method name from between the quotes
    +  # of the annotations line
    +  # Eg: Annotations on method #512 'example'  -> example
    +  methodMatcher = re.compile(u"(?<=')[^']*")
    +  # We want to match everything after the last slash until the semicolon
    +  # Eg: Ldalvik/annotation/Signature; -> Signature
    +  annotationMatcher = re.compile(u'([^/]+); ?(.*)?')
    +
    +  annotations = {}
    +  currentAnnotationsForClass = None
    +  currentAnnotationsBlock: Dict[str, None] = None
    +
    +  # This loop does four things
    +  # 1. It looks for a line telling us we are describing annotations for
    +  #  a new class
    +  # 2. It looks for a line telling us if the annotations we find will be
    +  #  for the class or for any of its methods; we will keep a reference to
    +  #  this
    +  # 3. It adds the annotations to whatever we are holding reference to
    +  # 4. It looks for a line to see if we should start looking for a
    +  #  new class again
    +  for line in dexRaw.splitlines():
    +    if currentAnnotationsForClass is None:
    +      # Step 1
    +      # We keep searching until we find an annotation descriptor
    +      # This lets us know that we are storing annotations for a new class
    +      if annotationsBlockMatcher.match(line):
    +        currentClassIndex = int(classIndexMatcher.findall(line)[0])
    +        currentAnnotationsForClass = Annotations(classAnnotations={},
    +                                                 methodsAnnotations={})
    +        annotations[currentClassIndex] = currentAnnotationsForClass
    +    else:
    +      # Step 2
    +      # If we find a descriptor indicating we are tracking annotations
    +      # for the class or its methods, we'll keep a reference to this
    +      # block for when we start finding annotation references
    +      if line.startswith(u'Annotations on class'):
    +        currentAnnotationsBlock = currentAnnotationsForClass.classAnnotations
    +      elif line.startswith(u'Annotations on method'):
    +        method = methodMatcher.findall(line)[0]
    +        currentAnnotationsBlock = {}
    +        currentAnnotationsForClass.methodsAnnotations[
    +            method] = currentAnnotationsBlock
    +
    +      # If we match against any other type of annotations
    +      # we will ignore them
    +      elif line.startswith(u'Annotations on'):
    +        currentAnnotationsBlock = None
    +
    +      # Step 3
    +      # We are only adding runtime annotations as those are the types
    +      # that will affect if we should run tests or not (where this is
    +      # being used)
    +      elif currentAnnotationsBlock is not None and line.strip().startswith(
    +          'VISIBILITY_RUNTIME'):
    +        annotationName, annotationValuesStr = annotationMatcher.findall(line)[0]
    +        annotationValues = _ParseAnnotationValues(annotationValuesStr)
    +
    +        # Our instrumentation tests expect a mapping of "Annotation: Values",
    +        # where Values is None for annotations without values and a dict of
    +        # the parsed values otherwise.
    +        currentAnnotationsBlock.update({annotationName: annotationValues})
    +
    +      # Step 4
    +      # Empty lines indicate that the annotation descriptions are complete
    +      # and we should look for new classes
    +      elif not line.strip():
    +        currentAnnotationsForClass = None
    +        currentAnnotationsBlock = None
    +
    +  return annotations
    +
    +
    +def _ParseRootNode(root, annotations: Dict[int, Annotations]):
    +  """Parses the XML output of dexdump. This output is in the following format.
    +
    +  This is a subset of the information contained within dexdump output.
    +
    +  
    +    
    +      
    +        
    +        
    +        
    +          
    +          
    +        
    +        
    +          
    +          
    +        
    +      
    +    
    +  
    +  """
    +  results = {}
    +
    +  # Annotations are referenced by the class order
    +  # To match them, we need to keep track of the class number and
    +  # match it to the appropriate annotation at that stage
    +  classCount = 0
    +
    +  for child in root:
    +    if child.tag == 'package':
    +      package_name = child.attrib['name']
    +      parsed_node, classCount = _ParsePackageNode(child, classCount,
    +                                                  annotations)
    +      if package_name in results:
    +        results[package_name]['classes'].update(parsed_node['classes'])
    +      else:
    +        results[package_name] = parsed_node
    +  return results
    +
    +
    +def _ParsePackageNode(package_node, classCount: int,
    +                      annotations: Dict[int, Annotations]):
    +  """Parses a  node from the dexdump xml output.
    +
    +  Returns:
    +    A tuple in the format:
    +      (classes: {
    +        'classes': {
    +          : {
    +            'methods': [, ],
    +            'superclass': ,
    +            'is_abstract': ,
    +            'annotations': 
    +          },
    +          : {
    +            'methods': [, ],
    +            'superclass': ,
    +            'is_abstract': ,
    +            'annotations': 
    +          },
    +        }
    +      }, classCount: number)
    +  """
    +  classes = {}
    +  for child in package_node:
    +    if child.tag == 'class':
    +      classes[child.attrib['name']] = _ParseClassNode(child, classCount,
    +                                                      annotations)
    +      classCount += 1
    +  return ({'classes': classes}, classCount)
    +
    +
    +def _ParseClassNode(class_node, classIndex: int,
    +                    annotations: Dict[int, Annotations]):
    +  """Parses a  node from the dexdump xml output.
    +
    +  Returns:
    +    A dict in the format:
    +      {
    +        'methods': [, ],
    +        'superclass': ,
    +        'is_abstract': 
    +      }
    +  """
    +  methods = []
    +  for child in class_node:
    +    if child.tag == 'method' and child.attrib['visibility'] == 'public':
    +      methods.append(child.attrib['name'])
    +  return {
    +      'methods':
    +      methods,
    +      'superclass':
    +      class_node.attrib['extends'],
    +      'is_abstract':
    +      class_node.attrib.get('abstract') == 'true',
    +      'annotations':
    +      annotations.get(classIndex,
    +                      Annotations(classAnnotations={}, methodsAnnotations={}))
    +  }
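    
    A short sketch of consuming Dump() above; the APK path is hypothetical, and
    each element of the returned list corresponds to one classes*.dex extracted
    from the APK:
    
        from pylib.utils import dexdump
    
        for dex in dexdump.Dump('/tmp/Example.apk'):  # hypothetical APK
          for package_name, package in dex.items():
            for class_name, clazz in package['classes'].items():
              print(package_name, class_name, clazz['superclass'],
                    clazz['is_abstract'], sorted(clazz['methods']),
                    sorted(clazz['annotations'].classAnnotations))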
    diff --git a/android/pylib/utils/dexdump_test.py b/android/pylib/utils/dexdump_test.py
    new file mode 100755
    index 000000000000..2b7c72866db8
    --- /dev/null
    +++ b/android/pylib/utils/dexdump_test.py
    @@ -0,0 +1,207 @@
    +#! /usr/bin/env vpython3
    +# Copyright 2016 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +import unittest
    +from xml.etree import ElementTree
    +
    +from pylib.utils import dexdump
    +
    +# pylint: disable=protected-access
    +
    +emptyAnnotations = dexdump.Annotations(classAnnotations={},
    +                                       methodsAnnotations={})
    +
    +
    +class DexdumpXMLParseTest(unittest.TestCase):
    +
    +  def testParseAnnotations(self):
    +    example_xml_string = (
    +        '\n'
    +        'Class #1 annotations:\n'
    +        'Annotations on class\n'
    +        ' VISIBILITY_RUNTIME Ldalvik/annotation/AppModeFull; value=Alpha\n'
    +        'Annotations on method #512 \'example\'\n'
    +        ' VISIBILITY_SYSTEM Ldalvik/annotation/Signature; value=Bravo\n'
    +        ' VISIBILITY_RUNTIME Ldalvik/annotation/Test;\n'
    +        ' VISIBILITY_RUNTIME Ldalvik/annotation/Test2; value=Charlie\n'
    +        ' VISIBILITY_RUNTIME Ldalvik/annotation/Test3; A=B x B={ C D }\n'
    +        ' VISIBILITY_RUNTIME Ldalvik/annotation/Test4; A=B x B={ C D } C=D\n'
    +        '\n'
    +        '\n'
    +        '\n'
    +        '\n'
    +        '\n')
    +
    +    actual = dexdump._ParseAnnotations(example_xml_string)
    +
    +    expected = {
    +        1:
    +        dexdump.Annotations(
    +            classAnnotations={'AppModeFull': {
    +                'value': 'Alpha'
    +            }},
    +            methodsAnnotations={
    +                'example': {
    +                    'Test': None,
    +                    'Test2': {
    +                        'value': 'Charlie'
    +                    },
    +                    'Test3': {
    +                        'A': 'B x',
    +                        'B': ['C', 'D']
    +                    },
    +                    'Test4': {
    +                        'A': 'B x',
    +                        'B': ['C', 'D'],
    +                        'C': 'D'
    +                    },
    +                }
    +            },
    +        )
    +    }
    +
    +    self.assertEqual(expected, actual)
    +
    +  def testParseRootXmlNode(self):
    +    example_xml_string = ('<api>'
    +                          '<package name="com.foo.bar1">'
    +                          '<class name="Class1"'
    +                          ' extends="java.lang.Object" abstract="false">'
    +                          '<method name="class1Method1" visibility="public">'
    +                          '</method>'
    +                          '<method name="class1Method2" visibility="public">'
    +                          '</method>'
    +                          '</class>'
    +                          '<class name="Class2"'
    +                          ' extends="java.lang.Object" abstract="true">'
    +                          '<method name="class2Method1" visibility="public">'
    +                          '</method>'
    +                          '</class>'
    +                          '</package>'
    +                          '<package name="com.foo.bar2"></package>'
    +                          '<package name="com.foo.bar3"></package>'
    +                          '</api>')
    +
    +    actual = dexdump._ParseRootNode(ElementTree.fromstring(example_xml_string),
    +                                    {})
    +
    +    expected = {
    +        'com.foo.bar1': {
    +            'classes': {
    +                'Class1': {
    +                    'methods': ['class1Method1', 'class1Method2'],
    +                    'superclass': 'java.lang.Object',
    +                    'is_abstract': False,
    +                    'annotations': emptyAnnotations,
    +                },
    +                'Class2': {
    +                    'methods': ['class2Method1'],
    +                    'superclass': 'java.lang.Object',
    +                    'is_abstract': True,
    +                    'annotations': emptyAnnotations,
    +                }
    +            },
    +        },
    +        'com.foo.bar2': {
    +            'classes': {}
    +        },
    +        'com.foo.bar3': {
    +            'classes': {}
    +        },
    +    }
    +    self.assertEqual(expected, actual)
    +
    +  def testParsePackageNode(self):
    +    example_xml_string = (
    +        '<package name="com.foo.bar">'
    +        '<class name="Class1" extends="java.lang.Object" abstract="false">'
    +        '</class>'
    +        '<class name="Class2" extends="java.lang.Object" abstract="true">'
    +        '</class>'
    +        '</package>')
    +
    +    (actual, classCount) = dexdump._ParsePackageNode(
    +        ElementTree.fromstring(example_xml_string), 0, {})
    +
    +    expected = {
    +        'classes': {
    +            'Class1': {
    +                'methods': [],
    +                'superclass': 'java.lang.Object',
    +                'is_abstract': False,
    +                'annotations': emptyAnnotations,
    +            },
    +            'Class2': {
    +                'methods': [],
    +                'superclass': 'java.lang.Object',
    +                'is_abstract': True,
    +                'annotations': emptyAnnotations,
    +            },
    +        },
    +    }
    +    self.assertEqual(expected, actual)
    +    self.assertEqual(classCount, 2)
    +
    +  def testParseClassNode(self):
    +    example_xml_string = ('<class name="Class1"'
    +                          ' extends="java.lang.Object" abstract="false">'
    +                          '<method name="method1" visibility="public">'
    +                          '</method>'
    +                          '<method name="method2" visibility="public">'
    +                          '</method>'
    +                          '<method name="method3" visibility="private">'
    +                          '</method>'
    +                          '</class>')
    +
    +    actual = dexdump._ParseClassNode(ElementTree.fromstring(example_xml_string),
    +                                     0, {})
    +
    +    expected = {
    +        'methods': ['method1', 'method2'],
    +        'superclass': 'java.lang.Object',
    +        'is_abstract': False,
    +        'annotations': emptyAnnotations,
    +    }
    +    self.assertEqual(expected, actual)
    +
    +
    +if __name__ == '__main__':
    +  unittest.main()
    diff --git a/android/pylib/utils/gold_utils.py b/android/pylib/utils/gold_utils.py
    new file mode 100644
    index 000000000000..9dc9fe3e0415
    --- /dev/null
    +++ b/android/pylib/utils/gold_utils.py
    @@ -0,0 +1,78 @@
    +# Copyright 2020 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +"""//build/android implementations of //testing/skia_gold_common.
    +
    +Used for interacting with the Skia Gold image diffing service.
    +"""
    +
    +import os
    +import shutil
    +
    +from devil.utils import cmd_helper
    +from pylib.base.output_manager import Datatype
    +from pylib.constants import host_paths
    +from pylib.utils import repo_utils
    +
    +with host_paths.SysPath(host_paths.BUILD_PATH):
    +  from skia_gold_common import skia_gold_session
    +  from skia_gold_common import skia_gold_session_manager
    +  from skia_gold_common import skia_gold_properties
    +
    +
    +class AndroidSkiaGoldSession(skia_gold_session.SkiaGoldSession):
    +  def _StoreDiffLinks(self, image_name, output_manager, output_dir):
    +    """See SkiaGoldSession._StoreDiffLinks for general documentation.
    +
    +    |output_manager| must be a build.android.pylib.base.OutputManager instance.
    +    """
    +    given_path = closest_path = diff_path = None
    +    # The directory should contain "input-<hash>.png", "closest-<hash>.png",
    +    # and "diff.png".
    +    for f in os.listdir(output_dir):
    +      filepath = os.path.join(output_dir, f)
    +      if f.startswith('input-'):
    +        given_path = filepath
    +      elif f.startswith('closest-'):
    +        closest_path = filepath
    +      elif f == 'diff.png':
    +        diff_path = filepath
    +    results = self._comparison_results.setdefault(image_name,
    +                                                  self.ComparisonResults())
    +    if given_path:
    +      with output_manager.ArchivedTempfile('given_%s.png' % image_name,
    +                                           'gold_local_diffs',
    +                                           Datatype.PNG) as given_file:
    +        shutil.move(given_path, given_file.name)
    +      results.local_diff_given_image = given_file.Link()
    +    if closest_path:
    +      with output_manager.ArchivedTempfile('closest_%s.png' % image_name,
    +                                           'gold_local_diffs',
    +                                           Datatype.PNG) as closest_file:
    +        shutil.move(closest_path, closest_file.name)
    +      results.local_diff_closest_image = closest_file.Link()
    +    if diff_path:
    +      with output_manager.ArchivedTempfile('diff_%s.png' % image_name,
    +                                           'gold_local_diffs',
    +                                           Datatype.PNG) as diff_file:
    +        shutil.move(diff_path, diff_file.name)
    +      results.local_diff_diff_image = diff_file.Link()
    +
    +  @staticmethod
    +  def _RunCmdForRcAndOutput(cmd):
    +    rc, stdout, _ = cmd_helper.GetCmdStatusOutputAndError(cmd,
    +                                                          merge_stderr=True)
    +    return rc, stdout
    +
    +
    +class AndroidSkiaGoldSessionManager(
    +    skia_gold_session_manager.SkiaGoldSessionManager):
    +  @staticmethod
    +  def GetSessionClass():
    +    return AndroidSkiaGoldSession
    +
    +
    +class AndroidSkiaGoldProperties(skia_gold_properties.SkiaGoldProperties):
    +  @staticmethod
    +  def _GetGitOriginMainHeadSha1():
    +    return repo_utils.GetGitOriginMainHeadSHA1(host_paths.DIR_SOURCE_ROOT)
    diff --git a/android/pylib/utils/gold_utils_test.py b/android/pylib/utils/gold_utils_test.py
    new file mode 100755
    index 000000000000..8a9f8a37aa8d
    --- /dev/null
    +++ b/android/pylib/utils/gold_utils_test.py
    @@ -0,0 +1,123 @@
    +#!/usr/bin/env vpython3
    +# Copyright 2020 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +"""Tests for gold_utils."""
    +
    +#pylint: disable=protected-access
    +
    +import contextlib
    +import os
    +import tempfile
    +import unittest
    +
    +from pylib.constants import host_paths
    +from pylib.utils import gold_utils
    +
    +with host_paths.SysPath(host_paths.BUILD_PATH):
    +  from skia_gold_common import unittest_utils
    +
    +import mock  # pylint: disable=import-error
    +from pyfakefs import fake_filesystem_unittest  # pylint: disable=import-error
    +
    +createSkiaGoldArgs = unittest_utils.createSkiaGoldArgs
    +
    +
    +def assertArgWith(test, arg_list, arg, value):
    +  i = arg_list.index(arg)
    +  test.assertEqual(arg_list[i + 1], value)
    +
    +
    +class AndroidSkiaGoldSessionDiffTest(fake_filesystem_unittest.TestCase):
    +  def setUp(self):
    +    self.setUpPyfakefs()
    +    self._working_dir = tempfile.mkdtemp()
    +    self._json_keys = tempfile.NamedTemporaryFile(delete=False).name
    +
    +  @mock.patch.object(gold_utils.AndroidSkiaGoldSession, '_RunCmdForRcAndOutput')
    +  def test_commandCommonArgs(self, cmd_mock):
    +    cmd_mock.return_value = (None, None)
    +    args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False)
    +    sgp = gold_utils.AndroidSkiaGoldProperties(args)
    +    session = gold_utils.AndroidSkiaGoldSession(self._working_dir,
    +                                                sgp,
    +                                                self._json_keys,
    +                                                'corpus',
    +                                                instance='instance')
    +    session.Diff('name', 'png_file', None)
    +    call_args = cmd_mock.call_args[0][0]
    +    self.assertIn('diff', call_args)
    +    assertArgWith(self, call_args, '--corpus', 'corpus')
    +    # TODO(skbug.com/10610): Remove the -public once we go back to using the
    +    # non-public instance, or add a second test for testing that the correct
    +    # instance is chosen if we decide to support both depending on what the
    +    # user is authenticated for.
    +    assertArgWith(self, call_args, '--instance', 'instance-public')
    +    assertArgWith(self, call_args, '--input', 'png_file')
    +    assertArgWith(self, call_args, '--test', 'name')
    +    # TODO(skbug.com/10611): Re-add this assert and remove the check for the
    +    # absence of the directory once we switch back to using the proper working
    +    # directory.
    +    # assertArgWith(self, call_args, '--work-dir', self._working_dir)
    +    self.assertNotIn(self._working_dir, call_args)
    +    i = call_args.index('--out-dir')
    +    # The output directory should be a subdirectory of the working directory.
    +    self.assertIn(self._working_dir, call_args[i + 1])
    +
    +
    +class AndroidSkiaGoldSessionDiffLinksTest(fake_filesystem_unittest.TestCase):
    +  class FakeArchivedFile:
    +    def __init__(self, path):
    +      self.name = path
    +
    +    def Link(self):
    +      return 'file://' + self.name
    +
    +  class FakeOutputManager:
    +    def __init__(self):
    +      self.output_dir = tempfile.mkdtemp()
    +
    +    @contextlib.contextmanager
    +    def ArchivedTempfile(self, image_name, _, __):
    +      filepath = os.path.join(self.output_dir, image_name)
    +      yield AndroidSkiaGoldSessionDiffLinksTest.FakeArchivedFile(filepath)
    +
    +  def setUp(self):
    +    self.setUpPyfakefs()
    +    self._working_dir = tempfile.mkdtemp()
    +    self._json_keys = tempfile.NamedTemporaryFile(delete=False).name
    +
    +  def test_outputManagerUsed(self):
    +    args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True)
    +    sgp = gold_utils.AndroidSkiaGoldProperties(args)
    +    session = gold_utils.AndroidSkiaGoldSession(self._working_dir, sgp,
    +                                                self._json_keys, None, None)
    +    with open(os.path.join(self._working_dir, 'input-inputhash.png'), 'w') as f:
    +      f.write('input')
    +    with open(os.path.join(self._working_dir, 'closest-closesthash.png'),
    +              'w') as f:
    +      f.write('closest')
    +    with open(os.path.join(self._working_dir, 'diff.png'), 'w') as f:
    +      f.write('diff')
    +
    +    output_manager = AndroidSkiaGoldSessionDiffLinksTest.FakeOutputManager()
    +    session._StoreDiffLinks('foo', output_manager, self._working_dir)
    +
    +    copied_input = os.path.join(output_manager.output_dir, 'given_foo.png')
    +    copied_closest = os.path.join(output_manager.output_dir, 'closest_foo.png')
    +    copied_diff = os.path.join(output_manager.output_dir, 'diff_foo.png')
    +    with open(copied_input) as f:
    +      self.assertEqual(f.read(), 'input')
    +    with open(copied_closest) as f:
    +      self.assertEqual(f.read(), 'closest')
    +    with open(copied_diff) as f:
    +      self.assertEqual(f.read(), 'diff')
    +
    +    self.assertEqual(session.GetGivenImageLink('foo'), 'file://' + copied_input)
    +    self.assertEqual(session.GetClosestImageLink('foo'),
    +                     'file://' + copied_closest)
    +    self.assertEqual(session.GetDiffImageLink('foo'), 'file://' + copied_diff)
    +
    +
    +if __name__ == '__main__':
    +  unittest.main(verbosity=2)
    diff --git a/android/pylib/utils/google_storage_helper.py b/android/pylib/utils/google_storage_helper.py
    new file mode 100644
    index 000000000000..27af7096259c
    --- /dev/null
    +++ b/android/pylib/utils/google_storage_helper.py
    @@ -0,0 +1,128 @@
    +# Copyright 2017 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +"""Helper functions to upload data to Google Storage.
    +
    +Text data should be streamed to logdog using the |logdog_helper| module.
    +Because logdog has no image or HTML viewer, such data should instead be
    +uploaded directly to Google Storage using this module.
    +"""
    +
    +import logging
    +import os
    +import sys
    +import time
    +try:
    +  from urllib.parse import urlparse
    +except ImportError:
    +  from urlparse import urlparse
    +
    +from pylib.constants import host_paths
    +from pylib.utils import decorators
    +
    +if host_paths.DEVIL_PATH not in sys.path:
    +  sys.path.append(host_paths.DEVIL_PATH)
    +from devil.utils import cmd_helper
    +
    +_GSUTIL_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party',
    +                            'catapult', 'third_party', 'gsutil', 'gsutil')
    +_PUBLIC_URL = 'https://storage.googleapis.com/%s/'
    +_AUTHENTICATED_URL = 'https://storage.cloud.google.com/%s/'
    +
    +
    +@decorators.NoRaiseException(default_return_value='')
    +def upload(name, filepath, bucket, gs_args=None, command_args=None,
    +           content_type=None, authenticated_link=True):
    +  """Uploads data to Google Storage.
    +
    +  Args:
    +    name: Name of the file on Google Storage.
    +    filepath: Path to file you want to upload.
    +    bucket: Bucket to upload file to.
    +    gs_args: Additional arguments to pass to gsutil itself.
    +    command_args: Additional arguments to pass to the gsutil 'cp' command.
    +    content_type: Content type to upload as. If not specified, Google storage
    +        will attempt to infer content type from file extension.
    +    authenticated_link: Whether to return a link that requires user to
    +        authenticate with a Google account. Setting this to false will return
    +        a link that does not require user to be signed into Google account but
    +        will only work for completely public storage buckets.
    +  Returns:
    +    Web link to item uploaded to Google Storage bucket.
    +  """
    +  bucket = _format_bucket_name(bucket)
    +
    +  gs_path = 'gs://%s/%s' % (bucket, name)
    +  logging.info('Uploading %s to %s', filepath, gs_path)
    +
    +  cmd = [_GSUTIL_PATH, '-q']
    +  cmd.extend(gs_args or [])
    +  if content_type:
    +    cmd.extend(['-h', 'Content-Type:%s' % content_type])
    +  cmd.extend(['cp'] + (command_args or []) + [filepath, gs_path])
    +
    +  cmd_helper.RunCmd(cmd)
    +
    +  return get_url_link(name, bucket, authenticated_link)
    +
    +
    +@decorators.NoRaiseException(default_return_value='')
    +def read_from_link(link):
    +  # Note that urlparse returns the path with an initial '/', so we only need
    +  # to add one more after the 'gs:'.
    +  gs_path = 'gs:/%s' % urlparse(link).path
    +  cmd = [_GSUTIL_PATH, '-q', 'cat', gs_path]
    +  return cmd_helper.GetCmdOutput(cmd)
    +
    +
    +@decorators.NoRaiseException(default_return_value=False)
    +def exists(name, bucket):
    +  bucket = _format_bucket_name(bucket)
    +  gs_path = 'gs://%s/%s' % (bucket, name)
    +
    +  cmd = [_GSUTIL_PATH, '-q', 'stat', gs_path]
    +  return_code = cmd_helper.RunCmd(cmd)
    +  return return_code == 0
    +
    +
    +# TODO(jbudorick): Delete this function. Only one user of it.
    +def unique_name(basename, suffix='', timestamp=True, device=None):
    +  """Helper function for creating a unique name for a file to store in GS.
    +
    +  Args:
    +    basename: Base of the unique filename.
    +    suffix: Suffix of filename.
    +    timestamp: Whether or not to add a timestamp to the name.
    +    device: Device whose serial number will be appended to the name.
    +  """
    +  return '%s%s%s%s' % (
    +      basename,
    +      '_%s' % time.strftime('%Y_%m_%d_T%H_%M_%S-UTC', time.gmtime())
    +          if timestamp else '',
    +      '_%s' % device.serial if device else '',
    +      suffix)
    +
    +
    +def get_url_link(name, bucket, authenticated_link=True):
    +  """Get url link before/without uploading.
    +
    +  Args:
    +    name: Name of the file on Google Storage.
    +    bucket: Bucket to upload file to.
    +    authenticated_link: Whether to return a link that requires user to
    +        authenticate with a Google account. Setting this to false will return
    +        a link that does not require user to be signed into Google account but
    +        will only work for completely public storage buckets.
    +  Returns:
    +    Web link to item to be uploaded to Google Storage bucket
    +  """
    +  bucket = _format_bucket_name(bucket)
    +  url_template = _AUTHENTICATED_URL if authenticated_link else _PUBLIC_URL
    +  return os.path.join(url_template % bucket, name)
    +
    +
    +def _format_bucket_name(bucket):
    +  if bucket.startswith('gs://'):
    +    bucket = bucket[len('gs://'):]
    +  if bucket.endswith('/'):
    +    bucket = bucket[:-1]
    +  return bucket
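    
    A usage sketch for the helpers above; the bucket and file paths are
    hypothetical, and upload() returns '' rather than raising on failure because
    of its NoRaiseException decorator:
    
        from pylib.utils import google_storage_helper
    
        name = google_storage_helper.unique_name('render_test', suffix='.png')
        link = google_storage_helper.upload(
            name,
            '/tmp/render_test.png',  # hypothetical local file
            'gs://my-test-bucket',   # hypothetical bucket
            content_type='image/png')
        if link and google_storage_helper.exists(name, 'my-test-bucket'):
          print('uploaded to %s' % link)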
    diff --git a/android/pylib/utils/instrumentation_tracing.py b/android/pylib/utils/instrumentation_tracing.py
    new file mode 100644
    index 000000000000..3c9304e90915
    --- /dev/null
    +++ b/android/pylib/utils/instrumentation_tracing.py
    @@ -0,0 +1,206 @@
    +# Copyright 2017 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +"""Functions to instrument all Python function calls.
    +
    +This generates a JSON file readable by Chrome's about:tracing. To use it,
    +either call start_instrumenting and stop_instrumenting at the appropriate times,
    +or use the Instrument context manager.
    +
    +A function is only traced if it is from a Python module that matches at least
    +one regular expression object in to_include, and does not match any in
    +to_exclude. In between the start and stop events, every function call of a
    +function from such a module will be added to the trace.
    +"""
    +
    +import contextlib
    +import functools
    +import inspect
    +import os
    +import re
    +import sys
    +import threading
    +
    +from py_trace_event import trace_event
    +
    +
    +# Modules to exclude by default (to avoid problems like infinite loops)
    +DEFAULT_EXCLUDE = [r'py_trace_event\..*']
    +
    +
    +class _TraceArguments:
    +  def __init__(self):
    +    """Wraps a dictionary to ensure safe evaluation of repr()."""
    +    self._arguments = {}
    +
    +  @staticmethod
    +  def _safeStringify(item):
    +    try:
    +      item_str = repr(item)
    +    except Exception: # pylint: disable=broad-except
    +      try:
    +        item_str = str(item)
    +      except Exception: # pylint: disable=broad-except
    +        item_str = ""
    +    return item_str
    +
    +  def add(self, key, val):
    +    key_str = _TraceArguments._safeStringify(key)
    +    val_str = _TraceArguments._safeStringify(val)
    +
    +    self._arguments[key_str] = val_str
    +
    +  def __repr__(self):
    +    return repr(self._arguments)
    +
    +
    +saved_thread_ids = set()
    +
    +def _shouldTrace(frame, to_include, to_exclude, included, excluded):
    +  """
    +  Decides whether or not the function called in frame should be traced.
    +
    +  Args:
    +    frame: The Python frame object of this function call.
    +    to_include: Set of regex objects for modules which should be traced.
    +    to_exclude: Set of regex objects for modules which should not be traced.
    +    included: Set of module names we've determined should be traced.
    +    excluded: Set of module names we've determined should not be traced.
    +  """
    +  if not inspect.getmodule(frame):
    +    return False
    +
    +  module_name = inspect.getmodule(frame).__name__
    +
    +  if module_name in included:
    +    includes = True
    +  elif to_include:
    +    includes = any(pattern.match(module_name) for pattern in to_include)
    +  else:
    +    includes = True
    +
    +  if includes:
    +    included.add(module_name)
    +  else:
    +    return False
    +
    +  # Find the modules of every function in the stack trace.
    +  frames = inspect.getouterframes(frame)
    +  calling_module_names = [inspect.getmodule(fr[0]).__name__ for fr in frames]
    +
    +  # Return False for anything with an excluded module's function anywhere in the
    +  # stack trace (even if the function itself is in an included module).
    +  if to_exclude:
    +    for calling_module in calling_module_names:
    +      if calling_module in excluded:
    +        return False
    +      for pattern in to_exclude:
    +        if pattern.match(calling_module):
    +          excluded.add(calling_module)
    +          return False
    +
    +  return True
    +
    +def _generate_trace_function(to_include, to_exclude):
    +  to_include = {re.compile(item) for item in to_include}
    +  to_exclude = {re.compile(item) for item in to_exclude}
    +  to_exclude.update({re.compile(item) for item in DEFAULT_EXCLUDE})
    +
    +  included = set()
    +  excluded = set()
    +
    +  tracing_pid = os.getpid()
    +
    +  def traceFunction(frame, event, arg):
    +    del arg
    +
    +    # Don't try to trace in subprocesses.
    +    if os.getpid() != tracing_pid:
    +      sys.settrace(None)
    +      return None
    +
    +    # pylint: disable=unused-argument
    +    if event not in ("call", "return"):
    +      return None
    +
    +    function_name = frame.f_code.co_name
    +    filename = frame.f_code.co_filename
    +    line_number = frame.f_lineno
    +
    +    if _shouldTrace(frame, to_include, to_exclude, included, excluded):
    +      if event == "call":
    +        # This function is beginning; we save the thread name (if that hasn't
    +        # been done), record the Begin event, and return this function to be
    +        # used as the local trace function.
    +
    +        thread_id = threading.current_thread().ident
    +
    +        if thread_id not in saved_thread_ids:
    +          thread_name = threading.current_thread().name
    +
    +          trace_event.trace_set_thread_name(thread_name)
    +
    +          saved_thread_ids.add(thread_id)
    +
    +        arguments = _TraceArguments()
    +        # The function's argument values are stored in the frame's
    +        # |co_varnames| as the first |co_argcount| elements. (Following that
    +        # are local variables.)
    +        for idx in range(frame.f_code.co_argcount):
    +          arg_name = frame.f_code.co_varnames[idx]
    +          arguments.add(arg_name, frame.f_locals[arg_name])
    +        trace_event.trace_begin(function_name, arguments=arguments,
    +                                module=inspect.getmodule(frame).__name__,
    +                                filename=filename, line_number=line_number)
    +
    +        # Return this function, so it gets used as the "local trace function"
    +        # within this function's frame (and in particular, gets called for this
    +        # function's "return" event).
    +        return traceFunction
    +
    +      if event == "return":
    +        trace_event.trace_end(function_name)
    +        return None
    +    return None
    +
    +  return traceFunction
    +
    +
    +def no_tracing(f):
    +  @functools.wraps(f)
    +  def wrapper(*args, **kwargs):
    +    trace_func = sys.gettrace()
    +    try:
    +      sys.settrace(None)
    +      threading.settrace(None)
    +      return f(*args, **kwargs)
    +    finally:
    +      sys.settrace(trace_func)
    +      threading.settrace(trace_func)
    +  return wrapper
    +
    +
    +def start_instrumenting(output_file, to_include=(), to_exclude=()):
    +  """Enable tracing of all function calls (from specified modules)."""
    +  trace_event.trace_enable(output_file)
    +
    +  traceFunc = _generate_trace_function(to_include, to_exclude)
    +  sys.settrace(traceFunc)
    +  threading.settrace(traceFunc)
    +
    +
    +def stop_instrumenting():
    +  trace_event.trace_disable()
    +
    +  sys.settrace(None)
    +  threading.settrace(None)
    +
    +
    +@contextlib.contextmanager
    +def Instrument(output_file, to_include=(), to_exclude=()):
    +  try:
    +    start_instrumenting(output_file, to_include, to_exclude)
    +    yield None
    +  finally:
    +    stop_instrumenting()
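    
    A sketch of the two usage styles the module docstring describes; the output
    paths are hypothetical, and the workload is traced here because '__main__'
    matches the to_include filter:
    
        from pylib.utils import instrumentation_tracing
    
        def workload():
          return sum(range(10))  # stand-in for real test logic
    
        # Context-manager style:
        with instrumentation_tracing.Instrument('/tmp/trace.json',
                                                to_include=[r'__main__']):
          workload()
    
        # Equivalent explicit start/stop pair:
        instrumentation_tracing.start_instrumenting('/tmp/trace2.json',
                                                    to_include=[r'__main__'])
        workload()
        instrumentation_tracing.stop_instrumenting()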
    diff --git a/android/pylib/utils/local_utils.py b/android/pylib/utils/local_utils.py
    new file mode 100644
    index 000000000000..a7d39d6a2305
    --- /dev/null
    +++ b/android/pylib/utils/local_utils.py
    @@ -0,0 +1,19 @@
    +# Copyright 2020 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +"""Utilities for determining if a test is being run locally or not."""
    +
    +import os
    +
    +
    +def IsOnSwarming():
    +  """Determines whether we are on swarming or not.
    +
    +  Returns:
    +    True if the test is being run on swarming, otherwise False.
    +  """
    +  # Look for the presence of the SWARMING_SERVER environment variable as a
    +  # heuristic to determine whether we're running on a workstation or a bot.
    +  # This should always be set on swarming, but would be strange to be set on
    +  # a workstation.
    +  return 'SWARMING_SERVER' in os.environ
    diff --git a/android/pylib/utils/logdog_helper.py b/android/pylib/utils/logdog_helper.py
    new file mode 100644
    index 000000000000..e1562f51522c
    --- /dev/null
    +++ b/android/pylib/utils/logdog_helper.py
    @@ -0,0 +1,96 @@
    +# Copyright 2017 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +"""Helper functions to upload data to logdog."""
    +
    +import logging
    +import os
    +import sys
    +
    +from pylib import constants
    +from pylib.utils import decorators
    +
    +sys.path.insert(
    +    0,
    +    os.path.abspath(
    +        os.path.join(constants.DIR_SOURCE_ROOT, 'third_party', 'logdog')))
    +from logdog import bootstrap  # pylint: disable=import-error
    +
    +
    +@decorators.NoRaiseException(default_return_value='',
    +                             exception_message=('Ignore this exception. '
    +                                                'crbug.com/675666'))
    +def text(name, data, content_type=None):
    +  """Uploads text to logdog.
    +
    +  Args:
    +    name: Name of the logdog stream.
    +    data: String with data you want to upload.
    +    content_type: The optional content type of the stream. If None, a
    +      default content type will be chosen.
    +
    +  Returns:
    +    Link to view uploaded text in logdog viewer.
    +  """
    +  logging.info('Writing text to logdog stream, %s', name)
    +  with get_logdog_client().text(name, content_type=content_type) as stream:
    +    stream.write(data)
    +    return stream.get_viewer_url()
    +
    +
    +@decorators.NoRaiseException(default_return_value=None,
    +                             exception_message=('Ignore this exception. '
    +                                                'crbug.com/675666'))
    +def open_text(name):
    +  """Returns a file like object which you can write to.
    +
    +  Args:
    +    name: Name of the logdog stream.
    +
    +  Returns:
    +    A file-like object. Call close() on the file when done.
    +  """
    +  logging.info('Opening text logdog stream, %s', name)
    +  return get_logdog_client().open_text(name)
    +
    +
    +@decorators.NoRaiseException(default_return_value='',
    +                             exception_message=('Ignore this exception. '
    +                                                'crbug.com/675666'))
    +def binary(name, binary_path):
    +  """Uploads binary to logdog.
    +
    +  Args:
    +    name: Name of the logdog stream.
    +    binary_path: Path to binary you want to upload.
    +
    +  Returns:
    +    Link to view uploaded binary in logdog viewer.
    +  """
    +  logging.info('Writing binary to logdog stream, %s', name)
    +  with get_logdog_client().binary(name) as stream:
+    with open(binary_path, 'rb') as f:
    +      stream.write(f.read())
    +      return stream.get_viewer_url()
    +
    +
    +@decorators.NoRaiseException(default_return_value='',
    +                             exception_message=('Ignore this exception. '
    +                                                'crbug.com/675666'))
    +def get_viewer_url(name):
    +  """Get Logdog viewer URL.
    +
    +  Args:
    +    name: Name of the logdog stream.
    +
    +  Returns:
+    Link to view the uploaded stream in the logdog viewer.
    +  """
    +  return get_logdog_client().get_viewer_url(name)
    +
    +
    +@decorators.Memoize
    +def get_logdog_client():
    +  logging.info('Getting logdog client.')
    +  return bootstrap.ButlerBootstrap.probe().stream_client()
    diff --git a/android/pylib/utils/logging_utils.py b/android/pylib/utils/logging_utils.py
    new file mode 100644
    index 000000000000..fdb0fa6a8b42
    --- /dev/null
    +++ b/android/pylib/utils/logging_utils.py
    @@ -0,0 +1,140 @@
    +# Copyright 2014 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +import contextlib
    +import logging
    +import os
    +
    +from pylib.constants import host_paths
    +
    +_COLORAMA_PATH = os.path.join(
    +    host_paths.DIR_SOURCE_ROOT, 'third_party', 'colorama', 'src')
    +
    +with host_paths.SysPath(_COLORAMA_PATH, position=0):
    +  import colorama
    +
    +BACK = colorama.Back
    +FORE = colorama.Fore
    +STYLE = colorama.Style
    +
    +
    +class _ColorFormatter(logging.Formatter):
    +  # pylint does not see members added dynamically in the constructor.
    +  # pylint: disable=no-member
    +  color_map = {
    +    logging.DEBUG: (FORE.CYAN),
    +    logging.INFO: (),  # Use default style.
    +    logging.WARNING: (FORE.YELLOW),
    +    logging.ERROR: (FORE.RED),
    +    logging.CRITICAL: (BACK.RED),
    +  }
    +
    +  def __init__(self, wrapped_formatter=None):
    +    """Wraps a |logging.Formatter| and adds color."""
    +    super().__init__()
    +    self._wrapped_formatter = wrapped_formatter or logging.Formatter()
    +
    +  #override
    +  def format(self, record):
    +    message = self._wrapped_formatter.format(record)
    +    return self.Colorize(message, record.levelno)
    +
    +  def Colorize(self, message, log_level):
    +    try:
    +      return (''.join(self.color_map[log_level]) + message +
    +              colorama.Style.RESET_ALL)
    +    except KeyError:
    +      return message
    +
    +
    +class ColorStreamHandler(logging.StreamHandler):
    +  """Handler that can be used to colorize logging output.
    +
    +  Example using a specific logger:
    +
    +    logger = logging.getLogger('my_logger')
    +    logger.addHandler(ColorStreamHandler())
    +    logger.info('message')
    +
    +  Example using the root logger:
    +
    +    ColorStreamHandler.MakeDefault()
    +    logging.info('message')
    +
    +  """
    +  def __init__(self, force_color=False):
    +    super().__init__()
    +    self.force_color = force_color
    +    self.setFormatter(logging.Formatter())
    +
    +  @property
    +  def is_tty(self):
    +    try:
    +      isatty = getattr(self.stream, 'isatty')
    +    except AttributeError:
    +      return False
    +    return isatty()
    +
    +  #override
    +  def setFormatter(self, fmt):
    +    if self.force_color or self.is_tty:
    +      fmt = _ColorFormatter(fmt)
    +    super().setFormatter(fmt)
    +
    +  @staticmethod
    +  def MakeDefault(force_color=False):
    +    """
    +     Replaces the default logging handlers with a coloring handler. To use
    +     a colorizing handler at the same time as others, either register them
    +     after this call, or add the ColorStreamHandler on the logger using
    +     Logger.addHandler()
    +
    +     Args:
    +       force_color: Set to True to bypass the tty check and always colorize.
    +     """
    +    # If the existing handlers aren't removed, messages are duplicated
    +    logging.getLogger().handlers = []
    +    logging.getLogger().addHandler(ColorStreamHandler(force_color))
    +
    +
    +@contextlib.contextmanager
    +def OverrideColor(level, color):
    +  """Temporarily override the logging color for a specified level.
    +
    +  Args:
    +    level: logging level whose color gets overridden.
    +    color: tuple of formats to apply to log lines.
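+
+  Example (a sketch) that renders WARNING lines in green for one block:
+
+    with OverrideColor(logging.WARNING, (FORE.GREEN,)):
+      logging.warning('highlighted')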
    +  """
    +  prev_colors = {}
    +  for handler in logging.getLogger().handlers:
    +    if isinstance(handler.formatter, _ColorFormatter):
    +      prev_colors[handler.formatter] = handler.formatter.color_map[level]
    +      handler.formatter.color_map[level] = color
    +  try:
    +    yield
    +  finally:
    +    for formatter, prev_color in prev_colors.items():
    +      formatter.color_map[level] = prev_color
    +
    +
    +@contextlib.contextmanager
    +def SuppressLogging(level=logging.ERROR):
    +  """Momentarilly suppress logging events from all loggers.
    +
    +  TODO(jbudorick): This is not thread safe. Log events from other threads might
    +  also inadvertently disappear.
    +
    +  Example:
    +
    +    with logging_utils.SuppressLogging():
    +      # all but CRITICAL logging messages are suppressed
    +      logging.info('just doing some thing') # not shown
    +      logging.critical('something really bad happened') # still shown
    +
    +  Args:
    +    level: logging events with this or lower levels are suppressed.
    +  """
+  logging.disable(level)
+  try:
+    yield
+  finally:
+    logging.disable(logging.NOTSET)
    diff --git a/android/pylib/utils/maven_downloader.py b/android/pylib/utils/maven_downloader.py
    new file mode 100755
    index 000000000000..fd9d97304f1d
    --- /dev/null
    +++ b/android/pylib/utils/maven_downloader.py
    @@ -0,0 +1,140 @@
    +#!/usr/bin/env vpython3
    +# Copyright 2017 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +import errno
    +import logging
    +import os
    +import shutil
    +import sys
    +
    +sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))
    +import devil_chromium  # pylint: disable=unused-import
    +from devil.utils import cmd_helper
    +from devil.utils import parallelizer
    +
    +
    +def _MakeDirsIfAbsent(path):
    +  try:
    +    os.makedirs(path)
    +  except OSError as err:
    +    if err.errno != errno.EEXIST or not os.path.isdir(path):
    +      raise
    +
    +
    +class MavenDownloader:
    +  '''
    +  Downloads and installs the requested artifacts from the Google Maven repo.
+  The artifacts must be specified in the format
+  "group_id:artifact_id:version:file_type". The file type must be given
+  explicitly: the default is JAR, but most Android libraries are provided
+  as AARs, which would otherwise fail to download. See Install().
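+
+  Example (a sketch; the artifact and target path are made up):
+
+    MavenDownloader().Install('/tmp/gmaven',
+                              ['androidx.core:core:1.0.0:aar'])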
    +  '''
    +
+  # Remote repository to download the artifacts from. The support library and
+  # Google Play services are only distributed there, but third-party libraries
+  # could use Maven Central or JCenter, for example. The default Maven remote
+  # is Maven Central.
    +  _REMOTE_REPO = 'https://maven.google.com'
    +
    +  # Default Maven repository.
    +  _DEFAULT_REPO_PATH = os.path.join(
    +      os.path.expanduser('~'), '.m2', 'repository')
    +
    +  def __init__(self, debug=False):
    +    self._repo_path = MavenDownloader._DEFAULT_REPO_PATH
    +    self._remote_url = MavenDownloader._REMOTE_REPO
    +    self._debug = debug
    +
    +  def Install(self, target_repo, artifacts, include_poms=False):
    +    logging.info('Installing %d artifacts...', len(artifacts))
    +    downloaders = [_SingleArtifactDownloader(self, artifact, target_repo)
    +                   for artifact in artifacts]
    +    if self._debug:
    +      for downloader in downloaders:
    +        downloader.Run(include_poms)
    +    else:
    +      parallelizer.SyncParallelizer(downloaders).Run(include_poms)
    +    logging.info('%d artifacts installed to %s', len(artifacts), target_repo)
    +
    +  @property
    +  def repo_path(self):
    +    return self._repo_path
    +
    +  @property
    +  def remote_url(self):
    +    return self._remote_url
    +
    +  @property
    +  def debug(self):
    +    return self._debug
    +
    +
    +class _SingleArtifactDownloader:
    +  '''Handles downloading and installing a single Maven artifact.'''
    +
    +  _POM_FILE_TYPE = 'pom'
    +
    +  def __init__(self, download_manager, artifact, target_repo):
    +    self._download_manager = download_manager
    +    self._artifact = artifact
    +    self._target_repo = target_repo
    +
    +  def Run(self, include_pom=False):
    +    parts = self._artifact.split(':')
    +    if len(parts) != 4:
    +      raise Exception('Artifacts expected as '
    +                      '"group_id:artifact_id:version:file_type".')
    +    group_id, artifact_id, version, file_type = parts
    +    self._InstallArtifact(group_id, artifact_id, version, file_type)
    +
    +    if include_pom and file_type != _SingleArtifactDownloader._POM_FILE_TYPE:
    +      self._InstallArtifact(group_id, artifact_id, version,
    +                            _SingleArtifactDownloader._POM_FILE_TYPE)
    +
    +  def _InstallArtifact(self, group_id, artifact_id, version, file_type):
    +    logging.debug('Processing %s', self._artifact)
    +
    +    download_relpath = self._DownloadArtifact(
    +        group_id, artifact_id, version, file_type)
    +    logging.debug('Downloaded.')
    +
    +    install_path = self._ImportArtifact(download_relpath)
    +    logging.debug('Installed %s', os.path.relpath(install_path))
    +
    +  def _DownloadArtifact(self, group_id, artifact_id, version, file_type):
    +    '''
+    Downloads the specified artifact via maven to its standard local
+    location; see MavenDownloader._DEFAULT_REPO_PATH.
    +    '''
    +    cmd = ['mvn',
    +           'org.apache.maven.plugins:maven-dependency-plugin:RELEASE:get',
    +           '-DremoteRepositories={}'.format(self._download_manager.remote_url),
    +           '-Dartifact={}:{}:{}:{}'.format(group_id, artifact_id, version,
    +                                           file_type)]
    +
    +    stdout = None if self._download_manager.debug else open(os.devnull, 'wb')
    +
    +    try:
    +      ret_code = cmd_helper.Call(cmd, stdout=stdout)
    +      if ret_code != 0:
    +        raise Exception('Command "{}" failed'.format(' '.join(cmd)))
    +    except OSError as e:
    +      if e.errno == errno.ENOENT:
    +        raise Exception('mvn command not found. Please install Maven.') from e
    +      raise
    +
    +    return os.path.join(os.path.join(*group_id.split('.')),
    +                        artifact_id,
    +                        version,
    +                        '{}-{}.{}'.format(artifact_id, version, file_type))
    +
    +  def _ImportArtifact(self, artifact_path):
    +    src_dir = os.path.join(self._download_manager.repo_path, artifact_path)
    +    dst_dir = os.path.join(self._target_repo, os.path.dirname(artifact_path))
    +
    +    _MakeDirsIfAbsent(dst_dir)
    +    shutil.copy(src_dir, dst_dir)
    +
    +    return dst_dir
    diff --git a/android/pylib/utils/repo_utils.py b/android/pylib/utils/repo_utils.py
    new file mode 100644
    index 000000000000..4e1b7a57c3d6
    --- /dev/null
    +++ b/android/pylib/utils/repo_utils.py
    @@ -0,0 +1,28 @@
    +# Copyright 2013 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +from devil.utils import cmd_helper
    +
    +
    +def GetGitHeadSHA1(in_directory):
    +  """Returns the git hash tag for the given directory.
    +
    +  Args:
    +    in_directory: The directory where git is to be run.
    +  """
    +  command_line = ['git', 'log', '-1', '--pretty=format:%H']
    +  output = cmd_helper.GetCmdOutput(command_line, cwd=in_directory)
    +  return output[0:40]
    +
    +
    +def GetGitOriginMasterHeadSHA1(in_directory):
    +  command_line = ['git', 'rev-parse', 'origin/master']
    +  output = cmd_helper.GetCmdOutput(command_line, cwd=in_directory)
    +  return output.strip()
    +
    +
    +def GetGitOriginMainHeadSHA1(in_directory):
    +  command_line = ['git', 'rev-parse', 'origin/main']
    +  output = cmd_helper.GetCmdOutput(command_line, cwd=in_directory)
    +  return output.strip()
    diff --git a/android/pylib/utils/shared_preference_utils.py b/android/pylib/utils/shared_preference_utils.py
    new file mode 100644
    index 000000000000..93324c6b80f8
    --- /dev/null
    +++ b/android/pylib/utils/shared_preference_utils.py
    @@ -0,0 +1,116 @@
    +# Copyright 2017 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +"""Utility functions for modifying an app's settings file using JSON."""
    +
    +import json
    +import logging
    +
    +
    +def UnicodeToStr(data):
    +  """Recursively converts any Unicode to Python strings.
    +
    +  Args:
    +    data: The data to be converted.
    +
+  Returns:
    +    A copy of the given data, but with instances of Unicode converted to Python
    +    strings.
    +  """
    +  if isinstance(data, dict):
    +    return {
    +        UnicodeToStr(key): UnicodeToStr(value)
    +        for key, value in data.items()
    +    }
    +  if isinstance(data, list):
    +    return [UnicodeToStr(element) for element in data]
    +  try:
    +    # Python-2 compatibility.
    +    if isinstance(data, unicode):
    +      return data.encode('utf-8')
    +  except NameError:
    +    # Strings are already unicode in python3.
    +    pass
    +  return data
    +
    +
    +def ExtractSettingsFromJson(filepath):
    +  """Extracts the settings data from the given JSON file.
    +
    +  Args:
    +    filepath: The path to the JSON file to read.
    +
+  Returns:
    +    The data read from the JSON file with strings converted to Python strings.
    +  """
    +  # json.load() loads strings as unicode, which causes issues when trying
    +  # to edit string values in preference files, so convert to Python strings
    +  with open(filepath) as prefs_file:
    +    return UnicodeToStr(json.load(prefs_file))
    +
    +
    +def ApplySharedPreferenceSetting(shared_pref, setting):
    +  """Applies the given app settings to the given device.
    +
    +  Modifies an installed app's settings by modifying its shared preference
+  settings file. The provided settings data must be a settings dictionary
+  in the following format:
    +  {
    +    "package": "com.example.package",
    +    "filename": "AppSettingsFile.xml",
    +    "supports_encrypted_path": true,
    +    "set": {
    +      "SomeBoolToSet": true,
    +      "SomeStringToSet": "StringValue",
    +    },
    +    "remove": [
    +      "list",
    +      "of",
    +      "keys",
    +      "to",
    +      "remove",
    +    ]
    +  }
    +
    +  Example JSON files that can be read with ExtractSettingsFromJson and passed to
    +  this function are in //chrome/android/shared_preference_files/test/.
    +
    +  Args:
    +    shared_pref: The devil SharedPrefs object for the device the settings will
    +        be applied to.
    +    setting: A settings dictionary to apply.
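+
+  Example (a sketch; the package, filename, and path below are made up):
+
+    from devil.android.sdk import shared_prefs
+    prefs = shared_prefs.SharedPrefs(
+        device, 'com.example.package', 'AppSettingsFile.xml')
+    setting = ExtractSettingsFromJson('/path/to/settings.json')
+    ApplySharedPreferenceSetting(prefs, setting)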
    +  """
    +  shared_pref.Load()
    +  for key in setting.get('remove', []):
    +    try:
    +      shared_pref.Remove(key)
    +    except KeyError:
    +      logging.warning("Attempted to remove non-existent key %s", key)
    +  for key, value in setting.get('set', {}).items():
    +    is_set = False
    +    if not is_set and isinstance(value, bool):
    +      shared_pref.SetBoolean(key, value)
    +      is_set = True
    +    try:
    +      # Python-2 compatibility.
    +      if not is_set and isinstance(value, basestring):
    +        shared_pref.SetString(key, value)
    +        is_set = True
    +      if not is_set and isinstance(value, (long, int)):
    +        shared_pref.SetLong(key, value)
    +        is_set = True
    +    except NameError:
    +      if not is_set and isinstance(value, str):
    +        shared_pref.SetString(key, value)
    +        is_set = True
    +      if not is_set and isinstance(value, int):
    +        shared_pref.SetLong(key, value)
    +        is_set = True
    +    if not is_set and isinstance(value, list):
    +      shared_pref.SetStringSet(key, value)
    +      is_set = True
    +    if not is_set:
    +      raise ValueError("Given invalid value type %s for key %s" % (
    +          str(type(value)), key))
    +  shared_pref.Commit()
    diff --git a/android/pylib/utils/simpleperf.py b/android/pylib/utils/simpleperf.py
    new file mode 100644
    index 000000000000..f096093c7f1e
    --- /dev/null
    +++ b/android/pylib/utils/simpleperf.py
    @@ -0,0 +1,293 @@
    +# Copyright 2018 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +import contextlib
    +import logging
    +import os
    +import shutil
    +import subprocess
    +import sys
    +import tempfile
    +
    +from devil import devil_env
    +from devil.android import device_signal, device_errors
    +from devil.android.sdk import version_codes
    +from pylib import constants
    +
    +
    +def _ProcessType(proc):
    +  _, _, suffix = proc.name.partition(':')
    +  if not suffix:
    +    return 'browser'
    +  if suffix.startswith('sandboxed_process'):
    +    return 'renderer'
    +  if suffix.startswith('privileged_process'):
    +    return 'gpu'
    +  return None
    +
    +
    +def _GetSpecifiedPID(device, package_name, process_specifier):
    +  if process_specifier is None:
    +    return None
    +
    +  # Check for numeric PID
    +  try:
    +    pid = int(process_specifier)
    +    return pid
    +  except ValueError:
    +    pass
    +
    +  # Check for exact process name; can be any of these formats:
+  #   <package>:<process name>, i.e. 'org.chromium.chrome:sandboxed_process0'
+  #   :<process name>, i.e. ':sandboxed_process0'
+  #   <process name>, i.e. 'sandboxed_process0'
    +  full_process_name = process_specifier
    +  if process_specifier.startswith(':'):
    +    full_process_name = package_name + process_specifier
    +  elif ':' not in process_specifier:
    +    full_process_name = '%s:%s' % (package_name, process_specifier)
    +  matching_processes = device.ListProcesses(full_process_name)
    +  if len(matching_processes) == 1:
    +    return matching_processes[0].pid
    +  if len(matching_processes) > 1:
    +    raise RuntimeError('Found %d processes with name "%s".' % (
    +        len(matching_processes), process_specifier))
    +
    +  # Check for process type (i.e. 'renderer')
    +  package_processes = device.ListProcesses(package_name)
    +  matching_processes = [p for p in package_processes if (
    +      _ProcessType(p) == process_specifier)]
    +  if process_specifier == 'renderer' and len(matching_processes) > 1:
    +    raise RuntimeError('Found %d renderer processes; please re-run with only '
    +                       'one open tab.' % len(matching_processes))
    +  if len(matching_processes) != 1:
    +    raise RuntimeError('Found %d processes of type "%s".' % (
    +        len(matching_processes), process_specifier))
    +  return matching_processes[0].pid
    +
    +
    +def _ThreadsForProcess(device, pid):
    +  # The thread list output format for 'ps' is the same regardless of version.
    +  # Here's the column headers, and a sample line for a thread belonging to
    +  # pid 12345 (note that the last few columns are not aligned with headers):
    +  #
    +  # USER        PID   TID  PPID     VSZ    RSS WCHAN            ADDR S CMD
    +  # u0_i101   12345 24680   567 1357902  97531 futex_wait_queue_me e85acd9c S \
    +  #     CrRendererMain
    +  if device.build_version_sdk >= version_codes.OREO:
    +    pid_regex = (
    +        r'^[[:graph:]]\{1,\}[[:blank:]]\{1,\}%d[[:blank:]]\{1,\}' % pid)
    +    ps_cmd = "ps -T -e | grep '%s'" % pid_regex
    +    ps_output_lines = device.RunShellCommand(
    +        ps_cmd, shell=True, check_return=True)
    +  else:
    +    ps_cmd = ['ps', '-p', str(pid), '-t']
    +    ps_output_lines = device.RunShellCommand(ps_cmd, check_return=True)
    +  result = []
    +  for l in ps_output_lines:
    +    fields = l.split()
    +    # fields[2] is tid, fields[-1] is thread name. Output may include an entry
    +    # for the process itself with tid=pid; omit that one.
    +    if fields[2] == str(pid):
    +      continue
    +    result.append((int(fields[2]), fields[-1]))
    +  return result
    +
    +
    +def _ThreadType(thread_name):
    +  if not thread_name:
    +    return 'unknown'
    +  if (thread_name.startswith('Chrome_ChildIO') or
    +      thread_name.startswith('Chrome_IO')):
    +    return 'io'
    +  if thread_name.startswith('Compositor'):
    +    return 'compositor'
    +  if (thread_name.startswith('ChildProcessMai') or
    +      thread_name.startswith('CrGpuMain') or
    +      thread_name.startswith('CrRendererMain')):
    +    return 'main'
    +  if thread_name.startswith('RenderThread'):
    +    return 'render'
+  return 'other'
    +
    +
    +def _GetSpecifiedTID(device, pid, thread_specifier):
    +  if thread_specifier is None:
    +    return None
    +
    +  # Check for numeric TID
    +  try:
    +    tid = int(thread_specifier)
    +    return tid
    +  except ValueError:
    +    pass
    +
    +  # Check for thread type
    +  if pid is not None:
    +    matching_threads = [t for t in _ThreadsForProcess(device, pid) if (
    +        _ThreadType(t[1]) == thread_specifier)]
    +    if len(matching_threads) != 1:
    +      raise RuntimeError('Found %d threads of type "%s".' % (
    +          len(matching_threads), thread_specifier))
    +    return matching_threads[0][0]
    +
    +  return None
    +
    +
    +def PrepareDevice(device):
    +  if device.build_version_sdk < version_codes.NOUGAT:
    +    raise RuntimeError('Simpleperf profiling is only supported on Android N '
    +                       'and later.')
    +
    +  # Necessary for profiling
    +  # https://android-review.googlesource.com/c/platform/system/sepolicy/+/234400
    +  device.SetProp('security.perf_harden', '0')
    +
    +
    +def InstallSimpleperf(device, package_name):
    +  package_arch = device.GetPackageArchitecture(package_name) or 'armeabi-v7a'
    +  host_simpleperf_path = devil_env.config.LocalPath('simpleperf', package_arch)
    +  if not host_simpleperf_path:
    +    raise Exception('Could not get path to simpleperf executable on host.')
    +  device_simpleperf_path = '/'.join(
    +      ('/data/local/tmp/profilers', package_arch, 'simpleperf'))
    +  device.PushChangedFiles([(host_simpleperf_path, device_simpleperf_path)])
    +  return device_simpleperf_path
    +
    +
    +@contextlib.contextmanager
    +def RunSimpleperf(device, device_simpleperf_path, package_name,
    +                  process_specifier, thread_specifier, events,
    +                  profiler_args, host_out_path):
    +  pid = _GetSpecifiedPID(device, package_name, process_specifier)
    +  tid = _GetSpecifiedTID(device, pid, thread_specifier)
    +  if pid is None and tid is None:
    +    raise RuntimeError('Could not find specified process/thread running on '
    +                       'device. Make sure the apk is already running before '
    +                       'attempting to profile.')
    +  profiler_args = list(profiler_args)
    +  if profiler_args and profiler_args[0] == 'record':
    +    profiler_args.pop(0)
    +  profiler_args.extend(('-e', events))
    +  if '--call-graph' not in profiler_args and '-g' not in profiler_args:
    +    profiler_args.append('-g')
    +  if '-f' not in profiler_args:
    +    profiler_args.extend(('-f', '1000'))
    +
    +  device_out_path = '/data/local/tmp/perf.data'
    +  should_remove_device_out_path = True
    +  if '-o' in profiler_args:
    +    device_out_path = profiler_args[profiler_args.index('-o') + 1]
    +    should_remove_device_out_path = False
    +  else:
    +    profiler_args.extend(('-o', device_out_path))
    +
    +  # Remove the default output to avoid confusion if simpleperf opts not
    +  # to update the file.
    +  file_exists = True
    +  try:
+    device.adb.Shell('readlink -e ' + device_out_path)
    +  except device_errors.AdbCommandFailedError:
    +    file_exists = False
    +  if file_exists:
    +    logging.warning('%s output file already exists on device', device_out_path)
    +    if not should_remove_device_out_path:
    +      raise RuntimeError('Specified output file \'{}\' already exists, not '
    +                         'continuing'.format(device_out_path))
    +  device.adb.Shell('rm -f ' + device_out_path)
    +
    +  if tid:
    +    profiler_args.extend(('-t', str(tid)))
    +  else:
    +    profiler_args.extend(('-p', str(pid)))
    +
    +  adb_shell_simpleperf_process = device.adb.StartShell(
    +      [device_simpleperf_path, 'record'] + profiler_args)
    +
    +  completed = False
    +  try:
    +    yield
    +    completed = True
    +
    +  finally:
    +    device.KillAll('simpleperf', signum=device_signal.SIGINT, blocking=True,
    +                   quiet=True)
    +    if completed:
    +      adb_shell_simpleperf_process.wait()
    +      ret = adb_shell_simpleperf_process.returncode
    +      if ret == 0:
    +        # Successfully gathered a profile
    +        device.PullFile(device_out_path, host_out_path)
    +      else:
+        logging.warning('simpleperf exited unusually, expected exit 0, got %d',
+                        ret)
    +        stdout, stderr = adb_shell_simpleperf_process.communicate()
    +        logging.info('stdout: \'%s\', stderr: \'%s\'', stdout, stderr)
    +        raise RuntimeError('simpleperf exited with unexpected code {} '
    +                           '(run with -vv for full stdout/stderr)'.format(ret))
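+
+
+# A minimal usage sketch for RunSimpleperf (package name and workload are
+# illustrative): install simpleperf, profile the browser process while the
+# workload runs, then find the profile at host_out_path once the block exits.
+#
+#   path = InstallSimpleperf(device, 'com.example.app')
+#   with RunSimpleperf(device, path, 'com.example.app', 'browser', None,
+#                      'cpu-cycles', [], '/tmp/perf.data'):
+#     RunWorkload()  # hypothetical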
    +
    +
    +def ConvertSimpleperfToPprof(simpleperf_out_path, build_directory,
    +                             pprof_out_path):
    +  # The simpleperf scripts require the unstripped libs to be installed in the
    +  # same directory structure as the libs on the device. Much of the logic here
    +  # is just figuring out and creating the necessary directory structure, and
    +  # symlinking the unstripped shared libs.
    +
    +  # Get the set of libs that we can symbolize
    +  unstripped_lib_dir = os.path.join(build_directory, 'lib.unstripped')
    +  unstripped_libs = set(
    +      f for f in os.listdir(unstripped_lib_dir) if f.endswith('.so'))
    +
    +  # report.py will show the directory structure above the shared libs;
    +  # that is the directory structure we need to recreate on the host.
    +  script_dir = devil_env.config.LocalPath('simpleperf_scripts')
    +  report_path = os.path.join(script_dir, 'report.py')
    +  report_cmd = [sys.executable, report_path, '-i', simpleperf_out_path]
    +  device_lib_path = None
    +  output = subprocess.check_output(report_cmd, stderr=subprocess.STDOUT)
    +  if isinstance(output, bytes):
    +    output = output.decode()
    +  for line in output.splitlines():
    +    fields = line.split()
    +    if len(fields) < 5:
    +      continue
    +    shlib_path = fields[4]
    +    shlib_dirname, shlib_basename = shlib_path.rpartition('/')[::2]
    +    if shlib_basename in unstripped_libs:
    +      device_lib_path = shlib_dirname
    +      break
    +  if not device_lib_path:
    +    raise RuntimeError('No chrome-related symbols in profiling data in %s. '
    +                       'Either the process was idle for the entire profiling '
    +                       'period, or something went very wrong (and you should '
    +                       'file a bug at crbug.com/new with component '
    +                       'Speed>Tracing, and assign it to szager@chromium.org).'
    +                       % simpleperf_out_path)
    +
    +  # Recreate the directory structure locally, and symlink unstripped libs.
    +  processing_dir = tempfile.mkdtemp()
    +  try:
    +    processing_lib_dir = os.path.join(
    +        processing_dir, 'binary_cache', device_lib_path.lstrip('/'))
    +    os.makedirs(processing_lib_dir)
    +    for lib in unstripped_libs:
    +      unstripped_lib_path = os.path.join(unstripped_lib_dir, lib)
    +      processing_lib_path = os.path.join(processing_lib_dir, lib)
    +      os.symlink(unstripped_lib_path, processing_lib_path)
    +
    +    # Run the script to annotate symbols and convert from simpleperf format to
    +    # pprof format.
    +    pprof_converter_script = os.path.join(
    +        script_dir, 'pprof_proto_generator.py')
    +    pprof_converter_cmd = [
    +        sys.executable, pprof_converter_script, '-i', simpleperf_out_path, '-o',
    +        os.path.abspath(pprof_out_path), '--ndk_path',
    +        constants.ANDROID_NDK_ROOT
    +    ]
    +    subprocess.check_output(pprof_converter_cmd, stderr=subprocess.STDOUT,
    +                            cwd=processing_dir)
    +  finally:
    +    shutil.rmtree(processing_dir, ignore_errors=True)
    diff --git a/android/pylib/utils/test_filter.py b/android/pylib/utils/test_filter.py
    new file mode 100644
    index 000000000000..c532f324e175
    --- /dev/null
    +++ b/android/pylib/utils/test_filter.py
    @@ -0,0 +1,145 @@
    +# Copyright 2018 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +import os
    +import re
    +
    +
    +_CMDLINE_NAME_SEGMENT_RE = re.compile(
    +    r' with(?:out)? \{[^\}]*\}')
    +
    +
    +def ParseFilterFile(input_lines):
    +  """Converts test filter file contents to positive and negative pattern lists.
    +
    +  See //testing/buildbot/filters/README.md for description of the
    +  syntax that |input_lines| are expected to follow.
    +
    +  See
    +  https://github.com/google/googletest/blob/main/docs/advanced.md#running-a-subset-of-the-tests
    +  for description of the syntax that --gtest_filter argument should follow.
    +
    +  Args:
    +    input_lines: An iterable (e.g. a list or a file) containing input lines.
    +  Returns:
    +    tuple containing the lists of positive patterns and negative patterns
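+
+  Example:
+
+    ParseFilterFile(['Foo.*', '-Foo.bar  # flaky'])
+    # -> (['Foo.*'], ['Foo.bar'])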
    +  """
    +  # Strip comments and whitespace from each line and filter non-empty lines.
    +  stripped_lines = (l.split('#', 1)[0].strip() for l in input_lines)
    +  filter_lines = [l for l in stripped_lines if l]
    +
    +  # Split the tests into positive and negative patterns (gtest treats
    +  # every pattern after the first '-' sign as an exclusion).
    +  positive_patterns = [l for l in filter_lines if l[0] != '-']
    +  negative_patterns = [l[1:] for l in filter_lines if l[0] == '-']
    +  return positive_patterns, negative_patterns
    +
    +
    +def AddFilterOptions(parser):
    +  """Adds filter command-line options to the provided parser.
    +
    +  Args:
    +    parser: an argparse.ArgumentParser instance.
    +  """
    +  parser.add_argument(
    +      # Deprecated argument.
    +      '--gtest-filter-file',
    +      # New argument.
    +      '--test-launcher-filter-file',
    +      action='append',
    +      dest='test_filter_files',
    +      help='Path to file that contains googletest-style filter strings. '
    +      'See also //testing/buildbot/filters/README.md.')
    +
    +  filter_group = parser.add_mutually_exclusive_group()
    +  filter_group.add_argument('-f',
    +                            '--test-filter',
    +                            '--gtest_filter',
    +                            '--gtest-filter',
    +                            dest='test_filters',
    +                            action='append',
    +                            help='googletest-style filter string.',
    +                            default=os.environ.get('GTEST_FILTER'))
    +  filter_group.add_argument(
    +      '--isolated-script-test-filter',
    +      action='append',
    +      dest='isolated_script_test_filters',
    +      help='isolated script filter string. '
    +      'Like gtest filter strings, but with :: separators instead of :')
    +
    +
    +def AppendPatternsToFilter(test_filter, positive_patterns=None,
    +                           negative_patterns=None):
    +  """Returns a test-filter string with additional patterns.
    +
    +  Args:
    +    test_filter: test filter string
    +    positive_patterns: list of positive patterns to add to string
    +    negative_patterns: list of negative patterns to add to string
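+
+  Example:
+
+    AppendPatternsToFilter('A.*-B.*', ['C.*'], ['D.*'])
+    # -> 'A.*:C.*-B.*:D.*'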
    +  """
    +  positives = []
    +  negatives = []
    +  positive = ''
    +  negative = ''
    +
    +  split_filter = test_filter.split('-', 1)
    +  if len(split_filter) == 1:
    +    positive = split_filter[0]
    +  else:
    +    positive, negative = split_filter
    +
    +  positives += [f for f in positive.split(':') if f]
    +  negatives += [f for f in negative.split(':') if f]
    +
    +  positives += positive_patterns if positive_patterns else []
    +  negatives += negative_patterns if negative_patterns else []
    +
    +  final_filter = ':'.join([p.replace('#', '.') for p in positives])
    +  if negatives:
    +    final_filter += '-' + ':'.join([n.replace('#', '.') for n in negatives])
    +  return final_filter
    +
    +
    +def HasPositivePatterns(test_filter):
    +  """Returns True if test_filter contains a positive pattern, else False
    +
    +  Args:
    +    test_filter: test-filter style string
    +  """
    +  return bool(len(test_filter) > 0 and test_filter[0] != '-')
    +
    +
    +def InitializeFiltersFromArgs(args):
    +  """Returns a filter string from the command-line option values.
    +
    +  Args:
+    args: an argparse.Namespace instance resulting from using a parser
+      to which the filter options above were added.
    +  """
    +  test_filters = []
    +  if args.isolated_script_test_filters:
    +    args.test_filters = [
    +        isolated_script_test_filter.replace('::', ':')
    +        for isolated_script_test_filter in args.isolated_script_test_filters
    +    ]
    +  if args.test_filters:
    +    for filt in args.test_filters:
    +      test_filters.append(
    +          _CMDLINE_NAME_SEGMENT_RE.sub('', filt.replace('#', '.')))
    +
    +  if not args.test_filter_files:
    +    return test_filters
    +
+  # Each element of args.test_filter_files may itself be a ';'-separated list
+  # of filter files.
+  for test_filter_files in args.test_filter_files:
+    for test_filter_file in test_filter_files.split(';'):
    +      with open(test_filter_file, 'r') as f:
    +        positive_patterns, negative_patterns = ParseFilterFile(f)
    +        filter_string = AppendPatternsToFilter('', positive_patterns,
    +                                               negative_patterns)
    +        test_filters.append(filter_string)
    +
    +  return test_filters
    diff --git a/android/pylib/utils/test_filter_test.py b/android/pylib/utils/test_filter_test.py
    new file mode 100755
    index 000000000000..fa0718257b06
    --- /dev/null
    +++ b/android/pylib/utils/test_filter_test.py
    @@ -0,0 +1,256 @@
    +#!/usr/bin/env vpython3
    +# Copyright 2018 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +import argparse
    +import os
    +import sys
    +import tempfile
    +import unittest
    +
    +from pylib.utils import test_filter
    +
    +class ParseFilterFileTest(unittest.TestCase):
    +
    +  def testParseFilterFile_commentsAndBlankLines(self):
    +    input_lines = [
    +      'positive1',
    +      '# comment',
    +      'positive2  # Another comment',
+      '',
    +      'positive3'
    +    ]
    +    actual = test_filter.ParseFilterFile(input_lines)
    +    expected = ['positive1', 'positive2', 'positive3'], []
    +    self.assertEqual(expected, actual)
    +
    +  def testParseFilterFile_onlyPositive(self):
    +    input_lines = [
    +      'positive1',
    +      'positive2'
    +    ]
    +    actual = test_filter.ParseFilterFile(input_lines)
    +    expected = ['positive1', 'positive2'], []
    +    self.assertEqual(expected, actual)
    +
    +  def testParseFilterFile_onlyNegative(self):
    +    input_lines = [
    +      '-negative1',
    +      '-negative2'
    +    ]
    +    actual = test_filter.ParseFilterFile(input_lines)
    +    expected = [], ['negative1', 'negative2']
    +    self.assertEqual(expected, actual)
    +
    +  def testParseFilterFile_positiveAndNegative(self):
    +    input_lines = [
    +      'positive1',
    +      'positive2',
    +      '-negative1',
    +      '-negative2'
    +    ]
    +    actual = test_filter.ParseFilterFile(input_lines)
    +    expected = ['positive1', 'positive2'], ['negative1', 'negative2']
    +    self.assertEqual(expected, actual)
    +
    +
    +class InitializeFilterFromArgsTest(unittest.TestCase):
    +
    +  def testInitializeBasicFilter(self):
    +    parser = argparse.ArgumentParser()
    +    test_filter.AddFilterOptions(parser)
    +    args = parser.parse_args([
    +        '--test-filter',
    +        'FooTest.testFoo:BarTest.testBar'])
    +    expected = ['FooTest.testFoo:BarTest.testBar']
    +    actual = test_filter.InitializeFiltersFromArgs(args)
    +    self.assertEqual(actual, expected)
    +
    +  def testInitializeJavaStyleFilter(self):
    +    parser = argparse.ArgumentParser()
    +    test_filter.AddFilterOptions(parser)
    +    args = parser.parse_args([
    +        '--test-filter',
    +        'FooTest#testFoo:BarTest#testBar'])
    +    expected = ['FooTest.testFoo:BarTest.testBar']
    +    actual = test_filter.InitializeFiltersFromArgs(args)
    +    self.assertEqual(actual, expected)
    +
    +  def testInitializeBasicIsolatedScript(self):
    +    parser = argparse.ArgumentParser()
    +    test_filter.AddFilterOptions(parser)
    +    args = parser.parse_args([
    +        '--isolated-script-test-filter',
    +        'FooTest.testFoo::BarTest.testBar'])
    +    expected = ['FooTest.testFoo:BarTest.testBar']
    +    actual = test_filter.InitializeFiltersFromArgs(args)
    +    self.assertEqual(actual, expected)
    +
    +  @unittest.skipIf(os.name == "nt", "Opening NamedTemporaryFile by name "
    +                   "doesn't work in Windows.")
    +  def testFilterArgWithPositiveFilterInFilterFile(self):
    +    parser = argparse.ArgumentParser()
    +    test_filter.AddFilterOptions(parser)
    +    with tempfile.NamedTemporaryFile(mode='w') as tmp_file:
    +      tmp_file.write('positive1\npositive2\n-negative2\n-negative3\n')
    +      tmp_file.seek(0)
    +      args = parser.parse_args([
    +          '--test-filter=-negative1',
    +          '--test-launcher-filter-file',
    +          tmp_file.name])
    +      expected = ['-negative1', 'positive1:positive2-negative2:negative3']
    +      actual = test_filter.InitializeFiltersFromArgs(args)
    +      self.assertEqual(actual, expected)
    +
    +  @unittest.skipIf(os.name == "nt", "Opening NamedTemporaryFile by name "
    +                   "doesn't work in Windows.")
    +  def testFilterFileWithPositiveFilterInFilterArg(self):
    +    parser = argparse.ArgumentParser()
    +    test_filter.AddFilterOptions(parser)
    +    with tempfile.NamedTemporaryFile(mode='w') as tmp_file:
    +      tmp_file.write('-negative2\n-negative3\n')
    +      tmp_file.seek(0)
    +      args = parser.parse_args([
    +          '--test-filter',
    +          'positive1:positive2-negative1',
    +          '--test-launcher-filter-file',
    +          tmp_file.name])
    +      expected = ['positive1:positive2-negative1', '-negative2:negative3']
    +      actual = test_filter.InitializeFiltersFromArgs(args)
    +      self.assertEqual(actual, expected)
    +
    +  @unittest.skipIf(os.name == "nt", "Opening NamedTemporaryFile by name "
    +                   "doesn't work in Windows.")
    +  def testPositiveFilterInBothFileAndArg(self):
    +    parser = argparse.ArgumentParser()
    +    test_filter.AddFilterOptions(parser)
    +    with tempfile.NamedTemporaryFile(mode='w') as tmp_file:
    +      tmp_file.write('positive2-negative2\n')
    +      tmp_file.seek(0)
    +      args = parser.parse_args([
    +          '--test-filter', 'positive1-negative1', '--test-launcher-filter-file',
    +          tmp_file.name
    +      ])
    +      expected = ['positive1-negative1', 'positive2-negative2']
    +      actual = test_filter.InitializeFiltersFromArgs(args)
    +      self.assertEqual(actual, expected)
    +
    +  @unittest.skipIf(os.name == "nt", "Opening NamedTemporaryFile by name "
    +                   "doesn't work in Windows.")
    +  def testFilterArgWithFilterFileAllNegative(self):
    +    parser = argparse.ArgumentParser()
    +    test_filter.AddFilterOptions(parser)
    +    with tempfile.NamedTemporaryFile(mode='w') as tmp_file:
    +      tmp_file.write('-negative3\n-negative4\n')
    +      tmp_file.seek(0)
    +      args = parser.parse_args([
    +          '--test-filter=-negative1:negative2',
    +          '--test-launcher-filter-file',
    +          tmp_file.name])
    +      expected = ['-negative1:negative2', '-negative3:negative4']
    +      actual = test_filter.InitializeFiltersFromArgs(args)
    +      self.assertEqual(actual, expected)
    +
    +
    +class AppendPatternsToFilter(unittest.TestCase):
    +  def testAllEmpty(self):
    +    expected = ''
    +    actual = test_filter.AppendPatternsToFilter('', [], [])
    +    self.assertEqual(actual, expected)
    +
    +  def testAppendOnlyPositiveToEmptyFilter(self):
    +    expected = 'positive'
    +    actual = test_filter.AppendPatternsToFilter('', ['positive'])
    +    self.assertEqual(actual, expected)
    +
    +  def testAppendOnlyNegativeToEmptyFilter(self):
    +    expected = '-negative'
    +    actual = test_filter.AppendPatternsToFilter('',
    +                                                negative_patterns=['negative'])
    +    self.assertEqual(actual, expected)
    +
    +  def testAppendToEmptyFilter(self):
    +    expected = 'positive-negative'
    +    actual = test_filter.AppendPatternsToFilter('', ['positive'], ['negative'])
    +    self.assertEqual(actual, expected)
    +
    +  def testAppendToPositiveOnlyFilter(self):
    +    expected = 'positive1:positive2-negative'
    +    actual = test_filter.AppendPatternsToFilter('positive1', ['positive2'],
    +                                                ['negative'])
    +    self.assertEqual(actual, expected)
    +
    +  def testAppendToNegativeOnlyFilter(self):
    +    expected = 'positive-negative1:negative2'
    +    actual = test_filter.AppendPatternsToFilter('-negative1', ['positive'],
    +                                                ['negative2'])
    +    self.assertEqual(actual, expected)
    +
    +  def testAppendPositiveToFilter(self):
    +    expected = 'positive1:positive2-negative1'
    +    actual = test_filter.AppendPatternsToFilter('positive1-negative1',
    +                                                ['positive2'])
    +    self.assertEqual(actual, expected)
    +
    +  def testAppendNegativeToFilter(self):
    +    expected = 'positive1-negative1:negative2'
    +    actual = test_filter.AppendPatternsToFilter('positive1-negative1',
    +                                                negative_patterns=['negative2'])
    +    self.assertEqual(actual, expected)
    +
    +  def testAppendBothToFilter(self):
    +    expected = 'positive1:positive2-negative1:negative2'
    +    actual = test_filter.AppendPatternsToFilter('positive1-negative1',
    +                                                positive_patterns=['positive2'],
    +                                                negative_patterns=['negative2'])
    +    self.assertEqual(actual, expected)
    +
    +  def testAppendMultipleToFilter(self):
    +    expected = 'positive1:positive2:positive3-negative1:negative2:negative3'
    +    actual = test_filter.AppendPatternsToFilter('positive1-negative1',
    +                                                ['positive2', 'positive3'],
    +                                                ['negative2', 'negative3'])
    +    self.assertEqual(actual, expected)
    +
    +  def testRepeatedAppendToFilter(self):
    +    expected = 'positive1:positive2:positive3-negative1:negative2:negative3'
    +    filter_string = test_filter.AppendPatternsToFilter('positive1-negative1',
    +                                                       ['positive2'],
    +                                                       ['negative2'])
    +    actual = test_filter.AppendPatternsToFilter(filter_string, ['positive3'],
    +                                                ['negative3'])
    +    self.assertEqual(actual, expected)
    +
    +  def testAppendHashSeparatedPatternsToFilter(self):
    +    expected = 'positive.test1:positive.test2-negative.test1:negative.test2'
    +    actual = test_filter.AppendPatternsToFilter('positive#test1-negative#test1',
+                                                ['positive#test2'],
+                                                ['negative#test2'])
    +    self.assertEqual(actual, expected)
    +
    +
    +class HasPositivePatterns(unittest.TestCase):
    +  def testEmpty(self):
    +    expected = False
    +    actual = test_filter.HasPositivePatterns('')
    +    self.assertEqual(actual, expected)
    +
    +  def testHasOnlyPositive(self):
    +    expected = True
    +    actual = test_filter.HasPositivePatterns('positive')
    +    self.assertEqual(actual, expected)
    +
    +  def testHasOnlyNegative(self):
    +    expected = False
    +    actual = test_filter.HasPositivePatterns('-negative')
    +    self.assertEqual(actual, expected)
    +
    +  def testHasBoth(self):
    +    expected = True
    +    actual = test_filter.HasPositivePatterns('positive-negative')
    +    self.assertEqual(actual, expected)
    +
    +
    +if __name__ == '__main__':
    +  sys.exit(unittest.main())
    diff --git a/android/pylib/utils/time_profile.py b/android/pylib/utils/time_profile.py
    new file mode 100644
    index 000000000000..54b96c290a1d
    --- /dev/null
    +++ b/android/pylib/utils/time_profile.py
    @@ -0,0 +1,45 @@
    +# Copyright 2013 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +import logging
    +import time
    +
    +
    +class TimeProfile:
    +  """Class for simple profiling of action, with logging of cost."""
    +
    +  def __init__(self, description='operation'):
    +    self._starttime = None
    +    self._endtime = None
    +    self._description = description
    +    self.Start()
    +
    +  def Start(self):
    +    self._starttime = time.time()
    +    self._endtime = None
    +
    +  def GetDelta(self):
    +    """Returns the rounded delta.
    +
    +    Also stops the timer if Stop() has not already been called.
    +    """
    +    if self._endtime is None:
    +      self.Stop(log=False)
    +    delta = self._endtime - self._starttime
    +    delta = round(delta, 2) if delta < 10 else round(delta, 1)
    +    return delta
    +
    +  def LogResult(self):
    +    """Logs the result."""
    +    logging.info('%s seconds to perform %s', self.GetDelta(), self._description)
    +
    +  def Stop(self, log=True):
    +    """Stop profiling.
    +
    +    Args:
    +      log: Log the delta (defaults to true).
    +    """
    +    self._endtime = time.time()
    +    if log:
    +      self.LogResult()
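+
+
+# A minimal usage sketch (the profiled action is hypothetical):
+#
+#   timer = TimeProfile('apk install')
+#   InstallApk()
+#   timer.Stop()  # logs e.g. '3.21 seconds to perform apk install'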
    diff --git a/android/pylib/utils/xvfb.py b/android/pylib/utils/xvfb.py
    new file mode 100644
    index 000000000000..6ab24afc75d1
    --- /dev/null
    +++ b/android/pylib/utils/xvfb.py
    @@ -0,0 +1,58 @@
    +# Copyright 2013 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +# pylint: disable=W0702
    +
    +import os
    +import signal
    +import subprocess
    +import sys
    +import time
    +
    +
    +def _IsLinux():
    +  """Return True if on Linux; else False."""
    +  return sys.platform.startswith('linux')
    +
    +
    +class Xvfb:
    +  """Class to start and stop Xvfb if relevant.  Nop if not Linux."""
    +
    +  def __init__(self):
    +    self._pid = 0
    +
    +  def Start(self):
    +    """Start Xvfb and set an appropriate DISPLAY environment.  Linux only.
    +
    +    Copied from tools/code_coverage/coverage_posix.py
    +    """
    +    if not _IsLinux():
    +      return
    +    proc = subprocess.Popen(['Xvfb', ':9', '-screen', '0', '1024x768x24',
    +                             '-ac'],
    +                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    +    self._pid = proc.pid
    +    if not self._pid:
    +      raise Exception('Could not start Xvfb')
    +    os.environ['DISPLAY'] = ':9'
    +
    +    # Now confirm, giving a chance for it to start if needed.
    +    for _ in range(10):
    +      proc = subprocess.Popen('xdpyinfo >/dev/null', shell=True)
    +      _, retcode = os.waitpid(proc.pid, 0)
    +      if retcode == 0:
    +        break
    +      time.sleep(0.25)
    +    if retcode != 0:
    +      raise Exception('Could not confirm Xvfb happiness')
    +
    +  def Stop(self):
    +    """Stop Xvfb if needed.  Linux only."""
    +    if self._pid:
    +      try:
    +        os.kill(self._pid, signal.SIGKILL)
    +      except:
    +        pass
    +      del os.environ['DISPLAY']
    +      self._pid = 0
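+
+
+# A minimal usage sketch (the test runner call is hypothetical):
+#
+#   xvfb = Xvfb()
+#   xvfb.Start()  # no-op off Linux; otherwise sets DISPLAY=:9
+#   try:
+#     RunUiTests()
+#   finally:
+#     xvfb.Stop()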
    diff --git a/android/pylib/valgrind_tools.py b/android/pylib/valgrind_tools.py
    new file mode 100644
    index 000000000000..8c00705b72f3
    --- /dev/null
    +++ b/android/pylib/valgrind_tools.py
    @@ -0,0 +1,116 @@
    +# Copyright 2012 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +# pylint: disable=R0201
+
+
    +import logging
    +import sys
    +
    +from devil.android import device_errors
    +from devil.android.valgrind_tools import base_tool
    +
    +
    +def SetChromeTimeoutScale(device, scale):
    +  """Sets the timeout scale in /data/local/tmp/chrome_timeout_scale to scale."""
    +  path = '/data/local/tmp/chrome_timeout_scale'
    +  if not scale or scale == 1.0:
    +    # Delete if scale is None/0.0/1.0 since the default timeout scale is 1.0
    +    device.RemovePath(path, force=True, as_root=True)
    +  else:
    +    device.WriteFile(path, '%f' % scale, as_root=True)
    +
    +
    +class AddressSanitizerTool(base_tool.BaseTool):
    +  """AddressSanitizer tool."""
    +
    +  WRAPPER_NAME = '/system/bin/asanwrapper'
+  # Disable the memcmp overlap check. There are blobs (GL drivers) on some
+  # Android devices that use memcmp on overlapping regions; nothing we can do
+  # about that.
    +  EXTRA_OPTIONS = 'strict_memcmp=0,use_sigaltstack=1'
    +
    +  def __init__(self, device):
    +    super().__init__()
    +    self._device = device
    +
    +  @classmethod
    +  def CopyFiles(cls, device):
    +    """Copies ASan tools to the device."""
    +    del device
    +
    +  def GetTestWrapper(self):
    +    return AddressSanitizerTool.WRAPPER_NAME
    +
    +  def GetUtilWrapper(self):
    +    """Returns the wrapper for utilities, such as forwarder.
    +
+    The AddressSanitizer wrapper must be added to all instrumented binaries,
+    including forwarder and the like. This can be removed if such binaries
+    were built without instrumentation.
+    """
    +    return self.GetTestWrapper()
    +
    +  def SetupEnvironment(self):
    +    try:
    +      self._device.EnableRoot()
    +    except device_errors.CommandFailedError as e:
    +      # Try to set the timeout scale anyway.
    +      # TODO(jbudorick) Handle this exception appropriately after interface
    +      #                 conversions are finished.
    +      logging.error(str(e))
    +    SetChromeTimeoutScale(self._device, self.GetTimeoutScale())
    +
    +  def CleanUpEnvironment(self):
    +    SetChromeTimeoutScale(self._device, None)
    +
    +  def GetTimeoutScale(self):
    +    # Very slow startup.
    +    return 20.0
    +
    +
    +TOOL_REGISTRY = {
    +    'asan': AddressSanitizerTool,
    +}
    +
    +
    +def CreateTool(tool_name, device):
    +  """Creates a tool with the specified tool name.
    +
    +  Args:
    +    tool_name: Name of the tool to create.
    +    device: A DeviceUtils instance.
    +  Returns:
    +    A tool for the specified tool_name.
    +  """
    +  if not tool_name:
    +    return base_tool.BaseTool()
    +
    +  ctor = TOOL_REGISTRY.get(tool_name)
    +  if ctor:
    +    return ctor(device)
    +  print('Unknown tool %s, available tools: %s' %
    +        (tool_name, ', '.join(sorted(TOOL_REGISTRY.keys()))))
    +  sys.exit(1)
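+
+
+# A minimal usage sketch (assumes |device| is a connected DeviceUtils):
+#
+#   tool = CreateTool('asan', device)
+#   tool.SetupEnvironment()
+#   ...  # run tests through tool.GetTestWrapper()
+#   tool.CleanUpEnvironment()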
    +
    +
    +def PushFilesForTool(tool_name, device):
    +  """Pushes the files required for |tool_name| to |device|.
    +
    +  Args:
    +    tool_name: Name of the tool to create.
    +    device: A DeviceUtils instance.
    +  """
    +  if not tool_name:
    +    return
    +
    +  clazz = TOOL_REGISTRY.get(tool_name)
    +  if clazz:
    +    clazz.CopyFiles(device)
    +  else:
    +    print('Unknown tool %s, available tools: %s' % (tool_name, ', '.join(
    +        sorted(TOOL_REGISTRY.keys()))))
    +    sys.exit(1)
    diff --git a/android/pylintrc b/android/pylintrc
    new file mode 100644
    index 000000000000..2a721bf2709d
    --- /dev/null
    +++ b/android/pylintrc
    @@ -0,0 +1,15 @@
    +[FORMAT]
    +
    +max-line-length=80
    +
    +[MESSAGES CONTROL]
    +
    +disable=abstract-class-not-used,bad-continuation,bad-indentation,duplicate-code,fixme,invalid-name,locally-disabled,locally-enabled,missing-docstring,star-args,too-few-public-methods,too-many-arguments,too-many-branches,too-many-instance-attributes,too-many-lines,too-many-locals,too-many-public-methods,too-many-statements,wrong-import-position
    +
    +[REPORTS]
    +
    +reports=no
    +
    +[VARIABLES]
    +
    +dummy-variables-rgx=^_.*$|dummy
    diff --git a/android/resource_sizes.gni b/android/resource_sizes.gni
    new file mode 100644
    index 000000000000..c599bbbf71f1
    --- /dev/null
    +++ b/android/resource_sizes.gni
    @@ -0,0 +1,94 @@
    +# Copyright 2019 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +import("//build/config/android/internal_rules.gni")
    +
    +# Generates a script in the bin directory that runs
    +# //build/android/resource_sizes.py against the provided apk.
    +#
+# Exactly one of apk_name or file_path must be provided.
    +#
    +# Variables:
    +#   apk_name: The name of the apk, without the extension.
    +#   file_path: The path to the apk or .minimal.apks.
    +#   trichrome_chrome_path: The path to chrome apk or .minimal.apks.
    +#   trichrome_webview_path: The path to webview apk or .minimal.apks.
    +#   trichrome_library_path: The path to library apk or .minimal.apks.
    +template("android_resource_sizes_test") {
    +  generate_android_wrapper(target_name) {
    +    forward_variables_from(invoker, [ "data_deps" ])
    +    executable = "//build/android/resource_sizes.py"
    +    wrapper_script = "$root_out_dir/bin/run_${target_name}"
    +
    +    assert(defined(invoker.apk_name) != defined(invoker.file_path),
    +           "Exactly one of apk_name or file_path should be provided.")
    +
    +    deps = [ "//build/android:resource_sizes_py" ]
    +    executable_args = [
    +      "--output-format",
    +      "histograms",
    +      "--chromium-output-directory",
    +      "@WrappedPath(.)",
    +    ]
    +
    +    data = []
    +    if (defined(invoker.trichrome_chrome_path)) {
    +      data += [
    +        invoker.trichrome_chrome_path,
    +        invoker.trichrome_webview_path,
    +        invoker.trichrome_library_path,
    +      ]
    +      _rebased_chrome =
    +          rebase_path(invoker.trichrome_chrome_path, root_build_dir)
    +      _rebased_webview =
    +          rebase_path(invoker.trichrome_webview_path, root_build_dir)
    +      _rebased_library =
    +          rebase_path(invoker.trichrome_library_path, root_build_dir)
    +
    +      # apk_name used only as test suite name. Not a path in this case.
    +      executable_args += [
    +        "--trichrome-chrome",
    +        "@WrappedPath(${_rebased_chrome})",
    +        "--trichrome-webview",
    +        "@WrappedPath(${_rebased_webview})",
    +        "--trichrome-library",
    +        "@WrappedPath(${_rebased_library})",
    +        "${invoker.apk_name}",
    +      ]
    +    } else {
    +      if (defined(invoker.apk_name)) {
    +        _file_path = "$root_out_dir/apks/${invoker.apk_name}.apk"
    +        data += [ "$root_out_dir/arsc/apks/${invoker.apk_name}.ap_" ]
    +      } else if (defined(invoker.file_path)) {
    +        _file_path = invoker.file_path
    +      }
    +      data += [ _file_path ]
    +      _rebased_file_path = rebase_path(_file_path, root_build_dir)
    +      executable_args += [ "@WrappedPath(${_rebased_file_path})" ]
    +    }
    +  }
    +}
    +
    +# Generates a "size config JSON file" to specify data to be passed from recipes
    +# to Python scripts for binary size measurement on bots. All filenames are
    +# relative to $root_build_dir. The resulting JSON file is written to
    +# "$root_build_dir/config/${invoker.name}_size_config.json".
    +#
    +# Refer to tools/binary_size/generate_commit_size_analysis.py for JSON schema.
    +#
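    +# Example (hypothetical values):
    +#   android_size_bot_config("monochrome_size_config") {
    +#     name = "monochrome"
    +#     archive_files = [ "apks/Monochrome.minimal.apks" ]
    +#   }
    +#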
    +template("android_size_bot_config") {
    +  _full_target_name = get_label_info(target_name, "label_no_toolchain")
    +  _out_json = {
    +    _HEADER = "Written by build target '${_full_target_name}'"
    +    forward_variables_from(invoker,
    +                           [
    +                             "archive_files",
    +                             "mapping_files",
    +                             "to_resource_sizes_py",
    +                             "supersize_input_file",
    +                           ])
    +  }
    +  _output_json_path = "$root_build_dir/config/${invoker.name}_size_config.json"
    +  write_file(_output_json_path, _out_json, "json")
    +}
    diff --git a/android/resource_sizes.py b/android/resource_sizes.py
    new file mode 100755
    index 000000000000..05ee86ce265b
    --- /dev/null
    +++ b/android/resource_sizes.py
    @@ -0,0 +1,929 @@
    +#!/usr/bin/env vpython3
    +# Copyright 2011 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +"""Reports binary size metrics for an APK.
    +
    +More information at //docs/speed/binary_size/metrics.md.
    +"""
    +
    +
    +import argparse
    +import collections
    +from contextlib import contextmanager
    +import json
    +import logging
    +import os
    +import posixpath
    +import re
    +import struct
    +import sys
    +import tempfile
    +import zipfile
    +import zlib
    +
    +import devil_chromium
    +from devil.android.sdk import build_tools
    +from devil.utils import cmd_helper
    +from devil.utils import lazy
    +import method_count
    +from pylib import constants
    +from pylib.constants import host_paths
    +
    +_AAPT_PATH = lazy.WeakConstant(lambda: build_tools.GetPath('aapt'))
    +_ANDROID_UTILS_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, 'build',
    +                                   'android', 'gyp')
    +_BUILD_UTILS_PATH = os.path.join(host_paths.DIR_SOURCE_ROOT, 'build', 'util')
    +_READOBJ_PATH = os.path.join(constants.ANDROID_NDK_ROOT, 'toolchains', 'llvm',
    +                             'prebuilt', 'linux-x86_64', 'bin', 'llvm-readobj')
    +
    +with host_paths.SysPath(host_paths.BUILD_COMMON_PATH):
    +  import perf_tests_results_helper  # pylint: disable=import-error
    +
    +with host_paths.SysPath(host_paths.TRACING_PATH):
    +  from tracing.value import convert_chart_json  # pylint: disable=import-error
    +
    +with host_paths.SysPath(_ANDROID_UTILS_PATH, 0):
    +  from util import build_utils  # pylint: disable=import-error
    +
    +with host_paths.SysPath(_BUILD_UTILS_PATH, 0):
    +  from lib.results import result_sink  # pylint: disable=import-error
    +  from lib.results import result_types  # pylint: disable=import-error
    +
    +# Captures an entire config from aapt output.
    +_AAPT_CONFIG_PATTERN = r'config %s:(.*?)config [a-zA-Z-]+:'
    +# Matches string resource entries from aapt output.
    +_AAPT_ENTRY_RE = re.compile(
    +    r'resource (?P<id>\w{10}) [\w\.]+:string/.*?"(?P<val>.+?)"', re.DOTALL)
    +_BASE_CHART = {
    +    'format_version': '0.1',
    +    'benchmark_name': 'resource_sizes',
    +    'benchmark_description': 'APK resource size information.',
    +    'trace_rerun_options': [],
    +    'charts': {}
    +}
    +# Macro definitions look like (something, 123) when
    +# enable_resource_allowlist_generation=true.
    +_RC_HEADER_RE = re.compile(r'^#define (?P<name>\w+).* (?P<id>\d+)\)?$')
    +_RE_NON_LANGUAGE_PAK = re.compile(r'^assets/.*(resources|percent)\.pak$')
    +_READELF_SIZES_METRICS = {
    +    'text': ['.text'],
    +    'data': ['.data', '.rodata', '.data.rel.ro', '.data.rel.ro.local'],
    +    'relocations': ['.rel.dyn', '.rel.plt', '.rela.dyn', '.rela.plt'],
    +    'unwind': [
    +        '.ARM.extab', '.ARM.exidx', '.eh_frame', '.eh_frame_hdr',
    +        '.ARM.exidxsentinel_section_after_text'
    +    ],
    +    'symbols': [
    +        '.dynsym', '.dynstr', '.dynamic', '.shstrtab', '.got', '.plt',
    +        '.got.plt', '.hash', '.gnu.hash'
    +    ],
    +    'other': [
    +        '.init_array', '.preinit_array', '.ctors', '.fini_array', '.comment',
    +        '.note.gnu.gold-version', '.note.crashpad.info', '.note.android.ident',
    +        '.ARM.attributes', '.note.gnu.build-id', '.gnu.version',
    +        '.gnu.version_d', '.gnu.version_r', '.interp', '.gcc_except_table'
    +    ]
    +}
    +
    +
    +class _AccumulatingReporter:
    +  def __init__(self):
    +    self._combined_metrics = collections.defaultdict(int)
    +
    +  def __call__(self, graph_title, trace_title, value, units):
    +    self._combined_metrics[(graph_title, trace_title, units)] += value
    +
    +  def DumpReports(self, report_func):
    +    for (graph_title, trace_title,
    +         units), value in sorted(self._combined_metrics.items()):
    +      report_func(graph_title, trace_title, value, units)
    +
    +
    +class _ChartJsonReporter(_AccumulatingReporter):
    +  def __init__(self, chartjson):
    +    super().__init__()
    +    self._chartjson = chartjson
    +    self.trace_title_prefix = ''
    +
    +  def __call__(self, graph_title, trace_title, value, units):
    +    super().__call__(graph_title, trace_title, value, units)
    +
    +    perf_tests_results_helper.ReportPerfResult(
    +        self._chartjson, graph_title, self.trace_title_prefix + trace_title,
    +        value, units)
    +
    +  def SynthesizeTotals(self, unique_method_count):
    +    for tup, value in sorted(self._combined_metrics.items()):
    +      graph_title, trace_title, units = tup
    +      if trace_title == 'unique methods':
    +        value = unique_method_count
    +      perf_tests_results_helper.ReportPerfResult(self._chartjson, graph_title,
    +                                                 'Combined_' + trace_title,
    +                                                 value, units)
    +
    +
    +def _PercentageDifference(a, b):
    +  if a == 0:
    +    return 0
    +  return float(b - a) / a
    +
    +
    +def _ReadZipInfoExtraFieldLength(zip_file, zip_info):
    +  """Reads the value of |extraLength| from |zip_info|'s local file header.
    +
    +  |zip_info| has an |extra| field, but it's read from the central directory.
    +  Android's zipalign tool sets the extra field only in local file headers.
    +  """
    +  # Refer to https://en.wikipedia.org/wiki/Zip_(file_format)#File_headers
    +  zip_file.fp.seek(zip_info.header_offset + 28)
    +  return struct.unpack('<H', zip_file.fp.read(2))[0]
    +
    +
    +def _MeasureApkSignatureBlock(zip_file):
    +  """Measures the size of the v2 / v3 signing block.
    +
    +  Refer to: https://source.android.com/security/apksigning/v2
    +  """
    +  # Seek to the "end of central directory" record and assert its signature.
    +  eocd_offset_from_end = -22 - len(zip_file.comment)
    +  zip_file.fp.seek(eocd_offset_from_end, os.SEEK_END)
    +  assert zip_file.fp.read(4) == b'PK\005\006', (
    +      'failed to find end-of-central-directory')
    +
    +  # Read out the "start of central directory" offset.
    +  zip_file.fp.seek(eocd_offset_from_end + 16, os.SEEK_END)
    +  start_of_central_directory = struct.unpack('<I', zip_file.fp.read(4))[0]
    +
    +  # Compute the offset after the last zip entry.
    +  last_info = max(zip_file.infolist(), key=lambda i: i.header_offset)
    +  last_header_size = (30 + len(last_info.filename) +
    +                      _ReadZipInfoExtraFieldLength(zip_file, last_info))
    +  end_of_last_file = (last_info.header_offset + last_header_size +
    +                      last_info.compress_size)
    +  return start_of_central_directory - end_of_last_file
    +
    +
    +def _ParseManifestAttributes(apk_path):
    +  # Parse minSdkVersion, extractNativeLibs, isFeatureSplit, and onDemand
    +  # out of "aapt d xmltree <apk> AndroidManifest.xml" output.
    +  output = cmd_helper.GetCmdOutput(
    +      [_AAPT_PATH.read(), 'd', 'xmltree', apk_path, 'AndroidManifest.xml'])
    +
    +  def parse_attr(namespace, name):
    +    # Attribute values are printed as either of:
    +    #   android:extractNativeLibs(0x010100d7)=(type 0x12)0x0
    +    #   dist:onDemand=true
    +    m = re.search('%s:%s' % (namespace, name) + r'(?:\(.*?\))?=(\S+)', output)
    +    if m is None:
    +      return None
    +    value = m.group(1)
    +    if value.startswith('(type'):
    +      return int(value[value.index(')') + 1:], 16)
    +    return value == 'true'
    +
    +  sdk_version = parse_attr('android', 'minSdkVersion')
    +  # extractNativeLibs defaults to true; 0 / false means libs stay in the APK.
    +  skip_extract_lib = parse_attr('android', 'extractNativeLibs') in (0, False)
    +  is_feature_split = parse_attr('android', 'isFeatureSplit')
    +  # On-demand splits can be declared via either <dist:on-demand> or
    +  # <dist:module dist:onDemand="true">.
    +  on_demand = parse_attr('dist', 'onDemand') or 'on-demand' in output
    +  on_demand = bool(on_demand and is_feature_split)
    +
    +  return sdk_version, skip_extract_lib, on_demand
    +
    +
    +def _NormalizeLanguagePaks(translations, factor):
    +  english_pak = translations.FindByPattern(r'.*/en[-_][Uu][Ss]\.l?pak')
    +  num_translations = translations.GetNumEntries()
    +  ret = 0
    +  if english_pak:
    +    ret -= translations.ComputeZippedSize()
    +    ret += int(english_pak.compress_size * num_translations * factor)
    +  return ret
    +
    +
    +def _NormalizeResourcesArsc(apk_path, num_arsc_files, num_translations,
    +                            out_dir):
    +  """Estimates the expected overhead of untranslated strings in resources.arsc.
    +
    +  See http://crbug.com/677966 for why this is necessary.
    +  """
    +  # If there are multiple .arsc files, use the resource packaged APK instead.
    +  if num_arsc_files > 1:
    +    if not out_dir:
    +      return -float('inf')
    +    ap_name = os.path.basename(apk_path).replace('.apk', '.ap_')
    +    ap_path = os.path.join(out_dir, 'arsc/apks', ap_name)
    +    if not os.path.exists(ap_path):
    +      raise Exception('Missing expected file: %s, try rebuilding.' % ap_path)
    +    apk_path = ap_path
    +
    +  aapt_output = _RunAaptDumpResources(apk_path)
    +  # en-rUS is in the default config and may be cluttered with non-translatable
    +  # strings, so en-rGB is a better baseline for finding missing translations.
    +  en_strings = _CreateResourceIdValueMap(aapt_output, 'en-rGB')
    +  fr_strings = _CreateResourceIdValueMap(aapt_output, 'fr')
    +
    +  # en-US and en-GB will never be translated.
    +  config_count = num_translations - 2
    +
    +  size = 0
    +  for res_id, string_val in en_strings.items():
    +    if string_val == fr_strings[res_id]:
    +      string_size = len(string_val)
    +      # 7 bytes is the per-entry overhead (not specific to any string). See
    +      # https://android.googlesource.com/platform/frameworks/base.git/+/android-4.2.2_r1/tools/aapt/StringPool.cpp#414.
    +      # The 1.5 factor was determined experimentally and is meant to account for
    +      # other languages generally having longer strings than English.
    +      size += config_count * (7 + string_size * 1.5)
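    +      # Worked example (hypothetical numbers): with 48 untranslated configs
    +      # and a 20-char string, this adds 48 * (7 + 20 * 1.5) = 1776 bytes.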
    +
    +  return int(size)
    +
    +
    +def _CreateResourceIdValueMap(aapt_output, lang):
    +  """Return a map of resource ids to string values for the given |lang|."""
    +  config_re = _AAPT_CONFIG_PATTERN % lang
    +  return {entry.group('id'): entry.group('val')
    +          for config_section in re.finditer(config_re, aapt_output, re.DOTALL)
    +          for entry in re.finditer(_AAPT_ENTRY_RE, config_section.group(0))}
    +
    +
    +def _RunAaptDumpResources(apk_path):
    +  cmd = [_AAPT_PATH.read(), 'dump', '--values', 'resources', apk_path]
    +  status, output = cmd_helper.GetCmdStatusAndOutput(cmd)
    +  if status != 0:
    +    raise Exception('Failed running aapt command: "%s" with output "%s".' %
    +                    (' '.join(cmd), output))
    +  return output
    +
    +
    +class _FileGroup:
    +  """Represents a category that apk files can fall into."""
    +
    +  def __init__(self, name):
    +    self.name = name
    +    self._zip_infos = []
    +    self._extracted_multipliers = []
    +
    +  def AddZipInfo(self, zip_info, extracted_multiplier=0):
    +    self._zip_infos.append(zip_info)
    +    self._extracted_multipliers.append(extracted_multiplier)
    +
    +  def AllEntries(self):
    +    return iter(self._zip_infos)
    +
    +  def GetNumEntries(self):
    +    return len(self._zip_infos)
    +
    +  def FindByPattern(self, pattern):
    +    return next((i for i in self._zip_infos if re.match(pattern, i.filename)),
    +                None)
    +
    +  def FindLargest(self):
    +    if not self._zip_infos:
    +      return None
    +    return max(self._zip_infos, key=lambda i: i.file_size)
    +
    +  def ComputeZippedSize(self):
    +    return sum(i.compress_size for i in self._zip_infos)
    +
    +  def ComputeUncompressedSize(self):
    +    return sum(i.file_size for i in self._zip_infos)
    +
    +  def ComputeExtractedSize(self):
    +    ret = 0
    +    for zi, multiplier in zip(self._zip_infos, self._extracted_multipliers):
    +      ret += zi.file_size * multiplier
    +    return ret
    +
    +  def ComputeInstallSize(self):
    +    return self.ComputeExtractedSize() + self.ComputeZippedSize()
    +
    +
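    +# Usage sketch for _FileGroup (hypothetical zip_info):
    +#   group = _FileGroup('Java code')
    +#   group.AddZipInfo(zip_info, extracted_multiplier=1)
    +#   group.ComputeInstallSize()  # zipped bytes + estimated extracted bytes
    +
    +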
    +def _AnalyzeInternal(apk_path,
    +                     sdk_version,
    +                     report_func,
    +                     dex_stats_collector,
    +                     out_dir,
    +                     apks_path=None,
    +                     split_name=None):
    +  """Analyse APK to determine size contributions of different file classes.
    +
    +  Returns: Normalized APK size.
    +  """
    +  dex_stats_collector.CollectFromZip(split_name or '', apk_path)
    +  file_groups = []
    +
    +  def make_group(name):
    +    group = _FileGroup(name)
    +    file_groups.append(group)
    +    return group
    +
    +  def has_no_extension(filename):
    +    return os.path.splitext(filename)[1] == ''
    +
    +  native_code = make_group('Native code')
    +  java_code = make_group('Java code')
    +  native_resources_no_translations = make_group('Native resources (no l10n)')
    +  translations = make_group('Native resources (l10n)')
    +  stored_translations = make_group('Native resources stored (l10n)')
    +  icu_data = make_group('ICU (i18n library) data')
    +  v8_snapshots = make_group('V8 Snapshots')
    +  png_drawables = make_group('PNG drawables')
    +  res_directory = make_group('Non-compiled Android resources')
    +  arsc = make_group('Compiled Android resources')
    +  metadata = make_group('Package metadata')
    +  unknown = make_group('Unknown files')
    +  notices = make_group('licenses.notice file')
    +  unwind_cfi = make_group('unwind_cfi (dev and canary only)')
    +
    +  with zipfile.ZipFile(apk_path, 'r') as apk:
    +    apk_contents = apk.infolist()
    +    # Account for zipalign overhead that exists in local file header.
    +    zipalign_overhead = sum(
    +        _ReadZipInfoExtraFieldLength(apk, i) for i in apk_contents)
    +    # Account for zipalign overhead that exists in central directory header.
    +    # Happens when python aligns entries in apkbuilder.py, but does not
    +    # exist when using Android's zipalign. E.g. for bundle .apks files.
    +    zipalign_overhead += sum(len(i.extra) for i in apk_contents)
    +    signing_block_size = _MeasureApkSignatureBlock(apk)
    +
    +  _, skip_extract_lib, _ = _ParseManifestAttributes(apk_path)
    +
    +  # Pre-L: Dalvik - .odex file is simply decompressed/optimized dex file (~1x).
    +  # L, M: ART - .odex file is compiled version of the dex file (~4x).
    +  # N: ART - Uses Dalvik-like JIT for normal apps (~1x), full compilation for
    +  #    shared apps (~4x).
    +  # Actual multipliers calculated using "apk_operations.py disk-usage".
    +  # Will need to update multipliers once apk obfuscation is enabled.
    +  # E.g. with obfuscation, the 4.04 changes to 4.46.
    +  speed_profile_dex_multiplier = 1.17
    +  orig_filename = apks_path or apk_path
    +  is_webview = 'WebView' in orig_filename
    +  is_monochrome = 'Monochrome' in orig_filename
    +  is_library = 'Library' in orig_filename
    +  is_trichrome = 'TrichromeChrome' in orig_filename
    +  # WebView is always a shared APK since other apps load it.
    +  # Library is always shared since it's used by Chrome and WebView.
    +  # Chrome is always shared since renderers can't access dex otherwise
    +  # (see DexFixer).
    +  is_shared_apk = sdk_version >= 24 and (is_monochrome or is_webview
    +                                         or is_library or is_trichrome)
    +  # Dex decompression overhead varies by Android version.
    +  if sdk_version < 21:
    +    # JellyBean & KitKat
    +    dex_multiplier = 1.16
    +  elif sdk_version < 24:
    +    # Lollipop & Marshmallow
    +    dex_multiplier = 4.04
    +  elif is_shared_apk:
    +    # Oreo and above, compilation_filter=speed
    +    dex_multiplier = 4.04
    +  else:
    +    # Oreo and above, compilation_filter=speed-profile
    +    dex_multiplier = speed_profile_dex_multiplier
    +
    +  total_apk_size = os.path.getsize(apk_path)
    +  for member in apk_contents:
    +    filename = member.filename
    +    if filename.endswith('/'):
    +      continue
    +    if filename.endswith('.so'):
    +      basename = posixpath.basename(filename)
    +      should_extract_lib = not skip_extract_lib and basename.startswith('lib')
    +      native_code.AddZipInfo(
    +          member, extracted_multiplier=int(should_extract_lib))
    +    elif filename.startswith('classes') and filename.endswith('.dex'):
    +      # Android P+, uncompressed dex does not need to be extracted.
    +      compressed = member.compress_type != zipfile.ZIP_STORED
    +      multiplier = dex_multiplier
    +      if not compressed and sdk_version >= 28:
    +        multiplier -= 1
    +
    +      java_code.AddZipInfo(member, extracted_multiplier=multiplier)
    +    elif re.search(_RE_NON_LANGUAGE_PAK, filename):
    +      native_resources_no_translations.AddZipInfo(member)
    +    elif filename.endswith('.pak') or filename.endswith('.lpak'):
    +      compressed = member.compress_type != zipfile.ZIP_STORED
    +      bucket = translations if compressed else stored_translations
    +      extracted_multiplier = 0
    +      if compressed:
    +        extracted_multiplier = int('en_' in filename or 'en-' in filename)
    +      bucket.AddZipInfo(member, extracted_multiplier=extracted_multiplier)
    +    elif 'icu' in filename and filename.endswith('.dat'):
    +      icu_data.AddZipInfo(member)
    +    elif filename.endswith('.bin'):
    +      v8_snapshots.AddZipInfo(member)
    +    elif filename.startswith('res/'):
    +      if (filename.endswith('.png') or filename.endswith('.webp')
    +          or has_no_extension(filename)):
    +        png_drawables.AddZipInfo(member)
    +      else:
    +        res_directory.AddZipInfo(member)
    +    elif filename.endswith('.arsc'):
    +      arsc.AddZipInfo(member)
    +    elif filename.startswith('META-INF') or filename in (
    +        'AndroidManifest.xml', 'assets/webapk_dex_version.txt'):
    +      metadata.AddZipInfo(member)
    +    elif filename.endswith('.notice'):
    +      notices.AddZipInfo(member)
    +    elif filename.startswith('assets/unwind_cfi'):
    +      unwind_cfi.AddZipInfo(member)
    +    else:
    +      unknown.AddZipInfo(member)
    +
    +  if apks_path:
    +    # We're mostly focused on size of Chrome for non-English locales, so assume
    +    # Hindi (arbitrarily chosen) locale split is installed.
    +    with zipfile.ZipFile(apks_path) as z:
    +      subpath = 'splits/{}-hi.apk'.format(split_name)
    +      if subpath in z.namelist():
    +        hindi_apk_info = z.getinfo(subpath)
    +        total_apk_size += hindi_apk_info.file_size
    +      else:
    +        assert split_name != 'base', 'splits/base-hi.apk should always exist'
    +
    +  total_install_size = total_apk_size
    +  total_install_size_android_go = total_apk_size
    +  zip_overhead = total_apk_size
    +
    +  for group in file_groups:
    +    actual_size = group.ComputeZippedSize()
    +    install_size = group.ComputeInstallSize()
    +    uncompressed_size = group.ComputeUncompressedSize()
    +    extracted_size = group.ComputeExtractedSize()
    +    total_install_size += extracted_size
    +    zip_overhead -= actual_size
    +
    +    report_func('Breakdown', group.name + ' size', actual_size, 'bytes')
    +    report_func('InstallBreakdown', group.name + ' size', int(install_size),
    +                'bytes')
    +    # Only a few metrics are compressed in the first place.
    +    # To avoid over-reporting, track uncompressed size only for compressed
    +    # entries.
    +    if uncompressed_size != actual_size:
    +      report_func('Uncompressed', group.name + ' size', uncompressed_size,
    +                  'bytes')
    +
    +    if group is java_code:
    +      # Updates are compiled using quicken, but system image uses speed-profile.
    +      multiplier = speed_profile_dex_multiplier
    +
    +      # Android P+, uncompressed dex does not need to be extracted.
    +      compressed = uncompressed_size != actual_size
    +      if not compressed and sdk_version >= 28:
    +        multiplier -= 1
    +      extracted_size = int(uncompressed_size * multiplier)
    +      total_install_size_android_go += extracted_size
    +      report_func('InstallBreakdownGo', group.name + ' size',
    +                  actual_size + extracted_size, 'bytes')
    +    elif group is translations and apks_path:
    +      # Assume Hindi rather than English (accounted for above in total_apk_size)
    +      total_install_size_android_go += actual_size
    +    else:
    +      total_install_size_android_go += extracted_size
    +
    +  # Per-file zip overhead is caused by:
    +  # * 30 byte entry header + len(file name)
    +  # * 46 byte central directory entry + len(file name)
    +  # * 0-3 bytes for zipalign.
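    +  # E.g. an entry named "classes.dex" (11 chars) accounts for roughly
    +  # (30 + 11) + (46 + 11) + 0..3 = 98..101 bytes of pure zip overhead.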
    +  report_func('Breakdown', 'Zip Overhead', zip_overhead, 'bytes')
    +  report_func('InstallSize', 'APK size', total_apk_size, 'bytes')
    +  report_func('InstallSize', 'Estimated installed size',
    +              int(total_install_size), 'bytes')
    +  report_func('InstallSize', 'Estimated installed size (Android Go)',
    +              int(total_install_size_android_go), 'bytes')
    +  transfer_size = _CalculateCompressedSize(apk_path)
    +  report_func('TransferSize', 'Transfer size (deflate)', transfer_size, 'bytes')
    +
    +  # Size of main dex vs remaining.
    +  main_dex_info = java_code.FindByPattern('classes.dex')
    +  if main_dex_info:
    +    main_dex_size = main_dex_info.file_size
    +    report_func('Specifics', 'main dex size', main_dex_size, 'bytes')
    +    secondary_size = java_code.ComputeUncompressedSize() - main_dex_size
    +    report_func('Specifics', 'secondary dex size', secondary_size, 'bytes')
    +
    +  main_lib_info = native_code.FindLargest()
    +  native_code_unaligned_size = 0
    +  for lib_info in native_code.AllEntries():
    +    section_sizes = _ExtractLibSectionSizesFromApk(apk_path, lib_info.filename)
    +    native_code_unaligned_size += sum(v for k, v in section_sizes.items()
    +                                      if k != 'bss')
    +    # Size of main .so vs remaining.
    +    if lib_info == main_lib_info:
    +      main_lib_size = lib_info.file_size
    +      report_func('Specifics', 'main lib size', main_lib_size, 'bytes')
    +      secondary_size = native_code.ComputeUncompressedSize() - main_lib_size
    +      report_func('Specifics', 'other lib size', secondary_size, 'bytes')
    +
    +      for metric_name, size in section_sizes.items():
    +        report_func('MainLibInfo', metric_name, size, 'bytes')
    +
    +  # Main metric that we want to monitor for jumps.
    +  normalized_apk_size = total_apk_size
    +  # unwind_cfi exists only in dev, canary, and non-channel builds.
    +  normalized_apk_size -= unwind_cfi.ComputeZippedSize()
    +  # Sections within .so files get 4kb aligned, so use section sizes rather than
    +  # file size. Also gets rid of compression.
    +  normalized_apk_size -= native_code.ComputeZippedSize()
    +  normalized_apk_size += native_code_unaligned_size
    +  # Normalized dex size: Size within the zip + size on disk for Android Go
    +  # devices running Android O (which ~= uncompressed dex size).
    +  # Use a constant compression factor to account for fluctuations.
    +  normalized_apk_size -= java_code.ComputeZippedSize()
    +  normalized_apk_size += java_code.ComputeUncompressedSize()
    +  # Don't include zipalign overhead in normalized size, since it effectively
    +  # causes size changes in files that precede aligned files to be rounded.
    +  # For APKs where classes.dex directly precedes libchrome.so (the normal case),
    +  # this causes small dex size changes to disappear into libchrome.so alignment.
    +  normalized_apk_size -= zipalign_overhead
    +  # Don't include the size of the apk's signing block because it can fluctuate
    +  # by up to 4kb (from my non-scientific observations), presumably based on hash
    +  # sizes.
    +  normalized_apk_size -= signing_block_size
    +
    +  # Unaligned size should be ~= uncompressed size or something is wrong.
    +  # As of now, padding_fraction ~= .007
    +  padding_fraction = -_PercentageDifference(
    +      native_code.ComputeUncompressedSize(), native_code_unaligned_size)
    +  # Ignore this check for small / no native code
    +  if native_code.ComputeUncompressedSize() > 1000000:
    +    assert 0 <= padding_fraction < .02, (
    +        'Padding was: {} (file_size={}, sections_sum={})'.format(
    +            padding_fraction, native_code.ComputeUncompressedSize(),
    +            native_code_unaligned_size))
    +
    +  if apks_path:
    +    # Locale normalization not needed when measuring only one locale.
    +    # E.g. a change that adds 300 chars of untranslated strings would cause the
    +    # metric to be off by only 390 bytes (assuming a multiplier of 2.3 for
    +    # Hindi).
    +    pass
    +  else:
    +    # Avoid noise caused when strings change and translations haven't yet been
    +    # updated.
    +    num_translations = translations.GetNumEntries()
    +    num_stored_translations = stored_translations.GetNumEntries()
    +
    +    if num_translations > 1:
    +      # Multipliers found by looking at MonochromePublic.apk and seeing how much
    +      # smaller en-US.pak is relative to the average locale.pak.
    +      normalized_apk_size += _NormalizeLanguagePaks(translations, 1.17)
    +    if num_stored_translations > 1:
    +      normalized_apk_size += _NormalizeLanguagePaks(stored_translations, 1.43)
    +    if num_translations + num_stored_translations > 1:
    +      if num_translations == 0:
    +        # WebView stores all locale paks uncompressed.
    +        num_arsc_translations = num_stored_translations
    +      else:
    +        # Monochrome has more configurations than Chrome since it includes
    +        # WebView (which supports more locales), but these should mostly be
    +        # empty so ignore them here.
    +        num_arsc_translations = num_translations
    +      normalized_apk_size += _NormalizeResourcesArsc(apk_path,
    +                                                     arsc.GetNumEntries(),
    +                                                     num_arsc_translations,
    +                                                     out_dir)
    +
    +  # It will be -Inf for .apk files with multiple .arsc files and no out_dir set.
    +  if normalized_apk_size < 0:
    +    sys.stderr.write('Skipping normalized_apk_size (no output directory set)\n')
    +  else:
    +    report_func('Specifics', 'normalized apk size', normalized_apk_size,
    +                'bytes')
    +  # The "file count" metric cannot be grouped with any other metrics when the
    +  # end result is going to be uploaded to the perf dashboard in the HistogramSet
    +  # format due to mixed units (bytes vs. zip entries) causing malformed
    +  # summaries to be generated.
    +  # TODO(https://crbug.com/903970): Remove this workaround if unit mixing is
    +  # ever supported.
    +  report_func('FileCount', 'file count', len(apk_contents), 'zip entries')
    +
    +  for info in unknown.AllEntries():
    +    sys.stderr.write(
    +        'Unknown entry: %s %d\n' % (info.filename, info.compress_size))
    +  return normalized_apk_size
    +
    +
    +def _CalculateCompressedSize(file_path):
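    +  # Estimates the transfer (deflate-compressed) size by streaming the file
    +  # through zlib in 256 KiB chunks, so the whole APK is never held in memory.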
    +  CHUNK_SIZE = 256 * 1024
    +  compressor = zlib.compressobj()
    +  total_size = 0
    +  with open(file_path, 'rb') as f:
    +    for chunk in iter(lambda: f.read(CHUNK_SIZE), b''):
    +      total_size += len(compressor.compress(chunk))
    +  total_size += len(compressor.flush())
    +  return total_size
    +
    +
    +@contextmanager
    +def Unzip(zip_file, filename=None):
    +  """Utility for temporary use of a single file in a zip archive."""
    +  with build_utils.TempDir() as unzipped_dir:
    +    unzipped_files = build_utils.ExtractAll(
    +        zip_file, unzipped_dir, True, pattern=filename)
    +    if len(unzipped_files) == 0:
    +      raise Exception(
    +          '%s not found in %s' % (filename, zip_file))
    +    yield unzipped_files[0]
    +
    +
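    +# Example usage of Unzip (hypothetical paths):
    +#   with Unzip('ChromePublic.apk', 'lib/armeabi-v7a/libfoo.so') as lib_path:
    +#     size = os.path.getsize(lib_path)
    +
    +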
    +def _ConfigOutDir(out_dir):
    +  if out_dir:
    +    constants.SetOutputDirectory(out_dir)
    +  else:
    +    try:
    +      # Triggers auto-detection when CWD == output directory.
    +      constants.CheckOutputDirectory()
    +      out_dir = constants.GetOutDirectory()
    +    except Exception:  # pylint: disable=broad-except
    +      pass
    +  return out_dir
    +
    +
    +def _IterSplits(namelist):
    +  for subpath in namelist:
    +    # Looks for paths like splits/vr-master.apk, splits/vr-hi.apk.
    +    name_parts = subpath.split('/')
    +    if name_parts[0] == 'splits' and len(name_parts) == 2:
    +      name_parts = name_parts[1].split('-')
    +      if len(name_parts) == 2:
    +        split_name, config_name = name_parts
    +        if config_name == 'master.apk':
    +          yield subpath, split_name
    +
    +
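    +# E.g. given ['splits/base-master.apk', 'splits/vr-master.apk',
    +# 'splits/vr-hi.apk'], _IterSplits yields ('splits/base-master.apk', 'base')
    +# and ('splits/vr-master.apk', 'vr').
    +
    +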
    +def _ExtractToTempFile(zip_obj, subpath, temp_file):
    +  temp_file.seek(0)
    +  temp_file.truncate()
    +  temp_file.write(zip_obj.read(subpath))
    +  temp_file.flush()
    +
    +
    +def _AnalyzeApkOrApks(report_func, apk_path, out_dir):
    +  # Create DexStatsCollector here to track unique methods across base & chrome
    +  # modules.
    +  dex_stats_collector = method_count.DexStatsCollector()
    +
    +  if apk_path.endswith('.apk'):
    +    sdk_version, _, _ = _ParseManifestAttributes(apk_path)
    +    _AnalyzeInternal(apk_path, sdk_version, report_func, dex_stats_collector,
    +                     out_dir)
    +  elif apk_path.endswith('.apks'):
    +    with tempfile.NamedTemporaryFile(suffix='.apk') as f:
    +      with zipfile.ZipFile(apk_path) as z:
    +        # Currently bundletool is creating two apks when .apks is created
    +        # without specifying an sdkVersion. Always measure the one with an
    +        # uncompressed shared library.
    +        try:
    +          info = z.getinfo('splits/base-master_2.apk')
    +        except KeyError:
    +          info = z.getinfo('splits/base-master.apk')
    +        _ExtractToTempFile(z, info.filename, f)
    +        sdk_version, _, _ = _ParseManifestAttributes(f.name)
    +
    +        orig_report_func = report_func
    +        report_func = _AccumulatingReporter()
    +
    +        def do_measure(split_name, on_demand):
    +          logging.info('Measuring %s on_demand=%s', split_name, on_demand)
    +          # Use no-op reporting functions to get normalized size for DFMs.
    +          inner_report_func = report_func
    +          inner_dex_stats_collector = dex_stats_collector
    +          if on_demand:
    +            inner_report_func = lambda *_: None
    +            inner_dex_stats_collector = method_count.DexStatsCollector()
    +
    +          size = _AnalyzeInternal(f.name,
    +                                  sdk_version,
    +                                  inner_report_func,
    +                                  inner_dex_stats_collector,
    +                                  out_dir,
    +                                  apks_path=apk_path,
    +                                  split_name=split_name)
    +          report_func('DFM_' + split_name, 'Size with hindi', size, 'bytes')
    +
    +        # Measure base outside of the loop since we've already extracted it.
    +        do_measure('base', on_demand=False)
    +
    +        for subpath, split_name in _IterSplits(z.namelist()):
    +          if split_name != 'base':
    +            _ExtractToTempFile(z, subpath, f)
    +            _, _, on_demand = _ParseManifestAttributes(f.name)
    +            do_measure(split_name, on_demand=on_demand)
    +
    +        report_func.DumpReports(orig_report_func)
    +        report_func = orig_report_func
    +  else:
    +    raise Exception('Unknown file type: ' + apk_path)
    +
    +  # Report dex stats outside of _AnalyzeInternal() so that the "unique methods"
    +  # metric is not just the sum of the base and chrome modules.
    +  for metric, count in dex_stats_collector.GetTotalCounts().items():
    +    report_func('Dex', metric, count, 'entries')
    +  report_func('Dex', 'unique methods',
    +              dex_stats_collector.GetUniqueMethodCount(), 'entries')
    +  report_func('DexCache', 'DexCache',
    +              dex_stats_collector.GetDexCacheSize(pre_oreo=sdk_version < 26),
    +              'bytes')
    +
    +  return dex_stats_collector
    +
    +
    +def _ResourceSizes(args):
    +  chartjson = _BASE_CHART.copy() if args.output_format else None
    +  reporter = _ChartJsonReporter(chartjson)
    +  # Create DexStatsCollector here to track unique methods across trichrome APKs.
    +  dex_stats_collector = method_count.DexStatsCollector()
    +
    +  specs = [
    +      ('Chrome_', args.trichrome_chrome),
    +      ('WebView_', args.trichrome_webview),
    +      ('Library_', args.trichrome_library),
    +  ]
    +  for prefix, path in specs:
    +    if path:
    +      reporter.trace_title_prefix = prefix
    +      child_dex_stats_collector = _AnalyzeApkOrApks(reporter, path,
    +                                                    args.out_dir)
    +      dex_stats_collector.MergeFrom(prefix, child_dex_stats_collector)
    +
    +  if any(path for _, path in specs):
    +    reporter.SynthesizeTotals(dex_stats_collector.GetUniqueMethodCount())
    +  else:
    +    _AnalyzeApkOrApks(reporter, args.input, args.out_dir)
    +
    +  if chartjson:
    +    _DumpChartJson(args, chartjson)
    +
    +
    +def _DumpChartJson(args, chartjson):
    +  if args.output_file == '-':
    +    json_file = sys.stdout
    +  elif args.output_file:
    +    json_file = open(args.output_file, 'w')
    +  else:
    +    results_path = os.path.join(args.output_dir, 'results-chart.json')
    +    logging.critical('Dumping chartjson to %s', results_path)
    +    json_file = open(results_path, 'w')
    +
    +  json.dump(chartjson, json_file, indent=2)
    +
    +  if json_file is not sys.stdout:
    +    json_file.close()
    +
    +  # We would ideally generate a histogram set directly instead of generating
    +  # chartjson then converting. However, perf_tests_results_helper is in
    +  # //build, which doesn't seem to have any precedent for depending on
    +  # anything in Catapult. This can probably be fixed, but since this doesn't
    +  # need to be super fast or anything, converting is a good enough solution
    +  # for the time being.
    +  if args.output_format == 'histograms':
    +    histogram_result = convert_chart_json.ConvertChartJson(results_path)
    +    if histogram_result.returncode != 0:
    +      raise Exception('chartjson conversion failed with error: ' +
    +                      histogram_result.stdout)
    +
    +    histogram_path = os.path.join(args.output_dir, 'perf_results.json')
    +    logging.critical('Dumping histograms to %s', histogram_path)
    +    with open(histogram_path, 'wb') as json_file:
    +      json_file.write(histogram_result.stdout)
    +
    +
    +def main():
    +  build_utils.InitLogging('RESOURCE_SIZES_DEBUG')
    +  argparser = argparse.ArgumentParser(description='Print APK size metrics.')
    +  argparser.add_argument(
    +      '--min-pak-resource-size',
    +      type=int,
    +      default=20 * 1024,
    +      help='Minimum byte size of displayed pak resources.')
    +  argparser.add_argument(
    +      '--chromium-output-directory',
    +      dest='out_dir',
    +      type=os.path.realpath,
    +      help='Location of the build artifacts.')
    +  argparser.add_argument(
    +      '--chartjson',
    +      action='store_true',
    +      help='DEPRECATED. Use --output-format=chartjson '
    +      'instead.')
    +  argparser.add_argument(
    +      '--output-format',
    +      choices=['chartjson', 'histograms'],
    +      help='Output the results to a file in the given '
    +      'format instead of printing the results.')
    +  argparser.add_argument('--loadable_module', help='Obsolete (ignored).')
    +
    +  # Accepted to conform to the isolated script interface, but ignored.
    +  argparser.add_argument(
    +      '--isolated-script-test-filter', help=argparse.SUPPRESS)
    +  argparser.add_argument(
    +      '--isolated-script-test-perf-output',
    +      type=os.path.realpath,
    +      help=argparse.SUPPRESS)
    +
    +  output_group = argparser.add_mutually_exclusive_group()
    +
    +  output_group.add_argument(
    +      '--output-dir', default='.', help='Directory to save chartjson to.')
    +  output_group.add_argument(
    +      '--output-file',
    +      help='Path to output .json (replaces --output-dir). Works only for '
    +      '--output-format=chartjson')
    +  output_group.add_argument(
    +      '--isolated-script-test-output',
    +      type=os.path.realpath,
    +      help='File to which results will be written in the '
    +      'simplified JSON output format.')
    +
    +  argparser.add_argument('input', help='Path to .apk or .apks file to measure.')
    +  trichrome_group = argparser.add_argument_group(
    +      'Trichrome inputs',
    +      description='When specified, |input| is used only as the test suite name.')
    +  trichrome_group.add_argument(
    +      '--trichrome-chrome', help='Path to Trichrome Chrome .apks')
    +  trichrome_group.add_argument(
    +      '--trichrome-webview', help='Path to Trichrome WebView .apk(s)')
    +  trichrome_group.add_argument(
    +      '--trichrome-library', help='Path to Trichrome Library .apk')
    +  args = argparser.parse_args()
    +
    +  args.out_dir = _ConfigOutDir(args.out_dir)
    +  devil_chromium.Initialize(output_directory=args.out_dir)
    +
    +  # TODO(bsheedy): Remove this once uses of --chartjson have been removed.
    +  if args.chartjson:
    +    args.output_format = 'chartjson'
    +
    +  result_sink_client = result_sink.TryInitClient()
    +  isolated_script_output = {'valid': False, 'failures': []}
    +
    +  test_name = 'resource_sizes (%s)' % os.path.basename(args.input)
    +
    +  if args.isolated_script_test_output:
    +    args.output_dir = os.path.join(
    +        os.path.dirname(args.isolated_script_test_output), test_name)
    +    if not os.path.exists(args.output_dir):
    +      os.makedirs(args.output_dir)
    +
    +  try:
    +    _ResourceSizes(args)
    +    isolated_script_output = {
    +        'valid': True,
    +        'failures': [],
    +    }
    +  finally:
    +    if args.isolated_script_test_output:
    +      results_path = os.path.join(args.output_dir, 'test_results.json')
    +      with open(results_path, 'w') as output_file:
    +        json.dump(isolated_script_output, output_file)
    +      with open(args.isolated_script_test_output, 'w') as output_file:
    +        json.dump(isolated_script_output, output_file)
    +    if result_sink_client:
    +      status = result_types.PASS
    +      if not isolated_script_output['valid']:
    +        status = result_types.UNKNOWN
    +      elif isolated_script_output['failures']:
    +        status = result_types.FAIL
    +      result_sink_client.Post(test_name, status, None, None, None)
    +
    +
    +if __name__ == '__main__':
    +  main()
    diff --git a/android/resource_sizes.pydeps b/android/resource_sizes.pydeps
    new file mode 100644
    index 000000000000..86db3ff82585
    --- /dev/null
    +++ b/android/resource_sizes.pydeps
    @@ -0,0 +1,61 @@
    +# Generated by running:
    +#   build/print_python_deps.py --root build/android --output build/android/resource_sizes.pydeps build/android/resource_sizes.py
    +../../third_party/catapult/common/py_utils/py_utils/__init__.py
    +../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py
    +../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
    +../../third_party/catapult/common/py_utils/py_utils/lock.py
    +../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
    +../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
    +../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
    +../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py
    +../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py
    +../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py
    +../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py
    +../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py
    +../../third_party/catapult/dependency_manager/dependency_manager/manager.py
    +../../third_party/catapult/dependency_manager/dependency_manager/uploader.py
    +../../third_party/catapult/devil/devil/__init__.py
    +../../third_party/catapult/devil/devil/android/__init__.py
    +../../third_party/catapult/devil/devil/android/constants/__init__.py
    +../../third_party/catapult/devil/devil/android/constants/chrome.py
    +../../third_party/catapult/devil/devil/android/ndk/__init__.py
    +../../third_party/catapult/devil/devil/android/ndk/abis.py
    +../../third_party/catapult/devil/devil/android/sdk/__init__.py
    +../../third_party/catapult/devil/devil/android/sdk/build_tools.py
    +../../third_party/catapult/devil/devil/android/sdk/keyevent.py
    +../../third_party/catapult/devil/devil/android/sdk/version_codes.py
    +../../third_party/catapult/devil/devil/base_error.py
    +../../third_party/catapult/devil/devil/constants/__init__.py
    +../../third_party/catapult/devil/devil/constants/exit_codes.py
    +../../third_party/catapult/devil/devil/devil_env.py
    +../../third_party/catapult/devil/devil/utils/__init__.py
    +../../third_party/catapult/devil/devil/utils/cmd_helper.py
    +../../third_party/catapult/devil/devil/utils/lazy/__init__.py
    +../../third_party/catapult/devil/devil/utils/lazy/weak_constant.py
    +../../third_party/catapult/devil/devil/utils/reraiser_thread.py
    +../../third_party/catapult/devil/devil/utils/timeout_retry.py
    +../../third_party/catapult/devil/devil/utils/watchdog_timer.py
    +../../third_party/catapult/third_party/six/six.py
    +../../third_party/catapult/third_party/vinn/vinn/__init__.py
    +../../third_party/catapult/third_party/vinn/vinn/_vinn.py
    +../../third_party/catapult/tracing/tracing/__init__.py
    +../../third_party/catapult/tracing/tracing/value/__init__.py
    +../../third_party/catapult/tracing/tracing/value/convert_chart_json.py
    +../../third_party/catapult/tracing/tracing_project.py
    +../gn_helpers.py
    +../util/lib/__init__.py
    +../util/lib/common/perf_result_data_type.py
    +../util/lib/common/perf_tests_results_helper.py
    +../util/lib/results/__init__.py
    +../util/lib/results/result_sink.py
    +../util/lib/results/result_types.py
    +devil_chromium.py
    +gyp/util/__init__.py
    +gyp/util/build_utils.py
    +method_count.py
    +pylib/__init__.py
    +pylib/constants/__init__.py
    +pylib/constants/host_paths.py
    +pylib/dex/__init__.py
    +pylib/dex/dex_parser.py
    +resource_sizes.py
    diff --git a/android/screenshot.py b/android/screenshot.py
    new file mode 100755
    index 000000000000..6366e85555ae
    --- /dev/null
    +++ b/android/screenshot.py
    @@ -0,0 +1,13 @@
    +#!/usr/bin/env vpython3
    +# Copyright 2015 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +import sys
    +
    +import devil_chromium
    +from devil.android.tools import screenshot
    +
    +if __name__ == '__main__':
    +  devil_chromium.Initialize()
    +  sys.exit(screenshot.main())
    diff --git a/android/stacktrace/BUILD.gn b/android/stacktrace/BUILD.gn
    new file mode 100644
    index 000000000000..0501a96706f1
    --- /dev/null
    +++ b/android/stacktrace/BUILD.gn
    @@ -0,0 +1,24 @@
    +# Copyright 2017 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +import("//build/config/android/rules.gni")
    +
    +java_library("java_deobfuscate_java") {
    +  sources = [ "java/org/chromium/build/FlushingReTrace.java" ]
    +
    +  # Avoid using java_prebuilt() to ensure all uses go through the checked-in
    +  # wrapper script.
    +  input_jars_paths = [ "//third_party/r8/lib/r8.jar" ]
    +}
    +
    +# Use the checked-in copy of the wrapper script & .jar rather than the built
    +# one to simplify usage of the tool.
    +group("java_deobfuscate") {
    +  data = [
    +    "java_deobfuscate.py",
    +    "java_deobfuscate_java.jar",
    +    "//third_party/r8/lib/r8.jar",
    +  ]
    +  deps = [ "//third_party/jdk:java_data" ]
    +}
    diff --git a/android/stacktrace/README.md b/android/stacktrace/README.md
    new file mode 100644
    index 000000000000..528af2278860
    --- /dev/null
    +++ b/android/stacktrace/README.md
    @@ -0,0 +1,28 @@
    +# java_deobfuscate.py
    +
    +A wrapper around ProGuard's ReTrace tool, which:
    +
    +1) Updates the regular expression used to identify stack lines, and
    +2) Streams its output.
    +
    +The second point here is what allows you to run:
    +
    +    adb logcat | build/android/stacktrace/java_deobfuscate.py out/Default/apks/ChromePublic.apk.mapping
    +
    +And have it actually show output without logcat terminating.
    +
    +
    +## Update Instructions:
    +
    +    ninja -C out/Release java_deobfuscate_java
    +    cp out/Release/lib.java/build/android/stacktrace/java_deobfuscate_java.jar build/android/stacktrace
    +
    +# stackwalker.py
    +
    +Extracts Breakpad microdumps from a log file and uses `stackwalker` to symbolize
    +them.
    +
    +
    +# crashpad_stackwalker.py
    +
    +Fetches Crashpad dumps from a given device, walks and symbolizes the stacks.
    diff --git a/android/stacktrace/crashpad_stackwalker.py b/android/stacktrace/crashpad_stackwalker.py
    new file mode 100755
    index 000000000000..9703b7c88da7
    --- /dev/null
    +++ b/android/stacktrace/crashpad_stackwalker.py
    @@ -0,0 +1,175 @@
    +#!/usr/bin/env vpython3
    +#
    +# Copyright 2019 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +# Fetches Crashpad dumps from a given device, walks and symbolizes the stacks.
    +# All the non-trivial operations are performed by generate_breakpad_symbols.py,
    +# dump_syms, minidump_dump and minidump_stackwalk.
    +
    +import argparse
    +import logging
    +import os
    +import posixpath
    +import re
    +import sys
    +import shutil
    +import subprocess
    +import tempfile
    +
    +_BUILD_ANDROID_PATH = os.path.abspath(
    +    os.path.join(os.path.dirname(__file__), '..'))
    +sys.path.append(_BUILD_ANDROID_PATH)
    +import devil_chromium
    +from devil.android import device_utils
    +from devil.utils import timeout_retry
    +
    +
    +def _CreateSymbolsDir(build_path, dynamic_library_names):
    +  generator = os.path.normpath(
    +      os.path.join(_BUILD_ANDROID_PATH, '..', '..', 'components', 'crash',
    +                   'content', 'tools', 'generate_breakpad_symbols.py'))
    +  syms_dir = os.path.join(build_path, 'crashpad_syms')
    +  shutil.rmtree(syms_dir, ignore_errors=True)
    +  os.mkdir(syms_dir)
    +  for lib in dynamic_library_names:
    +    unstripped_library_path = os.path.join(build_path, 'lib.unstripped', lib)
    +    if not os.path.exists(unstripped_library_path):
    +      continue
    +    logging.info('Generating symbols for: %s', unstripped_library_path)
    +    cmd = [
    +        generator,
    +        '--symbols-dir',
    +        syms_dir,
    +        '--build-dir',
    +        build_path,
    +        '--binary',
    +        unstripped_library_path,
    +        '--platform',
    +        'android',
    +    ]
    +    return_code = subprocess.call(cmd)
    +    if return_code != 0:
    +      logging.error('Could not extract symbols, command failed: %s',
    +                    ' '.join(cmd))
    +  return syms_dir
    +
    +
    +def _ChooseLatestCrashpadDump(device, crashpad_dump_path):
    +  if not device.PathExists(crashpad_dump_path):
    +    logging.warning('Crashpad dump directory does not exist: %s',
    +                    crashpad_dump_path)
    +    return None
    +  latest = None
    +  latest_timestamp = 0
    +  for crashpad_file in device.ListDirectory(crashpad_dump_path):
    +    if crashpad_file.endswith('.dmp'):
    +      stat = device.StatPath(posixpath.join(crashpad_dump_path, crashpad_file))
    +      current_timestamp = stat['st_mtime']
    +      if current_timestamp > latest_timestamp:
    +        latest_timestamp = current_timestamp
    +        latest = crashpad_file
    +  return latest
    +
    +
    +def _ExtractLibraryNamesFromDump(build_path, dump_path):
    +  default_library_name = 'libmonochrome.so'
    +  dumper_path = os.path.join(build_path, 'minidump_dump')
    +  if not os.access(dumper_path, os.X_OK):
    +    logging.warning(
    +        'Cannot extract library name from dump because %s is not found, '
    +        'default to: %s', dumper_path, default_library_name)
    +    return [default_library_name]
    +  p = subprocess.Popen([dumper_path, dump_path],
    +                       stdout=subprocess.PIPE,
    +                       stderr=subprocess.PIPE,
    +                       universal_newlines=True)
    +  stdout, stderr = p.communicate()
    +  if p.returncode != 0:
    +    # Dumper errors often do not affect stack walkability, just a warning.
    +    logging.warning('Reading minidump failed with output:\n%s', stderr)
    +
    +  library_names = []
    +  module_library_line_re = re.compile(r'[(]code_file[)]\s+= '
    +                                      r'"(?P<library_name>lib[^. ]+.so)"')
    +  in_module = False
    +  for line in stdout.splitlines():
    +    line = line.lstrip().rstrip('\n')
    +    if line == 'MDRawModule':
    +      in_module = True
    +      continue
    +    if line == '':
    +      in_module = False
    +      continue
    +    if in_module:
    +      m = module_library_line_re.match(line)
    +      if m:
    +        library_names.append(m.group('library_name'))
    +  if not library_names:
    +    logging.warning(
    +        'Could not find any library name in the dump, '
    +        'default to: %s', default_library_name)
    +    return [default_library_name]
    +  return library_names
    +
    +
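    +# minidump_dump prints module records like:
    +#   MDRawModule
    +#     (code_file)    = "libmonochrome.so"
    +# which module_library_line_re above matches to recover library names.
    +
    +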
    +def main():
    +  logging.basicConfig(level=logging.INFO)
    +  parser = argparse.ArgumentParser(
    +      description='Fetches Crashpad dumps from a given device, '
    +      'walks and symbolizes the stacks.')
    +  parser.add_argument('--device', required=True, help='Device serial number')
    +  parser.add_argument('--adb-path', help='Path to the "adb" command')
    +  parser.add_argument(
    +      '--build-path',
    +      required=True,
    +      help='Build output directory, equivalent to CHROMIUM_OUTPUT_DIR')
    +  parser.add_argument(
    +      '--chrome-cache-path',
    +      required=True,
    +      help='Directory on the device where Chrome stores cached files;'
    +      ' Crashpad stores dumps in a subdirectory of it')
    +  args = parser.parse_args()
    +
    +  stackwalk_path = os.path.join(args.build_path, 'minidump_stackwalk')
    +  if not os.path.exists(stackwalk_path):
    +    logging.error('Missing minidump_stackwalk executable')
    +    return 1
    +
    +  devil_chromium.Initialize(output_directory=args.build_path,
    +                            adb_path=args.adb_path)
    +  device = device_utils.DeviceUtils(args.device)
    +
    +  device_crashpad_path = posixpath.join(args.chrome_cache_path, 'Crashpad',
    +                                        'pending')
    +
    +  def CrashpadDumpExists():
    +    return _ChooseLatestCrashpadDump(device, device_crashpad_path)
    +
    +  crashpad_file = timeout_retry.WaitFor(
    +      CrashpadDumpExists, wait_period=1, max_tries=9)
    +  if not crashpad_file:
    +    logging.error('Could not locate a crashpad dump')
    +    return 1
    +
    +  dump_dir = tempfile.mkdtemp()
    +  symbols_dir = None
    +  try:
    +    device.PullFile(
    +        device_path=posixpath.join(device_crashpad_path, crashpad_file),
    +        host_path=dump_dir)
    +    dump_full_path = os.path.join(dump_dir, crashpad_file)
    +    library_names = _ExtractLibraryNamesFromDump(args.build_path,
    +                                                 dump_full_path)
    +    symbols_dir = _CreateSymbolsDir(args.build_path, library_names)
    +    stackwalk_cmd = [stackwalk_path, dump_full_path, symbols_dir]
    +    subprocess.call(stackwalk_cmd)
    +  finally:
    +    shutil.rmtree(dump_dir, ignore_errors=True)
    +    if symbols_dir:
    +      shutil.rmtree(symbols_dir, ignore_errors=True)
    +  return 0
    +
    +
    +if __name__ == '__main__':
    +  sys.exit(main())
    diff --git a/android/stacktrace/java/org/chromium/build/FlushingReTrace.java b/android/stacktrace/java/org/chromium/build/FlushingReTrace.java
    new file mode 100644
    index 000000000000..3e27197542a0
    --- /dev/null
    +++ b/android/stacktrace/java/org/chromium/build/FlushingReTrace.java
    @@ -0,0 +1,146 @@
    +// Copyright 2017 The Chromium Authors
    +// Use of this source code is governed by a BSD-style license that can be
    +// found in the LICENSE file.
    +
    +package org.chromium.build;
    +
    +import com.android.tools.r8.DiagnosticsHandler;
    +import com.android.tools.r8.retrace.ProguardMappingSupplier;
    +import com.android.tools.r8.retrace.Retrace;
    +import com.android.tools.r8.retrace.RetraceCommand;
    +import com.android.tools.r8.retrace.StackTraceSupplier;
    +
    +import java.io.BufferedReader;
    +import java.io.FileInputStream;
    +import java.io.IOException;
    +import java.io.InputStreamReader;
    +import java.util.Collections;
    +import java.util.List;
    +
    +/**
    + * A wrapper around ReTrace that:
    + *  1. Hardcodes a more useful line regular expression
    + *  2. Disables output buffering
    + */
    +public class FlushingReTrace {
    +    // E.g.: D/ConnectivityService(18029): Message
    +    // E.g.: W/GCM     ( 151): Message
    +    // E.g.: 09-08 14:22:59.995 18029 18055 I ProcessStatsService: Message
    +    // E.g.: 09-08 14:30:59.145 17731 18020 D MDnsDS  : Message
    +    private static final String LOGCAT_PREFIX =
    +            "(?:[VDIWEF]/.*?\\( *\\d+\\): |\\d\\d-\\d\\d [0-9:. ]+[VDIWEF] .*?: )?";
    +
    +    // Note: Order of these sub-patterns defines their precedence.
    +    // Note: Deobfuscation of methods without the presence of line numbers basically never works.
    +    // There is a test for these patterns at //build/android/stacktrace/java_deobfuscate_test.py
    +    private static final String LINE_PARSE_REGEX =
    +            // Eagerly match logcat prefix to avoid conflicting with the patterns below.
    +            LOGCAT_PREFIX
    +            + "(?:"
+            // Based on the default ReTrace regex, but with whitespace allowed in file:line
+            // parentheses and "at" changed to also allow ':'.
    +            // E.g.: 06-22 13:58:02.895  4674  4674 E THREAD_STATE:     bLA.a( PG : 173 )
    +            // Normal stack trace lines look like:
    +            // \tat org.chromium.chrome.browser.tab.Tab.handleJavaCrash(Tab.java:682)
    +            + "(?:.*?(?::|\\bat)\\s+%c\\.%m\\s*\\(\\s*%s(?:\\s*:\\s*%l\\s*)?\\))|"
    +            // Stack trace from crbug.com/1300215 looks like:
    +            // 0xffffffff (chromium-TrichromeChromeGoogle.aab-canary-490400033: 70) ii2.p
    +            + "(?:.*?\\(\\s*%s(?:\\s*:\\s*%l\\s*)?\\)\\s*%c\\.%m)|"
    +            // E.g.: Caused by: java.lang.NullPointerException: Attempt to read from field 'int bLA'
    +            // on a null object reference
    +            + "(?:.*java\\.lang\\.NullPointerException.*[\"']%t\\s*%c\\.(?:%f|%m\\(%a\\))[\"'].*)|"
    +            // E.g.: java.lang.VerifyError: bLA
    +            + "(?:java\\.lang\\.VerifyError: %c)|"
    +            // E.g.: java.lang.NoSuchFieldError: No instance field e of type L...; in class LbxK;
    +            + "(?:java\\.lang\\.NoSuchFieldError: No instance field %f of type .*? in class L%C;)|"
    +            // E.g.: Object of type Clazz was not destroyed... (See LifetimeAssert.java)
    +            + "(?:.*?Object of type %c .*)|"
    +            // E.g.: VFY: unable to resolve new-instance 3810 (LSome/Framework/Class;) in Lfoo/Bar;
    +            + "(?:.*L%C;.*)|"
    +            // E.g.: END SomeTestClass#someMethod
    +            + "(?:.*?%c#%m.*?)|"
    +            // Special-case for a common junit logcat message:
    +            // E.g.: java.lang.NoClassDefFoundError: SomeFrameworkClass in isTestClass for Foo
    +            + "(?:.* isTestClass for %c)|"
    +            // E.g.: Caused by: java.lang.RuntimeException: Intentional Java Crash
    +            + "(?:Caused by: %c:.*)|"
    +            // Quoted values and lines that end with a class / class+method:
    +            // E.g.: The class: Foo
    +            // E.g.: INSTRUMENTATION_STATUS: class=Foo
    +            // E.g.: NoClassDefFoundError: SomeFrameworkClass in isTestClass for Foo
    +            // E.g.: Could not find class 'SomeFrameworkClass', referenced from method Foo.bar
    +            // E.g.: Could not find method SomeFrameworkMethod, referenced from method Foo.bar
    +            // E.g.: The member "Foo.bar"
    +            // E.g.: The class "Foobar"
    +            // Be careful about matching %c without %m since language tags look like class names.
    +            + "(?:.*?%c\\.%m)|"
    +            + "(?:.*?\"%c\\.%m\".*)|"
    +            + "(?:.*\\b(?:[Cc]lass|[Tt]ype)\\b.*?\"%c\".*)|"
    +            + "(?:.*\\b(?:[Cc]lass|[Tt]ype)\\b.*?%c)|"
    +            // E.g.: java.lang.RuntimeException: Intentional Java Crash
    +            + "(?:%c:.*)|"
    +            // See if entire line matches a class name (e.g. for manual deobfuscation)
    +            + "(?:%c)"
    +            + ")";
    +
    +    private static void usage() {
    +        System.err.println("Usage: echo $OBFUSCATED_CLASS | java_deobfuscate Foo.apk.mapping");
    +        System.err.println("Usage: java_deobfuscate Foo.apk.mapping < foo.log");
    +        System.err.println("Note: Deobfuscation of symbols outside the context of stack "
    +                + "traces will work only when lines match the regular expression defined "
    +                + "in FlushingReTrace.java.");
    +        System.err.println("Also: Deobfuscation of method names without associated line "
    +                + "numbers does not seem to work.");
    +        System.exit(1);
    +    }
    +
    +    public static void main(String[] args) {
    +        if (args.length != 1 || args[0].startsWith("-")) {
    +            usage();
    +        }
    +
    +        try {
    +            ProguardMappingSupplier mappingSupplier =
    +                    ProguardMappingSupplier.builder()
    +                            .setProguardMapProducer(() -> new FileInputStream(args[0]))
    +                            .build();
+            // Force eager parsing of the .mapping file (~10 second operation). It otherwise would
    +            // not happen until the first line of input is received.
    +            // https://crbug.com/1351023
    +            mappingSupplier.createRetracer(new DiagnosticsHandler() {});
    +
    +            // This whole command was given to us by the R8 team in b/234758957.
    +            RetraceCommand retraceCommand =
    +                    RetraceCommand.builder()
    +                            .setMappingSupplier(mappingSupplier)
    +                            .setRetracedStackTraceConsumer(
    +                                    retraced -> retraced.forEach(System.out::println))
    +                            .setRegularExpression(LINE_PARSE_REGEX)
    +                            .setStackTrace(new StackTraceSupplier() {
    +                                final BufferedReader mReader = new BufferedReader(
    +                                        new InputStreamReader(System.in, "UTF-8"));
    +
    +                                @Override
+                                public List<String> get() {
    +                                    try {
    +                                        String line = mReader.readLine();
    +                                        if (line == null) {
    +                                            return null;
    +                                        }
    +                                        return Collections.singletonList(line);
    +                                    } catch (IOException e) {
    +                                        e.printStackTrace();
    +                                        return null;
    +                                    }
    +                                }
    +                            })
    +                            .build();
    +            Retrace.run(retraceCommand);
    +        } catch (IOException ex) {
    +            // Print a verbose stack trace.
    +            ex.printStackTrace();
    +            System.exit(1);
    +        }
    +        System.exit(0);
    +    }
    +}
    diff --git a/android/stacktrace/java_deobfuscate.py b/android/stacktrace/java_deobfuscate.py
    new file mode 100755
    index 000000000000..fa872d98ebf1
    --- /dev/null
    +++ b/android/stacktrace/java_deobfuscate.py
    @@ -0,0 +1,36 @@
    +#!/usr/bin/env python3
    +#
    +# Copyright 2020 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +"""Wrapper script for java_deobfuscate.
    +
    +This is also a buildable target, but having it pre-built here simplifies usage.
    +"""
    +
    +import os
    +import sys
    +
    +DIR_SOURCE_ROOT = os.path.normpath(
    +    os.path.join(os.path.dirname(__file__), '../../../'))
    +
    +def main():
    +  classpath = [
    +      os.path.join(DIR_SOURCE_ROOT, 'build', 'android', 'stacktrace',
    +                   'java_deobfuscate_java.jar'),
    +      os.path.join(DIR_SOURCE_ROOT, 'third_party', 'r8', 'lib', 'r8.jar')
    +  ]
    +  java_path = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'current',
    +                           'bin', 'java')
    +
    +  cmd = [
    +      java_path, '-classpath', ':'.join(classpath),
    +      'org.chromium.build.FlushingReTrace'
    +  ]
    +  cmd.extend(sys.argv[1:])
    +
    +  os.execvp(cmd[0], cmd)
    +
    +
    +if __name__ == '__main__':
    +  main()
    diff --git a/android/stacktrace/java_deobfuscate_java.jar b/android/stacktrace/java_deobfuscate_java.jar
    new file mode 100644
    index 0000000000000000000000000000000000000000..8f31b76c932eecd715d04a223647f354f46933c7
    GIT binary patch
    literal 7643
    zcmc&(`*$0~75-MXv$DKS9LEXdL0E#7*fNpw021sFCsE|YL~)GmI2g->wY0W3S!vDg
    zDv8?^D9-}rRr&(@pe-$ZH>BX0qy_rqoSvT3zokF5KlHTyX0@{9S1eM_sgCwBGk5O2
    zbLY-CckcEctEz57;dfi)fx5oO0|7u>5ms9Ecq~0(TUjHYjg96FGZX7F^Uj2k8}HKx
    zY%Q%f?~JBR&2j$n@2elO!yar1p$cncG|5;iW1WN*`;467?w3&2(mE)iI&Nij39FJu
    zPVdQQM|FEZ8#P(en6%QGIjGr&DEpPw?u6k;P`YzD-Hr=EI*VJAl_=IBA=Gc>?X=!y
    z2mwv=RF9s~&S}&nkxQGF!yz5l-3cor<0b_+W4(l6kM6ifEyq<*gB22*E0HRpLGX>4
    zT5ddcV)Trjb|q{}TiKYF%h;BYiMf_#Ix%~1?69GY=g8hjJ4e}Q>b8U}%gdk*vzn2U
    zu&!k|S(4T7+8jei>mXDFI0q!-gh;%oJfAY(Vtjfvkt>kiWkmo%-bFKzLoM%0fdC=C
    zfh_8I4`3x$d8G!dMx$5LfI2=yq^7TkU+)}G`sAO_?0KyQ5Lbnsv}>Lx9S>3L9Wu7d
    zh}Kf|9YO3A&b2E)aV`=?J_!K?Bjs5eY%zrM!mUUCc}yy
    z%8!ldww@`p&_n)T-nmP7XW~LypL7i?=g7F13uIiUuQax_7SzqVh8as5j!V*NlgmY{
    zkp=V331!3D*%5Z1nB~fT_5H4vK6_l7^wyIMF2mY>V?3w1d7Is~E|9fvp@jWH)f-w!
    zoeB=%;GBijh&6OvZxjorL&ExELw9a6@6w}n&7VXWhZMwdSVGOofv)X)6}n+U+`1Id
    zDHR;SQ5oF|?!z$&Yi9YBtf3$&WLL6?cP+YzV~Z|gTXzKx#ymrNkl3G}oHPyLcg;mL
    zPUaj|TDGfah&EP+3w3su=Ey26;?D#I=JK6#Ts_ORDsZFBSxRQ+Bu83GUR2hw&?5wn
    zuPG7xi%HZ5Ib#_C?S$K+Z+S@QqZ5UDHt~ErZkZG}
    zMHa@YNunM-gS8EAQ-uZz|b{R4P8cc
    zW6c$;xB~fo1Pj=>mQ(^_fb&Gc3s}du0zPiSO<2#;242L*zXK
    zr%{`1`vf)8rM4M_E+yNhp-7j!_Im16M{R{}bwoBHZ?gx~j}n(3qpZi-`UG2_q~=d?
    zS9;phK_zXFJvQT3N@^m0tGJDCLZ5e-
    zLkpkPXeCa!c{QyRRDgB?{`}2__P<^Fd-nhhDK61<`5kK>uzw;W8DiJtwdlc#AbLZ%
    zAALde2Qd)D$q;J?gE$q!0~it;%!47$;;`6u9u7gnsJNwNaHrNov>o?})ffw591|fJ
    zIMc{3Iq_x%F)5yp$go44A`%*K#LW#Mk8?84hiJJALXD{qK8MdY(lU?M(ejVW_+p3#
    zdP2sN3&M~3MnjtL5B5&CKgTJVkd*cK0Ms_a5x)Pn_;c
    z97(Y2XR-Y>R}c+xb=Tw_ZJg`!`oRMiPF9Lq9BUQ{g6400ESp+(G@~_($fIA=keGf@NyB+K1OkKE8D}UZP{jD*>+kdqR->Hh>$tT3gSXV
    z#7HupR`3j-m2lvsnD2H~Pfu8C^NB-UC;RENiNmMk$%Fm<>P1xm@#&0ijgIA=wC3t+
    zmt{q@$+J=40c3nh!E<KJ*L*0_W>dMuaH56|)^%&XRz>P%%v8ULzQ
    z-gS(OuDTPtnznMTe!=yc1(K`YBsl7M!!*_NmVH*Wa^{qJenQWwA`o)atmdXCyuP+R
    zo;NjH)h|riy5k5b8GVdI8P&+Ci-r7ABFaJ2u@+G->zx49oR+1G^9GLtc~|A3!%7=8
    zP{tFk=Dbsjqh>4;ectgRP_?Cn_Z)D+B7US$x@NIlY#&?BkZ^`(c
    zg74!84S1Wn{ICIW1wX<&3Vw{ANP$F4XZ!Hr;qFt3u8~+Y(wS;eBdJVVs>C-m(3j_NY42RwPwP+OD>CvN!)M@ZNqgHLAI-DW@%*PDR7nloLs{@D_I1$fvl%
    zCU32ssn*tug?>v@2x^ayPbxe#6%{csSB$J*Yu9fYBC(}C=Ro5TAZZ|r%CF!39{KjXE=R(I7>EuqiQ$<#0u|t%mru{
    zb&P9yhfX^>CA6mfi*n{ztaD4DxMj9IHA=G#$I~O?xP5VWz#XC1t*KE8ByB}>zL82&
    zS<$NCr}$ZA&bdp$&+!Yf2!1K!R|+oS-KB#1%VKf;8qdmjPr>_`UR8sd20Sm^_lk@e
    z1s~w5gqy^C#t6zH6;KAu0y7D4iyYba3nSdP&(Go6??>NNy3JETDg5p&#IE9$!yRJw
    z&{9^-Q>+)@nj&m3kBI$v+RLcu7Yqi>)h*qkR5q02VIo9Nv|Z;E@oWh$4^obo-Vvu3
    z`H)ETh_?d_;co5+EXP!(uuO3X%A61o?|q3gT(k|OFUqBJ526U)p@iMDt=x#7u)
    zeL7F%bT(JwfrK+VPqf~#%t+>%gl@r;*SuT?Suy^Uu-e!#G&EyL%3?maveaBi(8!E55?Y#Din@&Z4a)dP!N>T6
    zf-W3Ua0qc8Xgq#icE`w_v(D5~
    zT%SQ9rY@w?9iEB>Wk;C_{5`X*W=_WehB@Wrl{XedUcawz-|_*+HPh@jT%GCaKT8NN
    zFvw!#k+AvtbY3Dzq-6q328oQ0ggd;_c9x@M<@Nk7Ac-R*?>k9R{%s&o$bao(b>Z8<
    zM(pN$@!P!F&VR14B$^
    zJ;+quF!N3iF(LCXb5t5XM(n}ff^O0pCcLUhc_#8<^;=jOsoK>TnnvC0STO@-2KAAv
    z%#&dntESQLF;*wruAy=03f4@c>C_C?GUc}JDl=oGxVdrtG&WqrMmBGn#x2v>eEg0X
    z-1-g_k(s+q=vU?GC=zQ!NKqeta-i%Urs04Lb4@y13?}=|uQr};_WO<
    zWc{~`_(Jb7DbQ54^ao5&0&j1N0acKxEPl?M&v*XktP FOO:
    +    int[] mFontFamily -> a
    +    1:3:void someMethod(int,android.os.Bundle):65:67 -> bar
    +never.Deobfuscated -> NOTFOO:
    +    int[] mFontFamily -> a
    +    1:3:void someMethod(int,android.os.Bundle):65:67 -> bar
    +"""
    +
    +TEST_DATA = [
    +    '',
    +    'FOO',
    +    'FOO.bar',
    +    'Here is a FOO',
    +    'Here is a class FOO',
    +    'Here is a class FOO baz',
    +    'Here is a "FOO" baz',
    +    'Here is a type "FOO" baz',
    +    'Here is a "FOO.bar" baz',
    +    'SomeError: SomeFrameworkClass in isTestClass for FOO',
    +    'Here is a FOO.bar',
    +    'Here is a FOO.bar baz',
    +    'END FOO#bar',
    +    'new-instance 3810 (LSome/Framework/Class;) in LFOO;',
    +    'FOO: Error message',
    +    'Caused by: FOO: Error message',
    +    '\tat FOO.bar(PG:1)',
    +    '\t at\t FOO.bar\t (\t PG:\t 1\t )',
    +    '0xfff \t( \tPG:\t 1 \t)\tFOO.bar',
    +    ('Unable to start activity ComponentInfo{garbage.in/here.test}:'
    +     ' java.lang.NullPointerException: Attempt to invoke interface method'
    +     ' \'void FOO.bar(int,android.os.Bundle)\' on a null object reference'),
    +    ('Caused by: java.lang.NullPointerException: Attempt to read from field'
    +     ' \'int[] FOO.a\' on a null object reference'),
    +    'java.lang.VerifyError: FOO',
    +    ('java.lang.NoSuchFieldError: No instance field a of type '
    +     'Ljava/lang/Class; in class LFOO;'),
    +    'NOTFOO: Object of type FOO was not destroyed...',
    +]
    +
    +EXPECTED_OUTPUT = [
    +    '',
    +    'this.was.Deobfuscated',
    +    'this.was.Deobfuscated.someMethod',
    +    'Here is a FOO',
    +    'Here is a class this.was.Deobfuscated',
    +    'Here is a class FOO baz',
    +    'Here is a "FOO" baz',
    +    'Here is a type "this.was.Deobfuscated" baz',
    +    'Here is a "this.was.Deobfuscated.someMethod" baz',
    +    'SomeError: SomeFrameworkClass in isTestClass for this.was.Deobfuscated',
    +    'Here is a this.was.Deobfuscated.someMethod',
    +    'Here is a FOO.bar baz',
    +    'END this.was.Deobfuscated#someMethod',
    +    'new-instance 3810 (LSome/Framework/Class;) in Lthis/was/Deobfuscated;',
    +    'this.was.Deobfuscated: Error message',
    +    'Caused by: this.was.Deobfuscated: Error message',
    +    '\tat this.was.Deobfuscated.someMethod(Deobfuscated.java:65)',
    +    ('\t at\t this.was.Deobfuscated.someMethod\t '
    +     '(\t Deobfuscated.java:\t 65\t )'),
    +    '0xfff \t( \tDeobfuscated.java:\t 65 \t)\tthis.was.Deobfuscated.someMethod',
    +    ('Unable to start activity ComponentInfo{garbage.in/here.test}:'
    +     ' java.lang.NullPointerException: Attempt to invoke interface method'
    +     ' \'void this.was.Deobfuscated.someMethod(int,android.os.Bundle)\' on a'
    +     ' null object reference'),
    +    ('Caused by: java.lang.NullPointerException: Attempt to read from field'
    +     ' \'int[] this.was.Deobfuscated.mFontFamily\' on a null object reference'),
    +    'java.lang.VerifyError: this.was.Deobfuscated',
    +    ('java.lang.NoSuchFieldError: No instance field mFontFamily of type '
    +     'Ljava/lang/Class; in class Lthis/was/Deobfuscated;'),
    +    'NOTFOO: Object of type this.was.Deobfuscated was not destroyed...',
    +]
    +TEST_DATA = [s + '\n' for s in TEST_DATA]
    +EXPECTED_OUTPUT = [s + '\n' for s in EXPECTED_OUTPUT]
    +
    +
    +class JavaDeobfuscateTest(unittest.TestCase):
    +
    +  def __init__(self, *args, **kwargs):
    +    super().__init__(*args, **kwargs)
    +    self._map_file = None
    +
    +  def setUp(self):
    +    self._map_file = tempfile.NamedTemporaryFile()
    +    self._map_file.write(TEST_MAP.encode('utf-8'))
    +    self._map_file.flush()
    +
    +  def tearDown(self):
    +    if self._map_file:
    +      self._map_file.close()
    +
    +  def _testImpl(self, input_lines=None, expected_output_lines=None,
    +                prefix=''):
    +    self.assertTrue(bool(input_lines) == bool(expected_output_lines))
    +
    +    if not input_lines:
    +      input_lines = [prefix + x for x in TEST_DATA]
    +    if not expected_output_lines:
    +      expected_output_lines = [prefix + x for x in EXPECTED_OUTPUT]
    +
    +    cmd = [_JAVA_DEOBFUSCATE_PATH, self._map_file.name]
    +    proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    +    proc_output, _ = proc.communicate(''.join(input_lines).encode())
    +    actual_output_lines = proc_output.decode().splitlines(True)
    +    for actual, expected in zip(actual_output_lines, expected_output_lines):
    +      self.assertTrue(
    +          actual == expected or actual.replace('bar', 'someMethod') == expected,
    +          msg=''.join([
    +              'Deobfuscation failed.\n',
    +              '  actual:   %s' % actual,
    +              '  expected: %s' % expected]))
    +
    +  def testNoPrefix(self):
    +    self._testImpl(prefix='')
    +
    +  def testThreadtimePrefix(self):
    +    self._testImpl(prefix='09-08 14:38:35.535 18029 18084 E qcom_sensors_hal: ')
    +
    +  def testStandardPrefix(self):
    +    self._testImpl(prefix='W/GCM     (15158): ')
    +
    +  def testStandardPrefixWithPadding(self):
    +    self._testImpl(prefix='W/GCM     (  158): ')
    +
    +  @unittest.skip('causes java_deobfuscate to hang, see crbug.com/876539')
    +  def testIndefiniteHang(self):
    +    # Test for crbug.com/876539.
    +    self._testImpl(
    +        input_lines=[
    +            'VFY: unable to resolve virtual method 2: LFOO;'
    +                + '.onDescendantInvalidated '
    +                + '(Landroid/view/View;Landroid/view/View;)V',
    +        ],
    +        expected_output_lines=[
    +            'VFY: unable to resolve virtual method 2: Lthis.was.Deobfuscated;'
    +                + '.onDescendantInvalidated '
    +                + '(Landroid/view/View;Landroid/view/View;)V',
    +        ])
    +
    +
    +if __name__ == '__main__':
    +  parser = argparse.ArgumentParser()
    +  parser.add_argument('--java-deobfuscate-path', type=os.path.realpath,
    +                      required=True)
    +  known_args, unittest_args = parser.parse_known_args()
    +  _JAVA_DEOBFUSCATE_PATH = known_args.java_deobfuscate_path
    +  unittest_args = [sys.argv[0]] + unittest_args
    +  unittest.main(argv=unittest_args)
    diff --git a/android/stacktrace/stackwalker.py b/android/stacktrace/stackwalker.py
    new file mode 100755
    index 000000000000..ad60e998281d
    --- /dev/null
    +++ b/android/stacktrace/stackwalker.py
    @@ -0,0 +1,136 @@
    +#!/usr/bin/env vpython3
    +#
    +# Copyright 2016 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +
    +import argparse
    +import os
    +import re
    +import sys
    +import tempfile
    +
    +if __name__ == '__main__':
    +  sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
    +from pylib.constants import host_paths
    +
    +if host_paths.DEVIL_PATH not in sys.path:
    +  sys.path.append(host_paths.DEVIL_PATH)
    +from devil.utils import cmd_helper
    +
    +
    +_MICRODUMP_BEGIN = re.compile(
    +    '.*google-breakpad: -----BEGIN BREAKPAD MICRODUMP-----')
    +_MICRODUMP_END = re.compile(
    +    '.*google-breakpad: -----END BREAKPAD MICRODUMP-----')
    +
    +""" Example Microdump
    +  6270  6131 F google-breakpad: -----BEGIN BREAKPAD MICRODUMP-----
    +  6270  6131 F google-breakpad: V Chrome_Android:54.0.2790.0
    +...
    +  6270  6131 F google-breakpad: -----END BREAKPAD MICRODUMP-----
    +
    +"""
    +
    +
    +def GetMicroDumps(dump_path):
    +  """Returns all microdumps found in given log file
    +
    +  Args:
    +    dump_path: Path to the log file.
    +
    +  Returns:
    +    List of all microdumps as lists of lines.
    +  """
    +  with open(dump_path, 'r') as d:
    +    data = d.read()
    +  all_dumps = []
    +  current_dump = None
    +  for line in data.splitlines():
    +    if current_dump is not None:
    +      if _MICRODUMP_END.match(line):
    +        current_dump.append(line)
    +        all_dumps.append(current_dump)
    +        current_dump = None
    +      else:
    +        current_dump.append(line)
    +    elif _MICRODUMP_BEGIN.match(line):
    +      current_dump = []
    +      current_dump.append(line)
    +  return all_dumps
    +
    +
    +def SymbolizeMicroDump(stackwalker_binary_path, dump, symbols_path):
    +  """Runs stackwalker on microdump.
    +
    +  Runs the stackwalker binary at stackwalker_binary_path on a given microdump
    +  using the symbols at symbols_path.
    +
    +  Args:
    +    stackwalker_binary_path: Path to the stackwalker binary.
    +    dump: The microdump to run the stackwalker on.
+    symbols_path: Path to the symbols file to use.
    +
    +  Returns:
    +    Output from stackwalker tool.
    +  """
    +  with tempfile.NamedTemporaryFile() as tf:
+    # NamedTemporaryFile is opened in binary mode by default, so encode each
+    # line and flush before handing the file to the stackwalker binary.
+    for l in dump:
+      tf.write(('%s\n' % l).encode('utf-8'))
+    tf.flush()
+    cmd = [stackwalker_binary_path, tf.name, symbols_path]
    +    return cmd_helper.GetCmdOutput(cmd)
    +
    +
    +def AddArguments(parser):
    +  parser.add_argument('--stackwalker-binary-path', required=True,
    +                      help='Path to stackwalker binary.')
    +  parser.add_argument('--stack-trace-path', required=True,
    +                      help='Path to stacktrace containing microdump.')
    +  parser.add_argument('--symbols-path', required=True,
    +                      help='Path to symbols file.')
    +  parser.add_argument('--output-file',
    +                      help='Path to dump stacktrace output to')
    +
    +
    +def _PrintAndLog(line, fp):
    +  if fp:
    +    fp.write('%s\n' % line)
    +  print(line)
    +
    +
    +def main():
    +  parser = argparse.ArgumentParser()
    +  AddArguments(parser)
    +  args = parser.parse_args()
    +
    +  micro_dumps = GetMicroDumps(args.stack_trace_path)
    +  if not micro_dumps:
    +    print('No microdump found. Exiting.')
    +    return 0
    +
    +  symbolized_dumps = []
    +  for micro_dump in micro_dumps:
    +    symbolized_dumps.append(SymbolizeMicroDump(
    +        args.stackwalker_binary_path, micro_dump, args.symbols_path))
    +
+  fp = open(args.output_file, 'w') if args.output_file else None
+  try:
+    _PrintAndLog('%d microdumps found.' % len(micro_dumps), fp)
+    _PrintAndLog('---------- Start output from stackwalker ----------', fp)
+    for index, symbolized_dump in enumerate(symbolized_dumps):
+      _PrintAndLog(
+          '------------------ Start dump %d ------------------' % index, fp)
+      _PrintAndLog(symbolized_dump, fp)
+      _PrintAndLog(
+          '------------------- End dump %d -------------------' % index, fp)
+    _PrintAndLog('----------- End output from stackwalker -----------', fp)
+  finally:
+    if fp:
+      fp.close()
    +  return 0
    +
    +
    +if __name__ == '__main__':
    +  sys.exit(main())
    diff --git a/android/test/BUILD.gn b/android/test/BUILD.gn
    new file mode 100644
    index 000000000000..e9bbbce1b423
    --- /dev/null
    +++ b/android/test/BUILD.gn
    @@ -0,0 +1,83 @@
    +# Copyright 2021 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +import("//build/config/android/android_nocompile.gni")
    +import("missing_symbol_test.gni")
    +import("nocompile_gn/nocompile_sources.gni")
    +
    +group("android_nocompile_tests") {
    +  testonly = true
    +
    +  # No-compile tests use an output directory dedicated to no-compile tests.
    +  # All test suites use targets in nocompile_gn/BUILD.gn in order to share the
    +  # same target output directory and avoid running 'gn gen' for each
    +  # android_nocompile_test_suite().
    +  deps = [
    +    ":android_lint_tests",
    +    ":android_lookup_dep_tests",
    +  ]
    +}
    +
    +android_nocompile_test_suite("android_lint_tests") {
+  # Depend on lint script so that the action is re-run whenever the script is modified.
    +  pydeps = [ "//build/android/gyp/lint.pydeps" ]
    +
    +  tests = [
    +    {
    +      target = "nocompile_gn:default_locale_lint_test"
    +      nocompile_sources =
    +          rebase_path(default_locale_lint_test_nocompile_sources,
    +                      "",
    +                      "nocompile_gn")
    +      expected_compile_output_regex = "Warning:.*DefaultLocale"
    +    },
    +    {
    +      target = "nocompile_gn:new_api_lint_test"
    +      nocompile_sources =
    +          rebase_path(new_api_lint_test_nocompile_sources, "", "nocompile_gn")
    +      expected_compile_output_regex = "Error:.*NewApi"
    +    },
    +  ]
    +}
    +
    +android_nocompile_test_suite("android_lookup_dep_tests") {
    +  sources =
    +      [ rebase_path(missing_symbol_generated_importer_template_nocompile_source,
    +                    "",
    +                    "nocompile_gn") ]
    +
    +  tests = [
    +    {
    +      target = "nocompile_gn:import_child_missing_symbol_test_java"
    +      nocompile_sources =
    +          rebase_path(import_child_missing_symbol_test_nocompile_sources,
    +                      "",
    +                      "nocompile_gn")
    +      expected_compile_output_regex = "Hint: Try adding the following to //build/android/test/nocompile_gn:import_child_missing_symbol_test_java\n *\"//build/android/test/nocompile_gn:sub_b_java\""
    +    },
    +    {
    +      target = "nocompile_gn:import_parent_missing_symbol_test_java"
    +      nocompile_sources = []
    +      expected_compile_output_regex = "Hint: Try adding the following to //build/android/test/nocompile_gn:import_parent_missing_symbol_test_java\n *\"//build/android/test/nocompile_gn:b_java\""
    +    },
    +    {
    +      target = "nocompile_gn:import_turbine_missing_symbol_test_java"
    +      nocompile_sources =
    +          rebase_path(import_turbine_missing_symbol_test_nocompile_sources,
    +                      "",
    +                      "nocompile_gn")
    +      expected_compile_output_regex = "Hint: Try adding the following to //build/android/test/nocompile_gn:import_turbine_missing_symbol_test_java\n *\"//build/android/test/nocompile_gn:b_java\""
    +    },
    +    {
    +      target = "nocompile_gn:prebuilt_missing_symbol_test_java"
    +      nocompile_sources = []
    +      expected_compile_output_regex = "Hint: Try adding the following to //build/android/test/nocompile_gn:prebuilt_missing_symbol_test_java\n *\"//build/android/test/nocompile_gn:c_prebuilt_java\""
    +    },
    +    {
    +      target = "nocompile_gn:cpp_template_missing_symbol_test_java"
    +      nocompile_sources = []
    +      expected_compile_output_regex = "Hint: Try adding the following to //build/android/test/nocompile_gn:cpp_template_missing_symbol_test_java\n *\"//build/android/test/nocompile_gn:d_java\""
    +    },
    +  ]
    +}
    diff --git a/android/test/incremental_javac_gn/BUILD.gn b/android/test/incremental_javac_gn/BUILD.gn
    new file mode 100644
    index 000000000000..9411497faf1c
    --- /dev/null
    +++ b/android/test/incremental_javac_gn/BUILD.gn
    @@ -0,0 +1,98 @@
    +# Copyright 2021 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +import("//build/config/android/rules.gni")
    +
    +declare_args() {
    +  incremental_javac_test_toggle_gn = false
    +}
    +
    +all_test_sources = [
    +  "../../java/test/NoSignatureChangeIncrementalJavacTestHelper.template",
    +  "../../java/test/NoSignatureChangeIncrementalJavacTestHelper2.java",
    +]
    +
    +template("incremental_javac_prebuilt") {
    +  _out_jar = "${target_gen_dir}/${target_name}.jar"
    +
    +  action(target_name) {
    +    script = "incremental_javac_test_android_library.py"
    +    forward_variables_from(invoker,
    +                           [
    +                             "sources",
    +                             "testonly",
    +                           ])
    +    deps = [ invoker.toggle_gn_target ]
    +
    +    inputs = []
    +    if (defined(invoker.pydeps)) {
    +      foreach(_pydeps_file, invoker.pydeps) {
    +        _pydeps_file_lines = []
    +        _pydeps_file_lines = read_file(_pydeps_file, "list lines")
    +        _pydeps_entries = []
    +        _pydeps_entries = filter_exclude(_pydeps_file_lines, [ "#*" ])
    +        _pydeps_file_dir = get_path_info(_pydeps_file, "dir")
    +        inputs += rebase_path(_pydeps_entries, ".", _pydeps_file_dir)
    +      }
    +    }
    +
    +    outputs = [ _out_jar ]
    +
    +    args = [
    +      "--target-name",
    +      get_label_info("${invoker.toggle_gn_target}", "label_no_toolchain"),
    +      "--gn-args-path",
    +      "args.gn",
    +      "--out-dir",
    +      rebase_path("${target_out_dir}/${target_name}/incremental_javac_out",
    +                  root_build_dir),
    +      "--out-jar",
    +      rebase_path(_out_jar, root_build_dir),
    +    ]
    +  }
    +}
    +
    +# Use jinja_template() instead of java_cpp_template() because incremental builds
    +# are not done when non-.java files change.
    +jinja_template("changing_javagen") {
    +  input = "../../java/test/NoSignatureChangeIncrementalJavacTestHelper.template"
    +  assert(filter_include(all_test_sources, [ input ]) != [])
    +  output =
    +      "${target_gen_dir}/test/NoSignatureChangeIncrementalJavacTestHelper.java"
    +  if (incremental_javac_test_toggle_gn) {
    +    variables = [ "foo_return_value=foo2" ]
    +  } else {
    +    variables = [ "foo_return_value=foo" ]
    +  }
    +}
    +
    +android_library("changing_java") {
    +  testonly = true
    +
    +  # Should not be re-compiled during incremental build.
    +  sources =
    +      [ "../../java/test/NoSignatureChangeIncrementalJavacTestHelper2.java" ]
    +  assert(filter_include(all_test_sources, sources) != [])
    +
    +  # Should be recompiled during incremental build.
    +  sources += get_target_outputs(":changing_javagen")
    +  deps = [ ":changing_javagen" ]
    +}
    +
    +# Compiles :changing_java with and without |incremental_javac_test_toggle_gn|.
    +incremental_javac_prebuilt("no_signature_change_prebuilt_generator") {
    +  testonly = true
    +  sources = all_test_sources
    +  toggle_gn_target = ":changing_java"
    +  pydeps = [ "//build/android/gyp/compile_java.pydeps" ]
    +}
    +
    +android_java_prebuilt("no_signature_change_prebuilt_java") {
    +  testonly = true
    +  _generator_outputs =
    +      get_target_outputs(":no_signature_change_prebuilt_generator")
    +  jar_paths = filter_include(_generator_outputs, [ "*.jar" ])
    +  jar_path = jar_paths[0]
    +  deps = [ ":no_signature_change_prebuilt_generator" ]
    +}
    diff --git a/android/test/incremental_javac_gn/incremental_javac_test_android_library.py b/android/test/incremental_javac_gn/incremental_javac_test_android_library.py
    new file mode 100755
    index 000000000000..640745086cbb
    --- /dev/null
    +++ b/android/test/incremental_javac_gn/incremental_javac_test_android_library.py
    @@ -0,0 +1,154 @@
    +#!/usr/bin/env python3
    +#
    +# Copyright 2021 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +"""Compiles twice: With incremental_javac_test_toggle_gn=[false, true]
    +
    +The purpose of compiling the target twice is to test that builds generated by
    +the incremental build code path are valid.
    +"""
    +
    +import argparse
    +import os
    +import pathlib
    +import subprocess
    +import shutil
    +
    +_CHROMIUM_SRC = pathlib.Path(__file__).resolve().parents[4].resolve()
    +_NINJA_PATH = _CHROMIUM_SRC / 'third_party' / 'ninja' / 'ninja'
    +
    +# Relative to _CHROMIUM_SRC
    +_GN_SRC_REL_PATH = 'buildtools/linux64/gn'
    +
    +_USING_PARTIAL_JAVAC_MSG = 'Using partial javac optimization'
    +
    +
    +def _raise_command_exception(args, returncode, output):
    +  """Raises an exception whose message describes a command failure.
    +
    +    Args:
    +      args: shell command-line (as passed to subprocess.Popen())
    +      returncode: status code.
    +      output: command output.
    +    Raises:
    +      a new Exception.
    +    """
    +  message = ('Command failed with status {}: {}\n'
    +             'Output:-----------------------------------------\n{}\n'
    +             '------------------------------------------------\n').format(
    +                 returncode, args, output)
    +  raise Exception(message)
    +
    +
    +def _run_command(args, check_returncode=True, cwd=None, env=None):
    +  """Runs shell command. Raises exception if command fails."""
    +  p = subprocess.Popen(args,
    +                       stdout=subprocess.PIPE,
    +                       stderr=subprocess.STDOUT,
    +                       cwd=cwd,
    +                       env=env,
    +                       universal_newlines=True)
    +  pout, _ = p.communicate()
    +  if check_returncode and p.returncode != 0:
    +    _raise_command_exception(args, p.returncode, pout)
    +  return pout
    +
    +
    +def _copy_and_append_gn_args(src_args_path, dest_args_path, extra_args):
    +  """Copies args.gn.
    +
    +    Args:
    +      src_args_path: args.gn file to copy.
    +      dest_args_path: Copy file destination.
    +      extra_args: Text to append to args.gn after copy.
    +    """
    +  with open(src_args_path) as f:
    +    initial_args_str = f.read()
    +
    +  with open(dest_args_path, 'w') as f:
    +    f.write(initial_args_str)
    +    f.write('\n')
    +
    +    # Write |extra_args| after |initial_args_str| so that |extra_args|
    +    # overwrites |initial_args_str| in the case of duplicate entries.
    +    f.write('\n'.join(extra_args))
    +
    +
    +def _run_gn(args, check_returncode=True):
    +  _run_command([_GN_SRC_REL_PATH] + args,
    +               check_returncode=check_returncode,
    +               cwd=_CHROMIUM_SRC)
    +
    +
    +def main():
    +  parser = argparse.ArgumentParser()
    +  parser.add_argument('--target-name',
    +                      required=True,
    +                      help='name of target to build with and without ' +
    +                      'incremental_javac_test_toggle_gn=true')
    +  parser.add_argument('--gn-args-path',
    +                      required=True,
    +                      help='Path to args.gn file to copy args from.')
    +  parser.add_argument('--out-dir',
    +                      required=True,
    +                      help='Path to output directory to use for compilation.')
    +  parser.add_argument('--out-jar',
    +                      required=True,
    +                      help='Path where output jar should be stored.')
    +  options = parser.parse_args()
    +
    +  options.out_dir = pathlib.Path(options.out_dir).resolve()
    +
    +  options.out_dir.mkdir(parents=True, exist_ok=True)
    +
    +  # Clear the output directory so that first compile is not an incremental
    +  # build.
    +  # This will make the test fail in the scenario that:
    +  # - The output directory contains a previous build generated by this script.
    +  # - Incremental builds are broken and are a no-op.
    +  _run_gn(['clean', options.out_dir.relative_to(_CHROMIUM_SRC)],
    +          check_returncode=False)
    +
    +  out_gn_args_path = options.out_dir / 'args.gn'
    +  extra_gn_args = [
    +      'treat_warnings_as_errors = true',
    +      # GOMA does not work with non-standard output directories.
    +      'use_goma = false',
    +  ]
    +  _copy_and_append_gn_args(
    +      options.gn_args_path, out_gn_args_path,
    +      extra_gn_args + ['incremental_javac_test_toggle_gn = false'])
    +
    +  _run_gn([
    +      '--root-target=' + options.target_name, 'gen',
    +      options.out_dir.relative_to(_CHROMIUM_SRC)
    +  ])
    +
    +  ninja_env = os.environ.copy()
    +  ninja_env['JAVAC_DEBUG'] = '1'
    +
    +  # Strip leading '//'
    +  gn_path = options.target_name[2:]
    +  ninja_args = [_NINJA_PATH, '-C', options.out_dir, gn_path]
    +  ninja_output = _run_command(ninja_args, env=ninja_env)
    +  if _USING_PARTIAL_JAVAC_MSG in ninja_output:
    +    raise Exception('Incorrectly using partial javac for clean compile.')
    +
    +  _copy_and_append_gn_args(
    +      options.gn_args_path, out_gn_args_path,
    +      extra_gn_args + ['incremental_javac_test_toggle_gn = true'])
    +  ninja_output = _run_command(ninja_args, env=ninja_env)
    +  if _USING_PARTIAL_JAVAC_MSG not in ninja_output:
    +    raise Exception('Not using partial javac for incremental compile.')
    +
    +  expected_output_path = '{}/obj/{}.javac.jar'.format(options.out_dir,
    +                                                      gn_path.replace(':', '/'))
    +  if not os.path.exists(expected_output_path):
    +    raise Exception('{} not created.'.format(expected_output_path))
    +
    +  shutil.copyfile(expected_output_path, options.out_jar)
    +
    +
    +if __name__ == '__main__':
    +  main()
    diff --git a/android/test/missing_symbol_test.gni b/android/test/missing_symbol_test.gni
    new file mode 100644
    index 000000000000..3cc4741c9450
    --- /dev/null
    +++ b/android/test/missing_symbol_test.gni
    @@ -0,0 +1,57 @@
    +# Copyright 2021 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +import("//build/config/android/android_nocompile.gni")
    +import("//build/config/android/rules.gni")
    +
    +missing_symbol_generated_importer_template_nocompile_source =
    +    "//build/android/java/test/missing_symbol/Importer.template"
    +
    +template("missing_symbol_test") {
    +  # Not named "_java" to prevent target from being considered a classpath dep.
    +  _helper_target_name = string_replace("${target_name}__helper", "java", "")
    +
    +  group(_helper_target_name) {
    +    # Make group() depend on dependencies that |target_name| cannot find so that
    +    # the missing symbol resolver can find and suggest the missing GN dep.
    +    deps = invoker.deps
    +  }
    +
    +  android_library(target_name) {
    +    sources = [ "//tools/android/errorprone_plugin/test/src/org/chromium/tools/errorprone/plugin/Empty.java" ]
    +    not_needed(invoker,
    +               [
    +                 "sources",
    +                 "importer_srcjar_deps",
    +               ])
    +    if (enable_android_nocompile_tests) {
    +      if (defined(invoker.sources)) {
    +        sources += invoker.sources
    +      }
    +      if (defined(invoker.importer_srcjar_deps)) {
    +        srcjar_deps = invoker.importer_srcjar_deps
    +      }
    +    }
    +
    +    deps = [ ":${_helper_target_name}" ]
    +  }
    +}
    +
    +# missing_symbol_test() template wrapper which generates importer class.
    +template("missing_symbol_generated_importer_test") {
    +  _importer_generator_target = "${target_name}__importer_javagen"
    +  java_cpp_template(_importer_generator_target) {
    +    sources = [ missing_symbol_generated_importer_template_nocompile_source ]
    +    defines = [
    +      "_IMPORTER_PACKAGE=${invoker.importer_package}",
    +      "_IMPORTEE_PACKAGE=${invoker.imported_package}",
    +      "_IMPORTEE_CLASS_NAME=${invoker.imported_class_name}",
    +    ]
    +  }
    +
    +  missing_symbol_test(target_name) {
    +    importer_srcjar_deps = [ ":${_importer_generator_target}" ]
    +    forward_variables_from(invoker, [ "deps" ])
    +  }
    +}
    diff --git a/android/test/nocompile_gn/BUILD.gn b/android/test/nocompile_gn/BUILD.gn
    new file mode 100644
    index 000000000000..406bd8c485fc
    --- /dev/null
    +++ b/android/test/nocompile_gn/BUILD.gn
    @@ -0,0 +1,101 @@
    +# Copyright 2021 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +import("//build/android/test/missing_symbol_test.gni")
    +import("//build/config/android/android_nocompile.gni")
    +import("//build/config/android/rules.gni")
    +import("nocompile_sources.gni")
    +
    +template("lint_test") {
    +  _library_target_name = "${target_name}_test_java"
    +  _apk_target_name = "${target_name}_apk"
    +
    +  android_library(_library_target_name) {
    +    sources = [ "//tools/android/errorprone_plugin/test/src/org/chromium/tools/errorprone/plugin/Empty.java" ]
    +    not_needed(invoker, [ "sources" ])
    +    if (enable_android_nocompile_tests) {
    +      sources += invoker.sources
    +    }
    +  }
    +
    +  android_apk(_apk_target_name) {
    +    # This cannot be marked testonly since lint has special ignores for testonly
    +    # targets. We need to test linting a normal apk target.
    +    apk_name = _apk_target_name
    +    deps = [ ":$_library_target_name" ]
    +    android_manifest = "//build/android/AndroidManifest.xml"
    +  }
    +
    +  android_lint(target_name) {
    +    _apk_target = ":${_apk_target_name}"
    +    deps = [ "${_apk_target}__java" ]
    +    build_config_dep = "$_apk_target$build_config_target_suffix"
    +    build_config = get_label_info(_apk_target, "target_gen_dir") + "/" +
    +                   get_label_info(_apk_target, "name") + ".build_config.json"
    +    if (enable_android_nocompile_tests) {
    +      skip_build_server = true
    +    }
    +  }
    +}
    +
    +lint_test("default_locale_lint_test") {
    +  sources = default_locale_lint_test_nocompile_sources
    +}
    +
    +lint_test("new_api_lint_test") {
    +  sources = new_api_lint_test_nocompile_sources
    +}
    +
    +missing_symbol_generated_importer_test(
    +    "import_parent_missing_symbol_test_java") {
    +  importer_package = "test.missing_symbol.child_missing"
    +  imported_package = "test.missing_symbol"
    +  imported_class_name = "B"
    +  deps = [ ":b_java" ]
    +}
    +
    +missing_symbol_test("import_child_missing_symbol_test_java") {
    +  sources = import_child_missing_symbol_test_nocompile_sources
    +  deps = [ ":sub_b_java" ]
    +}
    +
    +missing_symbol_test("import_turbine_missing_symbol_test_java") {
    +  sources = import_turbine_missing_symbol_test_nocompile_sources
    +  deps = [ ":b_java" ]
    +}
    +
    +missing_symbol_generated_importer_test("prebuilt_missing_symbol_test_java") {
    +  importer_package = "test.missing_symbol.prebuilt_missing"
    +  imported_package = "test.missing_symbol"
    +  imported_class_name = "C"
    +  deps = [ ":c_prebuilt_java" ]
    +}
    +
    +missing_symbol_generated_importer_test(
    +    "cpp_template_missing_symbol_test_java") {
    +  importer_package = "test.missing_symbol.cpp_template_missing"
    +  imported_package = "test.missing_symbol"
    +  imported_class_name = "D"
    +  deps = [ ":d_java" ]
    +}
    +
    +android_library("b_java") {
    +  sources = [ "../../java/test/missing_symbol/B.java" ]
    +}
    +
    +android_library("sub_b_java") {
    +  sources = [ "../../java/test/missing_symbol/sub/SubB.java" ]
    +}
    +
    +android_java_prebuilt("c_prebuilt_java") {
    +  jar_path = "../../java/test/missing_symbol/c.jar"
    +}
    +
    +android_library("d_java") {
    +  srcjar_deps = [ ":d_template_javagen" ]
    +}
    +
    +java_cpp_template("d_template_javagen") {
    +  sources = [ "../../java/test/missing_symbol/D.template" ]
    +}
    diff --git a/android/test/nocompile_gn/nocompile_sources.gni b/android/test/nocompile_gn/nocompile_sources.gni
    new file mode 100644
    index 000000000000..36cd91503c83
    --- /dev/null
    +++ b/android/test/nocompile_gn/nocompile_sources.gni
    @@ -0,0 +1,14 @@
    +# Copyright 2021 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +default_locale_lint_test_nocompile_sources =
    +    [ "../../java/test/DefaultLocaleLintTest.java" ]
    +
    +new_api_lint_test_nocompile_sources = [ "../../java/test/NewApiLintTest.java" ]
    +
    +import_child_missing_symbol_test_nocompile_sources =
    +    [ "../../java/test/missing_symbol/ImportsSubB.java" ]
    +
    +import_turbine_missing_symbol_test_nocompile_sources =
    +    [ "../../java/test/missing_symbol/sub/BInMethodSignature.java" ]
    diff --git a/android/test_runner.py b/android/test_runner.py
    new file mode 100755
    index 000000000000..34b8debd4660
    --- /dev/null
    +++ b/android/test_runner.py
    @@ -0,0 +1,1388 @@
    +#!/usr/bin/env vpython3
    +#
    +# Copyright 2013 The Chromium Authors
    +# Use of this source code is governed by a BSD-style license that can be
    +# found in the LICENSE file.
    +
    +"""Runs all types of tests from one unified interface."""
    +
    +from __future__ import absolute_import
    +import argparse
    +import collections
    +import contextlib
    +import io
    +import itertools
    +import logging
    +import os
    +import re
    +import shlex
    +import shutil
    +import signal
    +import sys
    +import tempfile
    +import threading
    +import traceback
    +import unittest
    +
    +# Import _strptime before threaded code. datetime.datetime.strptime is
    +# threadsafe except for the initial import of the _strptime module.
    +# See http://crbug.com/724524 and https://bugs.python.org/issue7980.
    +import _strptime  # pylint: disable=unused-import
    +
    +# pylint: disable=ungrouped-imports
    +from pylib.constants import host_paths
    +
    +if host_paths.DEVIL_PATH not in sys.path:
    +  sys.path.append(host_paths.DEVIL_PATH)
    +
    +from devil import base_error
    +from devil.utils import reraiser_thread
    +from devil.utils import run_tests_helper
    +
    +from pylib import constants
    +from pylib.base import base_test_result
    +from pylib.base import environment_factory
    +from pylib.base import output_manager
    +from pylib.base import output_manager_factory
    +from pylib.base import test_instance_factory
    +from pylib.base import test_run_factory
    +from pylib.results import json_results
    +from pylib.results import report_results
    +from pylib.results.presentation import test_results_presentation
    +from pylib.utils import local_utils
    +from pylib.utils import logdog_helper
    +from pylib.utils import logging_utils
    +from pylib.utils import test_filter
    +
    +from py_utils import contextlib_ext
    +
    +from lib.results import result_sink  # pylint: disable=import-error
    +
    +_DEVIL_STATIC_CONFIG_FILE = os.path.abspath(os.path.join(
    +    host_paths.DIR_SOURCE_ROOT, 'build', 'android', 'devil_config.json'))
    +
    +_RERUN_FAILED_TESTS_FILE = 'rerun_failed_tests.filter'
    +
    +
    +def _RealPath(arg):
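+  # GN-style source-absolute paths ('//...') are resolved against the Chromium
+  # source root, e.g. '//out/Debug' -> <src>/out/Debug; the result is then
+  # canonicalized with os.path.realpath().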
    +  if arg.startswith('//'):
    +    arg = os.path.abspath(os.path.join(host_paths.DIR_SOURCE_ROOT,
    +                                       arg[2:].replace('/', os.sep)))
    +  return os.path.realpath(arg)
    +
    +
    +def AddTestLauncherOptions(parser):
    +  """Adds arguments mirroring //base/test/launcher.
    +
    +  Args:
    +    parser: The parser to which arguments should be added.
    +  Returns:
    +    The given parser.
    +  """
    +  parser.add_argument(
    +      '--test-launcher-retry-limit',
    +      '--test_launcher_retry_limit',
    +      '--num_retries', '--num-retries',
    +      '--isolated-script-test-launcher-retry-limit',
    +      dest='num_retries', type=int, default=2,
    +      help='Number of retries for a test before '
    +           'giving up (default: %(default)s).')
    +  parser.add_argument(
    +      '--test-launcher-summary-output',
    +      '--json-results-file',
    +      dest='json_results_file', type=os.path.realpath,
    +      help='If set, will dump results in JSON form to the specified file. '
    +           'Note that this will also trigger saving per-test logcats to '
    +           'logdog.')
    +  parser.add_argument(
    +      '--test-launcher-shard-index',
    +      type=int, default=os.environ.get('GTEST_SHARD_INDEX', 0),
    +      help='Index of the external shard to run.')
    +  parser.add_argument(
    +      '--test-launcher-total-shards',
    +      type=int, default=os.environ.get('GTEST_TOTAL_SHARDS', 1),
    +      help='Total number of external shards.')
    +
    +  test_filter.AddFilterOptions(parser)
    +
    +  return parser
    +
    +
    +def AddCommandLineOptions(parser):
    +  """Adds arguments to support passing command-line flags to the device."""
    +  parser.add_argument(
    +      '--device-flags-file',
    +      type=os.path.realpath,
    +      help='The relative filepath to a file containing '
    +           'command-line flags to set on the device')
    +  parser.add_argument(
    +      '--use-apk-under-test-flags-file',
    +      action='store_true',
+      help='Whether to use the flags file for the apk under test. If set, '
    +           "the filename will be looked up in the APK's PackageInfo.")
    +  parser.set_defaults(allow_unknown=True)
    +  parser.set_defaults(command_line_flags=None)
    +
    +
    +def AddTracingOptions(parser):
    +  # TODO(shenghuazhang): Move this into AddCommonOptions once it's supported
    +  # for all test types.
    +  parser.add_argument(
    +      '--trace-output',
    +      metavar='FILENAME', type=os.path.realpath,
    +      help='Path to save test_runner trace json output to.')
    +
    +  parser.add_argument(
    +      '--trace-all',
    +      action='store_true',
    +      help='Whether to trace all function calls.')
    +
    +
    +def AddCommonOptions(parser):
    +  """Adds all common options to |parser|."""
    +
    +  default_build_type = os.environ.get('BUILDTYPE', 'Debug')
    +
    +  debug_or_release_group = parser.add_mutually_exclusive_group()
    +  debug_or_release_group.add_argument(
    +      '--debug',
    +      action='store_const', const='Debug', dest='build_type',
    +      default=default_build_type,
    +      help='If set, run test suites under out/Debug. '
    +           'Default is env var BUILDTYPE or Debug.')
    +  debug_or_release_group.add_argument(
    +      '--release',
    +      action='store_const', const='Release', dest='build_type',
    +      help='If set, run test suites under out/Release. '
    +           'Default is env var BUILDTYPE or Debug.')
    +
    +  parser.add_argument(
    +      '--break-on-failure', '--break_on_failure',
    +      dest='break_on_failure', action='store_true',
    +      help='Whether to break on failure.')
    +
    +  # TODO(jbudorick): Remove this once everything has switched to platform
    +  # mode.
    +  parser.add_argument(
    +      '--enable-platform-mode',
    +      action='store_true',
    +      help='Run the test scripts in platform mode, which '
    +           'conceptually separates the test runner from the '
    +           '"device" (local or remote, real or emulated) on '
    +           'which the tests are running. [experimental]')
    +
    +  parser.add_argument(
    +      '-e', '--environment',
    +      default='local', choices=constants.VALID_ENVIRONMENTS,
    +      help='Test environment to run in (default: %(default)s).')
    +
    +  parser.add_argument(
    +      '--local-output',
    +      action='store_true',
    +      help='Whether to archive test output locally and generate '
    +           'a local results detail page.')
    +
    +  parser.add_argument('--list-tests',
    +                      action='store_true',
    +                      help='List available tests and exit.')
    +
    +  parser.add_argument('--wrapper-script-args',
    +                      help='A string of args that were passed to the wrapper '
    +                      'script. This should probably not be edited by a '
    +                      'user as it is passed by the wrapper itself.')
    +
    +  class FastLocalDevAction(argparse.Action):
    +    def __call__(self, parser, namespace, values, option_string=None):
    +      namespace.enable_concurrent_adb = True
    +      namespace.enable_device_cache = True
    +      namespace.extract_test_list_from_filter = True
    +      namespace.local_output = True
    +      namespace.num_retries = 0
    +      namespace.skip_clear_data = True
    +      namespace.use_persistent_shell = True
    +
    +  parser.add_argument(
    +      '--fast-local-dev',
    +      type=bool,
    +      nargs=0,
    +      action=FastLocalDevAction,
    +      help='Alias for: --num-retries=0 --enable-device-cache '
    +      '--enable-concurrent-adb --skip-clear-data '
    +      '--extract-test-list-from-filter --use-persistent-shell --local-output')
    +
    +  # TODO(jbudorick): Remove this once downstream bots have switched to
    +  # api.test_results.
    +  parser.add_argument(
    +      '--flakiness-dashboard-server',
    +      dest='flakiness_dashboard_server',
    +      help=argparse.SUPPRESS)
    +  parser.add_argument(
    +      '--gs-results-bucket',
    +      help='Google Storage bucket to upload results to.')
    +
    +  parser.add_argument(
    +      '--output-directory',
    +      dest='output_directory', type=os.path.realpath,
    +      help='Path to the directory in which build files are'
    +           ' located (must include build type). This will take'
    +           ' precedence over --debug and --release')
    +  parser.add_argument(
    +      '-v', '--verbose',
    +      dest='verbose_count', default=0, action='count',
    +      help='Verbose level (multiple times for more)')
    +
    +  parser.add_argument(
    +      '--repeat', '--gtest_repeat', '--gtest-repeat',
    +      '--isolated-script-test-repeat',
    +      dest='repeat', type=int, default=0,
    +      help='Number of times to repeat the specified set of tests.')
    +
    +  # Not useful for junit tests.
    +  parser.add_argument(
    +      '--use-persistent-shell',
    +      action='store_true',
    +      help='Uses a persistent shell connection for the adb connection.')
    +
    +  parser.add_argument('--disable-test-server',
    +                      action='store_true',
+                      help='Disables SpawnedTestServer which doesn\'t '
+                      'work with remote adb. '
    +                      'WARNING: Will break tests which require the server.')
    +
    +  # This is currently only implemented for gtests and instrumentation tests.
    +  parser.add_argument(
    +      '--gtest_also_run_disabled_tests', '--gtest-also-run-disabled-tests',
    +      '--isolated-script-test-also-run-disabled-tests',
    +      dest='run_disabled', action='store_true',
    +      help='Also run disabled tests if applicable.')
    +
    +  # These are currently only implemented for gtests.
    +  parser.add_argument('--isolated-script-test-output',
    +                      help='If present, store test results on this path.')
    +  parser.add_argument('--isolated-script-test-perf-output',
    +                      help='If present, store chartjson results on this path.')
    +
    +  AddTestLauncherOptions(parser)
    +
    +
    +def ProcessCommonOptions(args):
    +  """Processes and handles all common options."""
    +  run_tests_helper.SetLogLevel(args.verbose_count, add_handler=False)
    +  if args.verbose_count > 0:
    +    handler = logging_utils.ColorStreamHandler()
    +  else:
    +    handler = logging.StreamHandler(sys.stdout)
    +  handler.setFormatter(run_tests_helper.CustomFormatter())
    +  logging.getLogger().addHandler(handler)
    +
    +  constants.SetBuildType(args.build_type)
    +  if args.output_directory:
    +    constants.SetOutputDirectory(args.output_directory)
    +
    +
    +def AddDeviceOptions(parser):
    +  """Adds device options to |parser|."""
    +
    +  parser = parser.add_argument_group('device arguments')
    +
    +  parser.add_argument(
    +      '--adb-path',
    +      type=os.path.realpath,
    +      help='Specify the absolute path of the adb binary that '
    +           'should be used.')
    +  parser.add_argument('--denylist-file',
    +                      type=os.path.realpath,
    +                      help='Device denylist file.')
    +  parser.add_argument(
    +      '-d', '--device', nargs='+',
    +      dest='test_devices',
    +      help='Target device(s) for the test suite to run on.')
    +  parser.add_argument(
    +      '--enable-concurrent-adb',
    +      action='store_true',
    +      help='Run multiple adb commands at the same time, even '
    +           'for the same device.')
    +  parser.add_argument(
    +      '--enable-device-cache',
    +      action='store_true',
    +      help='Cache device state to disk between runs')
    +  parser.add_argument(
    +      '--skip-clear-data',
    +      action='store_true',
    +      help='Do not wipe app data between tests. Use this to '
    +           'speed up local development and never on bots '
+           '(increases flakiness)')
    +  parser.add_argument(
    +      '--recover-devices',
    +      action='store_true',
    +      help='Attempt to recover devices prior to the final retry. Warning: '
    +           'this will cause all devices to reboot.')
    +  parser.add_argument(
    +      '--tool',
    +      dest='tool',
    +      help='Run the test under a tool '
    +           '(use --tool help to list them)')
    +
    +  parser.add_argument(
    +      '--upload-logcats-file',
    +      action='store_true',
    +      dest='upload_logcats_file',
    +      help='Whether to upload logcat file to logdog.')
    +
    +  logcat_output_group = parser.add_mutually_exclusive_group()
    +  logcat_output_group.add_argument(
    +      '--logcat-output-dir', type=os.path.realpath,
    +      help='If set, will dump logcats recorded during test run to directory. '
    +           'File names will be the device ids with timestamps.')
    +  logcat_output_group.add_argument(
    +      '--logcat-output-file', type=os.path.realpath,
    +      help='If set, will merge logcats recorded during test run and dump them '
    +           'to the specified file.')
    +
    +
    +def AddEmulatorOptions(parser):
    +  """Adds emulator-specific options to |parser|."""
    +  parser = parser.add_argument_group('emulator arguments')
    +
    +  parser.add_argument(
    +      '--avd-config',
    +      type=os.path.realpath,
    +      help='Path to the avd config textpb. '
    +      '(See //tools/android/avd/proto/ for message definition'
    +      ' and existing textpb files.)')
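+  # Illustrative example; the file name below is hypothetical, check
+  # //tools/android/avd/proto/ for the textpb configs that actually exist:
+  #   --avd-config tools/android/avd/proto/generic_android28.textpb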
    +  parser.add_argument(
    +      '--emulator-count',
    +      type=int,
    +      default=1,
    +      help='Number of emulators to use.')
    +  parser.add_argument(
    +      '--emulator-window',
    +      action='store_true',
    +      default=False,
    +      help='Enable graphical window display on the emulator.')
    +  parser.add_argument(
    +      '--emulator-debug-tags',
    +      help='Comma-separated list of debug tags. This can be used to enable or '
    +      'disable debug messages from specific parts of the emulator, e.g. '
    +      'init,snapshot. See "emulator -help-debug-tags" '
    +      'for a full list of tags.')
    +
    +
    +def AddGTestOptions(parser):
    +  """Adds gtest options to |parser|."""
    +
    +  parser = parser.add_argument_group('gtest arguments')
    +
    +  parser.add_argument(
    +      '--app-data-file',
    +      action='append', dest='app_data_files',
    +      help='A file path relative to the app data directory '
    +           'that should be saved to the host.')
    +  parser.add_argument(
    +      '--app-data-file-dir',
    +      help='Host directory to which app data files will be'
    +           ' saved. Used with --app-data-file.')
    +  parser.add_argument(
    +      '--enable-xml-result-parsing',
    +      action='store_true', help=argparse.SUPPRESS)
    +  parser.add_argument(
    +      '--executable-dist-dir',
    +      type=os.path.realpath,
    +      help="Path to executable's dist directory for native"
    +           " (non-apk) tests.")
    +  parser.add_argument(
    +      '--extract-test-list-from-filter',
    +      action='store_true',
    +      help='When a test filter is specified, and the list of '
    +           'tests can be determined from it, skip querying the '
    +           'device for the list of all tests. Speeds up local '
+           'development, but is not safe to use on bots '
+           '(http://crbug.com/549214).')
    +  parser.add_argument(
    +      '--gs-test-artifacts-bucket',
    +      help=('If present, test artifacts will be uploaded to this Google '
    +            'Storage bucket.'))
    +  parser.add_argument(
    +      '--render-test-output-dir',
    +      help='If present, store rendering artifacts in this path.')
    +  parser.add_argument(
    +      '--runtime-deps-path',
    +      dest='runtime_deps_path', type=os.path.realpath,
    +      help='Runtime data dependency file from GN.')
    +  parser.add_argument(
    +      '-t', '--shard-timeout',
    +      dest='shard_timeout', type=int, default=120,
    +      help='Timeout to wait for each test (default: %(default)s).')
    +  parser.add_argument(
    +      '--store-tombstones',
    +      dest='store_tombstones', action='store_true',
+      help='Add tombstones to results in the event of a crash.')
    +  parser.add_argument(
    +      '-s', '--suite',
    +      dest='suite_name', nargs='+', metavar='SUITE_NAME', required=True,
    +      help='Executable name of the test suite to run.')
    +  parser.add_argument(
    +      '--test-apk-incremental-install-json',
    +      type=os.path.realpath,
    +      help='Path to install json for the test apk.')
    +  parser.add_argument('--test-launcher-batch-limit',
    +                      dest='test_launcher_batch_limit',
    +                      type=int,
    +                      help='The max number of tests to run in a shard. '
    +                      'Ignores non-positive ints and those greater than '
    +                      'MAX_SHARDS')
    +  parser.add_argument(
    +      '-w', '--wait-for-java-debugger', action='store_true',
    +      help='Wait for java debugger to attach before running any application '
    +           'code. Also disables test timeouts and sets retries=0.')
    +  parser.add_argument(
    +      '--coverage-dir',
    +      type=os.path.realpath,
    +      help='Directory in which to place all generated coverage files.')
    +  parser.add_argument(
    +      '--use-existing-test-data',
    +      action='store_true',
    +      help='Do not push new files to the device, instead using existing APK '
    +      'and test data. Only use when running the same test for multiple '
    +      'iterations.')
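+  # A typical local gtest invocation combining these flags might look like
+  # this (illustrative; suite availability depends on the build):
+  #   test_runner.py gtest -s base_unittests -t 300 --store-tombstones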
    +
    +
    +def AddInstrumentationTestOptions(parser):
    +  """Adds Instrumentation test options to |parser|."""
    +
    +  parser = parser.add_argument_group('instrumentation arguments')
    +
    +  parser.add_argument('--additional-apex',
    +                      action='append',
    +                      dest='additional_apexs',
    +                      default=[],
    +                      type=_RealPath,
    +                      help='Additional apex that must be installed on '
    +                      'the device when the tests are run')
    +  parser.add_argument(
    +      '--additional-apk',
    +      action='append', dest='additional_apks', default=[],
    +      type=_RealPath,
    +      help='Additional apk that must be installed on '
    +           'the device when the tests are run')
    +  parser.add_argument('--forced-queryable-additional-apk',
    +                      action='append',
    +                      dest='forced_queryable_additional_apks',
    +                      default=[],
    +                      type=_RealPath,
    +                      help='Configures an additional-apk to be forced '
    +                      'to be queryable by other APKs.')
    +  parser.add_argument('--instant-additional-apk',
    +                      action='append',
    +                      dest='instant_additional_apks',
    +                      default=[],
    +                      type=_RealPath,
    +                      help='Configures an additional-apk to be an instant APK')
    +  parser.add_argument(
    +      '-A', '--annotation',
    +      dest='annotation_str',
    +      help='Comma-separated list of annotations. Run only tests with any of '
    +           'the given annotations. An annotation can be either a key or a '
    +           'key-values pair. A test that has no annotation is considered '
    +           '"SmallTest".')
    +  # TODO(jbudorick): Remove support for name-style APK specification once
    +  # bots are no longer doing it.
    +  parser.add_argument(
    +      '--apk-under-test',
    +      help='Path or name of the apk under test.')
    +  parser.add_argument(
    +      '--store-data-in-app-directory',
    +      action='store_true',
    +      help='Store test data in the application\'s data directory. By default '
    +      'the test data is stored in the external storage folder.')
    +  parser.add_argument(
    +      '--module',
    +      action='append',
    +      dest='modules',
    +      help='Specify Android App Bundle modules to install in addition to the '
    +      'base module.')
    +  parser.add_argument(
    +      '--fake-module',
    +      action='append',
    +      dest='fake_modules',
    +      help='Specify Android App Bundle modules to fake install in addition to '
    +      'the real modules.')
    +  parser.add_argument(
    +      '--additional-locale',
    +      action='append',
    +      dest='additional_locales',
    +      help='Specify locales in addition to the device locale to install splits '
    +      'for when --apk-under-test is an Android App Bundle.')
    +  parser.add_argument(
    +      '--coverage-dir',
    +      type=os.path.realpath,
    +      help='Directory in which to place all generated '
    +      'Jacoco coverage files.')
    +  parser.add_argument(
    +      '--disable-dalvik-asserts',
    +      dest='set_asserts', action='store_false', default=True,
    +      help='Removes the dalvik.vm.enableassertions property')
    +  parser.add_argument(
    +      '--proguard-mapping-path',
+      help='.mapping file to use to deobfuscate Java stack traces in test '
+      'output and logcat.')
    +  parser.add_argument(
    +      '-E', '--exclude-annotation',
    +      dest='exclude_annotation_str',
    +      help='Comma-separated list of annotations. Exclude tests with these '
    +           'annotations.')
    +  parser.add_argument(
    +      '--enable-breakpad-dump',
    +      action='store_true',
    +      help='Stores any breakpad dumps till the end of the test.')
    +  parser.add_argument(
    +      '--replace-system-package',
    +      type=_RealPath,
    +      default=None,
    +      help='Use this apk to temporarily replace a system package with the same '
    +      'package name.')
    +  parser.add_argument(
    +      '--remove-system-package',
    +      default=[],
    +      action='append',
    +      dest='system_packages_to_remove',
    +      help='Specifies a system package to remove before testing if it exists '
    +      'on the system. WARNING: THIS WILL PERMANENTLY REMOVE THE SYSTEM APP. '
    +      'Unlike --replace-system-package, the app will not be restored after '
    +      'tests are finished.')
    +  parser.add_argument(
    +      '--use-voice-interaction-service',
    +      help='This can be used to update the voice interaction service to be a '
+      'custom one. This is useful for mocking assistants, e.g. '
    +      'android.assist.service/.MainInteractionService')
    +  parser.add_argument(
    +      '--use-webview-provider',
    +      type=_RealPath, default=None,
+      help='Use this apk as the webview provider during the test. '
+           'The original provider will be restored if possible; '
+           "on Nougat the provider can't be determined, so "
+           'the system will choose the default provider.')
    +  parser.add_argument(
    +      '--run-setup-command',
    +      default=[],
    +      action='append',
    +      dest='run_setup_commands',
    +      help='This can be used to run a custom shell command on the device as a '
    +      'setup step')
    +  parser.add_argument(
    +      '--run-teardown-command',
    +      default=[],
    +      action='append',
    +      dest='run_teardown_commands',
    +      help='This can be used to run a custom shell command on the device as a '
    +      'teardown step')
    +  parser.add_argument(
    +      '--runtime-deps-path',
    +      dest='runtime_deps_path', type=os.path.realpath,
    +      help='Runtime data dependency file from GN.')
    +  parser.add_argument(
    +      '--screenshot-directory',
    +      dest='screenshot_dir', type=os.path.realpath,
    +      help='Capture screenshots of test failures')
    +  parser.add_argument(
    +      '--shared-prefs-file',
    +      dest='shared_prefs_file', type=_RealPath,
    +      help='The relative path to a file containing JSON list of shared '
    +           'preference files to edit and how to do so. Example list: '
    +           '[{'
    +           '  "package": "com.package.example",'
    +           '  "filename": "ExampleSettings.xml",'
    +           '  "set": {'
    +           '    "boolean_key_in_xml": true,'
    +           '    "string_key_in_xml": "string_value"'
    +           '  },'
    +           '  "remove": ['
    +           '    "key_in_xml_to_remove"'
    +           '  ]'
    +           '}]')
    +  parser.add_argument(
    +      '--store-tombstones',
    +      action='store_true', dest='store_tombstones',
+      help='Add tombstones to results in the event of a crash.')
    +  parser.add_argument(
    +      '--strict-mode',
    +      dest='strict_mode', default='testing',
    +      help='StrictMode command-line flag set on the device, '
    +           'death/testing to kill the process, off to stop '
    +           'checking, flash to flash only. (default: %(default)s)')
    +  parser.add_argument(
    +      '--test-apk',
    +      required=True,
    +      help='Path or name of the apk containing the tests.')
    +  parser.add_argument(
    +      '--test-apk-as-instant',
    +      action='store_true',
    +      help='Install the test apk as an instant app. '
    +      'Instant apps run in a more restrictive execution environment.')
    +  parser.add_argument(
    +      '--test-launcher-batch-limit',
    +      dest='test_launcher_batch_limit',
    +      type=int,
    +      help=('Not actually used for instrumentation tests, but can be used as '
    +            'a proxy for determining if the current run is a retry without '
    +            'patch.'))
    +  parser.add_argument(
    +      '--timeout-scale',
    +      type=float,
    +      help='Factor by which timeouts should be scaled.')
    +  parser.add_argument(
    +      '--is-unit-test',
    +      action='store_true',
    +      help=('Specify the test suite as composed of unit tests, blocking '
    +            'certain operations.'))
    +  parser.add_argument(
    +      '-w', '--wait-for-java-debugger', action='store_true',
    +      help='Wait for java debugger to attach before running any application '
    +           'code. Also disables test timeouts and sets retries=0.')
    +
    +  # WPR record mode.
    +  parser.add_argument('--wpr-enable-record',
    +                      action='store_true',
    +                      default=False,
+                      help='If true, WPR server runs in record mode. '
+                      'Otherwise, it runs in replay mode.')
    +
    +  parser.add_argument(
    +      '--approve-app-links',
    +      help='Force enables Digital Asset Link verification for the provided '
    +      'package and domain, example usage: --approve-app-links '
    +      'com.android.package:www.example.com')
    +
    +  # These arguments are suppressed from the help text because they should
    +  # only ever be specified by an intermediate script.
    +  parser.add_argument(
    +      '--apk-under-test-incremental-install-json',
    +      help=argparse.SUPPRESS)
    +  parser.add_argument(
    +      '--test-apk-incremental-install-json',
    +      type=os.path.realpath,
    +      help=argparse.SUPPRESS)
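+  # A minimal local run (illustrative; apk names vary by build target):
+  #   test_runner.py instrumentation --test-apk ChromePublicTest \
+  #       -A SmallTest --screenshot-directory /tmp/screenshots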
    +
    +
    +def AddSkiaGoldTestOptions(parser):
    +  """Adds Skia Gold test options to |parser|."""
    +  parser = parser.add_argument_group("Skia Gold arguments")
    +  parser.add_argument(
    +      '--code-review-system',
+      help='A non-default code review system to pass to Gold, if '
+      'applicable.')
    +  parser.add_argument(
    +      '--continuous-integration-system',
    +      help='A non-default continuous integration system to pass to Gold, if '
    +      'applicable')
    +  parser.add_argument(
    +      '--git-revision', help='The git commit currently being tested.')
    +  parser.add_argument(
    +      '--gerrit-issue',
    +      help='The Gerrit issue this test is being run on, if applicable.')
    +  parser.add_argument(
    +      '--gerrit-patchset',
    +      help='The Gerrit patchset this test is being run on, if applicable.')
    +  parser.add_argument(
    +      '--buildbucket-id',
    +      help='The Buildbucket build ID that this test was triggered from, if '
    +      'applicable.')
    +  local_group = parser.add_mutually_exclusive_group()
    +  local_group.add_argument(
    +      '--local-pixel-tests',
    +      action='store_true',
    +      default=None,
    +      help='Specifies to run the Skia Gold pixel tests in local mode. When run '
    +      'in local mode, uploading to Gold is disabled and traditional '
    +      'generated/golden/diff images are output instead of triage links. '
    +      'Running in local mode also implies --no-luci-auth. If both this '
    +      'and --no-local-pixel-tests are left unset, the test harness will '
    +      'attempt to detect whether it is running on a workstation or not '
    +      'and set the options accordingly.')
    +  local_group.add_argument(
    +      '--no-local-pixel-tests',
    +      action='store_false',
    +      dest='local_pixel_tests',
    +      help='Specifies to run the Skia Gold pixel tests in non-local (bot) '
    +      'mode. When run in this mode, data is actually uploaded to Gold and '
    +      'triage links are generated. If both this and --local-pixel-tests '
    +      'are left unset, the test harness will attempt to detect whether '
    +      'it is running on a workstation or not and set the options '
    +      'accordingly.')
    +  parser.add_argument(
    +      '--no-luci-auth',
    +      action='store_true',
    +      default=False,
    +      help="Don't use the serve account provided by LUCI for authentication "
    +      'with Skia Gold, instead relying on gsutil to be pre-authenticated. '
    +      'Meant for testing locally instead of on the bots.')
    +  parser.add_argument(
    +      '--bypass-skia-gold-functionality',
    +      action='store_true',
    +      default=False,
    +      help='Bypass all interaction with Skia Gold, effectively disabling the '
    +      'image comparison portion of any tests that use Gold. Only meant to be '
    +      'used in case a Gold outage occurs and cannot be fixed quickly.')
    +
    +
    +def AddJUnitTestOptions(parser):
    +  """Adds junit test options to |parser|."""
    +
    +  parser = parser.add_argument_group('junit arguments')
    +
    +  parser.add_argument(
    +      '--coverage-on-the-fly',
    +      action='store_true',
    +      help='Generate coverage data by Jacoco on-the-fly instrumentation.')
    +  parser.add_argument(
    +      '--coverage-dir', type=os.path.realpath,
    +      help='Directory to store coverage info.')
    +  parser.add_argument(
    +      '--package-filter',
    +      help='Filters tests by package.')
    +  parser.add_argument(
    +      '--runner-filter',
    +      help='Filters tests by runner class. Must be fully qualified.')
    +  parser.add_argument(
    +      '--shards',
    +      default=-1,
    +      type=int,
+      help='Number of shards to run junit tests in parallel on. Only 1 shard '
+      'is supported when test-filter is specified. Values less than 1 '
+      'auto-select the number of shards.')
    +  parser.add_argument(
    +      '-s', '--test-suite', required=True,
    +      help='JUnit test suite to run.')
    +  debug_group = parser.add_mutually_exclusive_group()
    +  debug_group.add_argument(
    +      '-w', '--wait-for-java-debugger', action='store_const', const='8701',
    +      dest='debug_socket', help='Alias for --debug-socket=8701')
    +  debug_group.add_argument(
    +      '--debug-socket',
    +      help='Wait for java debugger to attach at specified socket address '
    +           'before running any application code. Also disables test timeouts '
    +           'and sets retries=0.')
    +
    +  # These arguments are for Android Robolectric tests.
    +  parser.add_argument(
    +      '--robolectric-runtime-deps-dir',
    +      help='Path to runtime deps for Robolectric.')
    +  parser.add_argument('--native-libs-dir',
    +                      help='Path to search for native libraries.')
    +  parser.add_argument(
    +      '--resource-apk',
    +      required=True,
    +      help='Path to .ap_ containing binary resources for Robolectric.')
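+  # Generated run_* wrapper scripts normally supply --resource-apk; a direct
+  # invocation (illustrative) would therefore look like:
+  #   test_runner.py junit -s chrome_junit_tests --resource-apk <path to .ap_>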
    +
    +
+def AddLinkerTestOptions(parser):
+  """Adds linker test options to |parser|."""
+
    +  parser = parser.add_argument_group('linker arguments')
    +
    +  parser.add_argument(
    +      '--test-apk',
    +      type=os.path.realpath,
    +      help='Path to the linker test APK.')
    +
    +
    +def AddMonkeyTestOptions(parser):
    +  """Adds monkey test options to |parser|."""
    +
    +  parser = parser.add_argument_group('monkey arguments')
    +
    +  parser.add_argument('--browser',
    +                      required=True,
    +                      choices=list(constants.PACKAGE_INFO.keys()),
    +                      metavar='BROWSER',
    +                      help='Browser under test.')
    +  parser.add_argument(
    +      '--category',
    +      nargs='*', dest='categories', default=[],
    +      help='A list of allowed categories. Monkey will only visit activities '
    +           'that are listed with one of the specified categories.')
    +  parser.add_argument(
    +      '--event-count',
    +      default=10000, type=int,
    +      help='Number of events to generate (default: %(default)s).')
    +  parser.add_argument(
    +      '--seed',
    +      type=int,
    +      help='Seed value for pseudo-random generator. Same seed value generates '
    +           'the same sequence of events. Seed is randomized by default.')
    +  parser.add_argument(
    +      '--throttle',
    +      default=100, type=int,
    +      help='Delay between events (ms) (default: %(default)s). ')
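+  # Example (illustrative; valid --browser values are the keys of
+  # constants.PACKAGE_INFO):
+  #   test_runner.py monkey --browser chrome --event-count 1000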
    +
    +
+def AddPythonTestOptions(parser):
+  """Adds python test options to |parser|."""
+
    +  parser = parser.add_argument_group('python arguments')
    +
    +  parser.add_argument('-s',
    +                      '--suite',
    +                      dest='suite_name',
    +                      metavar='SUITE_NAME',
    +                      choices=list(constants.PYTHON_UNIT_TEST_SUITES.keys()),
    +                      help='Name of the test suite to run.')
    +
    +
    +def _CreateClassToFileNameDict(test_apk):
    +  """Creates a dict mapping classes to file names from size-info apk."""
    +  constants.CheckOutputDirectory()
    +  test_apk_size_info = os.path.join(constants.GetOutDirectory(), 'size-info',
    +                                    os.path.basename(test_apk) + '.jar.info')
    +
    +  class_to_file_dict = {}
    +  # Some tests such as webview_cts_tests use a separately downloaded apk to run
    +  # tests. This means the apk may not have been built by the system and hence
    +  # no size info file exists.
    +  if not os.path.exists(test_apk_size_info):
    +    logging.debug('Apk size file not found. %s', test_apk_size_info)
    +    return class_to_file_dict
    +
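+  # Each line of the .jar.info file maps a fully-qualified class name to the
+  # source path it was compiled from, e.g. (illustrative):
+  #   org.chromium.base.Foo,../../base/android/java/src/org/chromium/base/Foo.java
+  # The loop below keeps only non-prebuilt entries and rewrites the '../../'
+  # prefix into the source-absolute '//' form.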
    +  with open(test_apk_size_info, 'r') as f:
    +    for line in f:
    +      file_class, file_name = line.rstrip().split(',', 1)
    +      # Only want files that are not prebuilt.
    +      if file_name.startswith('../../'):
    +        class_to_file_dict[file_class] = str(
    +            file_name.replace('../../', '//', 1))
    +
    +  return class_to_file_dict
    +
    +
    +def _RunPythonTests(args):
    +  """Subcommand of RunTestsCommand which runs python unit tests."""
    +  suite_vars = constants.PYTHON_UNIT_TEST_SUITES[args.suite_name]
    +  suite_path = suite_vars['path']
    +  suite_test_modules = suite_vars['test_modules']
    +
    +  sys.path = [suite_path] + sys.path
    +  try:
    +    suite = unittest.TestSuite()
    +    suite.addTests(unittest.defaultTestLoader.loadTestsFromName(m)
    +                   for m in suite_test_modules)
    +    runner = unittest.TextTestRunner(verbosity=1+args.verbose_count)
    +    return 0 if runner.run(suite).wasSuccessful() else 1
    +  finally:
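+    # Drop the suite path that was prepended to sys.path above.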
    +    sys.path = sys.path[1:]
    +
    +
    +_DEFAULT_PLATFORM_MODE_TESTS = [
    +    'gtest', 'instrumentation', 'junit', 'linker', 'monkey'
    +]
    +
    +
    +def RunTestsCommand(args, result_sink_client=None):
    +  """Checks test type and dispatches to the appropriate function.
    +
    +  Args:
    +    args: argparse.Namespace object.
    +    result_sink_client: A ResultSinkClient object.
    +
    +  Returns:
+    Integer indicating the exit code.
    +
    +  Raises:
    +    Exception: Unknown command name passed in, or an exception from an
    +        individual test runner.
    +  """
    +  command = args.command
    +
    +  ProcessCommonOptions(args)
    +  logging.info('command: %s', ' '.join(sys.argv))
    +  if args.enable_platform_mode or command in _DEFAULT_PLATFORM_MODE_TESTS:
    +    return RunTestsInPlatformMode(args, result_sink_client)
    +
    +  if command == 'python':
    +    return _RunPythonTests(args)
    +  raise Exception('Unknown test type.')
    +
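+# For example (illustrative): 'test_runner.py gtest -s base_unittests' is
+# dispatched to RunTestsInPlatformMode via _DEFAULT_PLATFORM_MODE_TESTS, while
+# 'test_runner.py python -s <suite>' runs under _RunPythonTests.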
    +
    +def _SinkTestResult(test_result, test_file_name, result_sink_client):
    +  """Upload test result to result_sink.
    +
    +  Args:
    +    test_result: A BaseTestResult object
    +    test_file_name: A string representing the file location of the test
    +    result_sink_client: A ResultSinkClient object
    +
    +  Returns:
    +    N/A
    +  """
+  # Some tests emit non-UTF-8 characters as part of the test,
+  # which breaks uploads, so we need to decode and re-encode.
    +  log_decoded = test_result.GetLog()
    +  if isinstance(log_decoded, bytes):
    +    log_decoded = log_decoded.decode('utf-8', 'replace')
    +  html_artifact = ''
    +  https_artifacts = []
    +  for link_name, link_url in sorted(test_result.GetLinks().items()):
    +    if link_url.startswith('https:'):
+      https_artifacts.append('<li><a target="_blank" href=%s>%s</a></li>' %
+                             (link_url, link_name))
+    else:
+      logging.info('Skipping non-https link %r (%s) for test %s.', link_name,
+                   link_url, test_result.GetName())
+  if https_artifacts:
+    html_artifact += '<ul>%s</ul>' % '\n'.join(https_artifacts)
+  result_sink_client.Post(test_result.GetNameForResultSink(),
+                          test_result.GetType(),
+                          test_result.GetDuration(),
+                          log_decoded.encode('utf-8'),
+                          test_file_name,
+                          variant=test_result.GetVariantForResultSink(),
+                          failure_reason=test_result.GetFailureReason(),
+                          html_artifact=html_artifact)
+
+
+_SUPPORTED_IN_PLATFORM_MODE = [
+  # TODO(jbudorick): Add support for more test types.
+  'gtest',
+  'instrumentation',
+  'junit',
+  'linker',
+  'monkey',
+]
+
+
+def RunTestsInPlatformMode(args, result_sink_client=None):
+
+  def infra_error(message):
+    logging.fatal(message)
+    sys.exit(constants.INFRA_EXIT_CODE)
+
+  if args.command not in _SUPPORTED_IN_PLATFORM_MODE:
+    infra_error('%s is not yet supported in platform mode' % args.command)
+
+  ### Set up sigterm handler.
+
+  contexts_to_notify_on_sigterm = []
+  def unexpected_sigterm(_signum, _frame):
+    msg = [
+        'Received SIGTERM. Shutting down.',
+    ]
+    for live_thread in threading.enumerate():
+      # pylint: disable=protected-access
+      thread_stack = ''.join(traceback.format_stack(
+          sys._current_frames()[live_thread.ident]))
+      msg.extend([
+          'Thread "%s" (ident: %s) is currently running:' % (
+              live_thread.name, live_thread.ident),
+          thread_stack])
+
+    for context in contexts_to_notify_on_sigterm:
+      context.ReceivedSigterm()
+
+    infra_error('\n'.join(msg))
+
+  signal.signal(signal.SIGTERM, unexpected_sigterm)
+
+  ### Set up results handling.
+  # TODO(jbudorick): Rewrite results handling.
+
+  # all_raw_results is a list of lists of
+  # base_test_result.TestRunResults objects. Each instance of
+  # TestRunResults contains all test results produced by a single try,
+  # while each list of TestRunResults contains all tries in a single
+  # iteration.
+  all_raw_results = []
+
+  # all_iteration_results is a list of base_test_result.TestRunResults
+  # objects. Each instance of TestRunResults contains the last test
+  # result for each test run in that iteration.
+  all_iteration_results = []
+
+  global_results_tags = set()
+
+  json_file = tempfile.NamedTemporaryFile(delete=False)
+  json_file.close()
+
+  @contextlib.contextmanager
+  def json_finalizer():
+    try:
+      yield
+    finally:
+      if args.json_results_file and os.path.exists(json_file.name):
+        shutil.move(json_file.name, args.json_results_file)
+      elif args.isolated_script_test_output and os.path.exists(json_file.name):
+        shutil.move(json_file.name, args.isolated_script_test_output)
+      else:
+        os.remove(json_file.name)
+
+  @contextlib.contextmanager
+  def json_writer():
+    try:
+      yield
+    except Exception:
+      global_results_tags.add('UNRELIABLE_RESULTS')
+      raise
+    finally:
+      if args.isolated_script_test_output:
+        interrupted = 'UNRELIABLE_RESULTS' in global_results_tags
+        json_results.GenerateJsonTestResultFormatFile(all_raw_results,
+                                                      interrupted,
+                                                      json_file.name,
+                                                      indent=2)
+      else:
+        json_results.GenerateJsonResultsFile(
+            all_raw_results,
+            json_file.name,
+            global_tags=list(global_results_tags),
+            indent=2)
+
+      test_class_to_file_name_dict = {}
+      # Test Location is only supported for instrumentation tests as it
+      # requires the size-info file.
+      if test_instance.TestType() == 'instrumentation':
+        test_class_to_file_name_dict = _CreateClassToFileNameDict(args.test_apk)
+
+      if result_sink_client:
+        for run in all_raw_results:
+          for results in run:
+            for r in results.GetAll():
+              # Matches chrome.page_info.PageInfoViewTest#testChromePage
+              match = re.search(r'^(.+\..+)#', r.GetName())
+              test_file_name = test_class_to_file_name_dict.get(
+                  match.group(1)) if match else None
+              _SinkTestResult(r, test_file_name, result_sink_client)
+
+  @contextlib.contextmanager
+  def upload_logcats_file():
+    try:
+      yield
+    finally:
+      if not args.logcat_output_file:
+        logging.critical('Cannot upload logcat file: no file specified.')
+      elif not os.path.exists(args.logcat_output_file):
+        logging.critical("Cannot upload logcat file: file doesn't exist.")
+      else:
+        with open(args.logcat_output_file) as src:
+          dst = logdog_helper.open_text('unified_logcats')
+          if dst:
+            shutil.copyfileobj(src, dst)
+            dst.close()
+            logging.critical(
+                'Logcat: %s', logdog_helper.get_viewer_url('unified_logcats'))
+
+
+  logcats_uploader = contextlib_ext.Optional(
+      upload_logcats_file(),
+      'upload_logcats_file' in args and args.upload_logcats_file)
+
+  save_detailed_results = (args.local_output or not local_utils.IsOnSwarming()
+                           ) and not args.isolated_script_test_output
+
+  ### Set up test objects.
+
+  out_manager = output_manager_factory.CreateOutputManager(args)
+  env = environment_factory.CreateEnvironment(
+      args, out_manager, infra_error)
+  test_instance = test_instance_factory.CreateTestInstance(args, infra_error)
+  test_run = test_run_factory.CreateTestRun(env, test_instance, infra_error)
+
+  contexts_to_notify_on_sigterm.append(env)
+  contexts_to_notify_on_sigterm.append(test_run)
+
+  if args.list_tests:
+    try:
+      with out_manager, env, test_instance, test_run:
+        test_names = test_run.GetTestsForListing()
+        print('There are {} tests:'.format(len(test_names)))
+        for n in test_names:
+          print(n)
+        return 0
+    except NotImplementedError:
+      sys.stderr.write('Test does not support --list-tests (type={}).\n'.format(
+          args.command))
+      return 1
+
+  ### Run.
+  with out_manager, json_finalizer():
+    # |raw_logs_fh| is only used by Robolectric tests.
+    raw_logs_fh = io.StringIO() if save_detailed_results else None
+
+    with json_writer(), logcats_uploader, env, test_instance, test_run:
+
+      repetitions = (range(args.repeat +
+                           1) if args.repeat >= 0 else itertools.count())
+      result_counts = collections.defaultdict(
+          lambda: collections.defaultdict(int))
+      iteration_count = 0
+      for _ in repetitions:
+        # raw_results will be populated with base_test_result.TestRunResults by
+        # test_run.RunTests(). It is immediately added to all_raw_results so
+        # that in the event of an exception, all_raw_results will already have
+        # the up-to-date results and those can be written to disk.
+        raw_results = []
+        all_raw_results.append(raw_results)
+
+        test_run.RunTests(raw_results, raw_logs_fh=raw_logs_fh)
+        if not raw_results:
+          all_raw_results.pop()
+          continue
+
+        iteration_results = base_test_result.TestRunResults()
+        for r in reversed(raw_results):
+          iteration_results.AddTestRunResults(r)
+        all_iteration_results.append(iteration_results)
+        iteration_count += 1
+
+        for r in iteration_results.GetAll():
+          result_counts[r.GetName()][r.GetType()] += 1
+
+        report_results.LogFull(
+            results=iteration_results,
+            test_type=test_instance.TestType(),
+            test_package=test_run.TestPackage(),
+            annotation=getattr(args, 'annotations', None),
+            flakiness_server=getattr(args, 'flakiness_dashboard_server',
+                                     None))
+
+        failed_tests = (iteration_results.GetNotPass() -
+                        iteration_results.GetSkip())
+        if failed_tests:
+          _LogRerunStatement(failed_tests, args.wrapper_script_args)
+
+        if args.break_on_failure and not iteration_results.DidRunPass():
+          break
+
+      if iteration_count > 1:
+        # display summary results
+        # only display results for a test if at least one test did not pass
+        all_pass = 0
+        tot_tests = 0
+        for test_name in result_counts:
+          tot_tests += 1
+          if any(result_counts[test_name][x] for x in (
+              base_test_result.ResultType.FAIL,
+              base_test_result.ResultType.CRASH,
+              base_test_result.ResultType.TIMEOUT,
+              base_test_result.ResultType.UNKNOWN)):
+            logging.critical(
+                '%s: %s',
+                test_name,
+                ', '.join('%s %s' % (str(result_counts[test_name][i]), i)
+                          for i in base_test_result.ResultType.GetTypes()))
+          else:
+            all_pass += 1
+
+        logging.critical('%s of %s tests passed in all %s runs',
+                         str(all_pass),
+                         str(tot_tests),
+                         str(iteration_count))
+
+    if save_detailed_results:
+      assert raw_logs_fh
+      raw_logs_fh.seek(0)
+      raw_logs = raw_logs_fh.read()
+      if raw_logs:
+        with out_manager.ArchivedTempfile(
+            'raw_logs.txt', 'raw_logs',
+            output_manager.Datatype.TEXT) as raw_logs_file:
+          raw_logs_file.write(raw_logs)
+        logging.critical('RAW LOGS: %s', raw_logs_file.Link())
+
+      with out_manager.ArchivedTempfile(
+          'test_results_presentation.html',
+          'test_results_presentation',
+          output_manager.Datatype.HTML) as results_detail_file:
+        result_html_string, _, _ = test_results_presentation.result_details(
+            json_path=json_file.name,
+            test_name=args.command,
+            cs_base_url='http://cs.chromium.org',
+            local_output=True)
+        results_detail_file.write(result_html_string)
+        results_detail_file.flush()
+      logging.critical('TEST RESULTS: %s', results_detail_file.Link())
+
+      ui_screenshots = test_results_presentation.ui_screenshot_set(
+          json_file.name)
+      if ui_screenshots:
+        with out_manager.ArchivedTempfile(
+            'ui_screenshots.json',
+            'ui_capture',
+            output_manager.Datatype.JSON) as ui_screenshot_file:
+          ui_screenshot_file.write(ui_screenshots)
+        logging.critical('UI Screenshots: %s', ui_screenshot_file.Link())
+
+  return (0 if all(r.DidRunPass() for r in all_iteration_results)
+          else constants.ERROR_EXIT_CODE)
+
+
+def _LogRerunStatement(failed_tests, wrapper_arg_str):
+  """Logs a message that can rerun the failed tests.
+
+  Logs a copy/pasteable message that filters tests so just the failing tests
+  are run.
+
+  Args:
+    failed_tests: A set of test results that did not pass.
+    wrapper_arg_str: A string of args that were passed to the called wrapper
+        script.
+  """
+  rerun_arg_list = []
+  try:
+    constants.CheckOutputDirectory()
+  # constants.CheckOutputDirectory throws bare exceptions.
+  except:  # pylint: disable=bare-except
+    logging.exception('Output directory not found. Unable to generate failing '
+                      'test filter file.')
+    return
+
+  output_directory = constants.GetOutDirectory()
+  if not os.path.exists(output_directory):
+    logging.error('Output directory not found. Unable to generate failing '
+                  'test filter file.')
+    return
+
+  test_filter_file = os.path.join(os.path.relpath(output_directory),
+                                  _RERUN_FAILED_TESTS_FILE)
+  arg_list = shlex.split(wrapper_arg_str) if wrapper_arg_str else sys.argv
+  index = 0
+  while index < len(arg_list):
+    arg = arg_list[index]
+    # Skip adding the filter= and/or the filter arg as we're replacing
+    # it with the new filter arg.
+    # This covers --test-filter=, --test-launcher-filter-file=, --gtest-filter=,
+    # --test-filter *Foobar.baz, -f *foobar, --package-filter <package>,
+    # --runner-filter <runner>.
+    if 'filter' in arg or arg == '-f':
+      index += 1 if '=' in arg else 2
+      continue
+
+    rerun_arg_list.append(arg)
+    index += 1
+
+  failed_test_list = [str(t) for t in failed_tests]
+  with open(test_filter_file, 'w') as fp:
+    for t in failed_test_list:
+      # Test result names can have # in them that don't match when applied as
+      # a test name filter.
+      fp.write('%s\n' % t.replace('#', '.'))
+
+  rerun_arg_list.append('--test-launcher-filter-file=%s' % test_filter_file)
+  msg = """
+    %d Test(s) failed.
+    Rerun failed tests with copy and pastable command:
+        %s
+    """
+  logging.critical(msg, len(failed_tests), shlex.join(rerun_arg_list))
+
+
+def DumpThreadStacks(_signal, _frame):
+  for thread in threading.enumerate():
+    reraiser_thread.LogThreadStack(thread)
+
+
+def main():
+  signal.signal(signal.SIGUSR1, DumpThreadStacks)
+
+  parser = argparse.ArgumentParser()
+  command_parsers = parser.add_subparsers(
+      title='test types', dest='command')
+
+  subp = command_parsers.add_parser(
+      'gtest',
+      help='googletest-based C++ tests')
+  AddCommonOptions(subp)
+  AddDeviceOptions(subp)
+  AddEmulatorOptions(subp)
+  AddGTestOptions(subp)
+  AddTracingOptions(subp)
+  AddCommandLineOptions(subp)
+
+  subp = command_parsers.add_parser(
+      'instrumentation',
+      help='InstrumentationTestCase-based Java tests')
+  AddCommonOptions(subp)
+  AddDeviceOptions(subp)
+  AddEmulatorOptions(subp)
+  AddInstrumentationTestOptions(subp)
+  AddSkiaGoldTestOptions(subp)
+  AddTracingOptions(subp)
+  AddCommandLineOptions(subp)
+
+  subp = command_parsers.add_parser(
+      'junit',
+      help='JUnit4-based Java tests')
+  AddCommonOptions(subp)
+  AddJUnitTestOptions(subp)
+
+  subp = command_parsers.add_parser(
+      'linker',
+      help='linker tests')
+  AddCommonOptions(subp)
+  AddDeviceOptions(subp)
+  AddEmulatorOptions(subp)
+  AddLinkerTestOptions(subp)
+
+  subp = command_parsers.add_parser(
+      'monkey',
+      help="tests based on Android's monkey command")
+  AddCommonOptions(subp)
+  AddDeviceOptions(subp)
+  AddEmulatorOptions(subp)
+  AddMonkeyTestOptions(subp)
+
+  subp = command_parsers.add_parser(
+      'python',
+      help='python tests based on unittest.TestCase')
+  AddCommonOptions(subp)
+  AddPythonTestOptions(subp)
+
+  args, unknown_args = parser.parse_known_args()
+  if unknown_args:
+    if hasattr(args, 'allow_unknown') and args.allow_unknown:
+      args.command_line_flags = unknown_args
+    else:
+      parser.error('unrecognized arguments: %s' % ' '.join(unknown_args))
+
+  # --replace-system-package/--remove-system-package has the potential to cause
+  # issues if --enable-concurrent-adb is set, so disallow that combination.
+  concurrent_adb_enabled = (hasattr(args, 'enable_concurrent_adb')
+                            and args.enable_concurrent_adb)
+  replacing_system_packages = (hasattr(args, 'replace_system_package')
+                               and args.replace_system_package)
+  removing_system_packages = (hasattr(args, 'system_packages_to_remove')
+                              and args.system_packages_to_remove)
+  if (concurrent_adb_enabled
+      and (replacing_system_packages or removing_system_packages)):
+    parser.error('--enable-concurrent-adb cannot be used with either '
+                 '--replace-system-package or --remove-system-package')
+
+  # --use-webview-provider has the potential to cause issues if
+  # --enable-concurrent-adb is set, so disallow that combination
+  if (hasattr(args, 'use_webview_provider') and
+      hasattr(args, 'enable_concurrent_adb') and args.use_webview_provider and
+      args.enable_concurrent_adb):
+    parser.error('--use-webview-provider and --enable-concurrent-adb cannot '
+                 'be used together')
+
+  if (getattr(args, 'coverage_on_the_fly', False)
+      and not getattr(args, 'coverage_dir', '')):
+    parser.error('--coverage-on-the-fly requires --coverage-dir')
+
+  if (hasattr(args, 'debug_socket') or
+      (hasattr(args, 'wait_for_java_debugger') and
+       args.wait_for_java_debugger)):
+    args.num_retries = 0
+
+  # Result-sink may not exist in the environment if rdb stream is not enabled.
+  result_sink_client = result_sink.TryInitClient()
+
+  try:
+    return RunTestsCommand(args, result_sink_client)
+  except base_error.BaseError as e:
+    logging.exception('Error occurred.')
+    if e.is_infra_error:
+      return constants.INFRA_EXIT_CODE
+    return constants.ERROR_EXIT_CODE
+  except:  # pylint: disable=W0702
+    logging.exception('Unrecognized error occurred.')
+    return constants.ERROR_EXIT_CODE
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/android/test_runner.pydeps b/android/test_runner.pydeps
new file mode 100644
index 000000000000..5c1cd13440c9
--- /dev/null
+++ b/android/test_runner.pydeps
@@ -0,0 +1,231 @@
+# Generated by running:
+#   build/print_python_deps.py --root build/android --output build/android/test_runner.pydeps build/android/test_runner.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/__init__.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/__init__.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/decorators.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/log.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/meta_class.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/multiprocessing_shim.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/perfetto_proto_classes.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/perfetto_trace_writer.py
+../../third_party/catapult/common/py_trace_event/py_trace_event/trace_time.py
+../../third_party/catapult/common/py_trace_event/third_party/protobuf/encoder.py
+../../third_party/catapult/common/py_trace_event/third_party/protobuf/wire_format.py
+../../third_party/catapult/common/py_utils/py_utils/__init__.py
+../../third_party/catapult/common/py_utils/py_utils/atexit_with_log.py
+../../third_party/catapult/common/py_utils/py_utils/binary_manager.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py
+../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py
+../../third_party/catapult/common/py_utils/py_utils/contextlib_ext.py
+../../third_party/catapult/common/py_utils/py_utils/lock.py
+../../third_party/catapult/common/py_utils/py_utils/modules_util.py
+../../third_party/catapult/common/py_utils/py_utils/retry_util.py
+../../third_party/catapult/common/py_utils/py_utils/tempfile_ext.py
+../../third_party/catapult/common/py_utils/py_utils/ts_proxy_server.py
+../../third_party/catapult/common/py_utils/py_utils/webpagereplay_go_server.py
+../../third_party/catapult/dependency_manager/dependency_manager/__init__.py
+../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/base_config.py
+../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py
+../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py
+../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py
+../../third_party/catapult/dependency_manager/dependency_manager/manager.py
+../../third_party/catapult/dependency_manager/dependency_manager/uploader.py
+../../third_party/catapult/devil/devil/__init__.py
+../../third_party/catapult/devil/devil/android/__init__.py
+../../third_party/catapult/devil/devil/android/apk_helper.py
+../../third_party/catapult/devil/devil/android/battery_utils.py
+../../third_party/catapult/devil/devil/android/constants/__init__.py
+../../third_party/catapult/devil/devil/android/constants/chrome.py
+../../third_party/catapult/devil/devil/android/constants/file_system.py
+../../third_party/catapult/devil/devil/android/crash_handler.py
+../../third_party/catapult/devil/devil/android/decorators.py
+../../third_party/catapult/devil/devil/android/device_denylist.py
+../../third_party/catapult/devil/devil/android/device_errors.py
+../../third_party/catapult/devil/devil/android/device_list.py
+../../third_party/catapult/devil/devil/android/device_signal.py
+../../third_party/catapult/devil/devil/android/device_temp_file.py
+../../third_party/catapult/devil/devil/android/device_utils.py
+../../third_party/catapult/devil/devil/android/flag_changer.py
+../../third_party/catapult/devil/devil/android/forwarder.py
+../../third_party/catapult/devil/devil/android/install_commands.py
+../../third_party/catapult/devil/devil/android/logcat_monitor.py
+../../third_party/catapult/devil/devil/android/md5sum.py
+../../third_party/catapult/devil/devil/android/ndk/__init__.py
+../../third_party/catapult/devil/devil/android/ndk/abis.py
+../../third_party/catapult/devil/devil/android/ports.py
+../../third_party/catapult/devil/devil/android/sdk/__init__.py
+../../third_party/catapult/devil/devil/android/sdk/aapt.py
+../../third_party/catapult/devil/devil/android/sdk/adb_wrapper.py
+../../third_party/catapult/devil/devil/android/sdk/build_tools.py
+../../third_party/catapult/devil/devil/android/sdk/bundletool.py
+../../third_party/catapult/devil/devil/android/sdk/intent.py
+../../third_party/catapult/devil/devil/android/sdk/keyevent.py
+../../third_party/catapult/devil/devil/android/sdk/shared_prefs.py
+../../third_party/catapult/devil/devil/android/sdk/split_select.py
+../../third_party/catapult/devil/devil/android/sdk/version_codes.py
+../../third_party/catapult/devil/devil/android/settings.py
+../../third_party/catapult/devil/devil/android/tools/__init__.py
+../../third_party/catapult/devil/devil/android/tools/device_recovery.py
+../../third_party/catapult/devil/devil/android/tools/device_status.py
+../../third_party/catapult/devil/devil/android/tools/script_common.py
+../../third_party/catapult/devil/devil/android/tools/system_app.py
+../../third_party/catapult/devil/devil/android/tools/webview_app.py
+../../third_party/catapult/devil/devil/android/valgrind_tools/__init__.py
+../../third_party/catapult/devil/devil/android/valgrind_tools/base_tool.py
+../../third_party/catapult/devil/devil/base_error.py
+../../third_party/catapult/devil/devil/constants/__init__.py
+../../third_party/catapult/devil/devil/constants/exit_codes.py
+../../third_party/catapult/devil/devil/devil_env.py
+../../third_party/catapult/devil/devil/utils/__init__.py
+../../third_party/catapult/devil/devil/utils/cmd_helper.py
+../../third_party/catapult/devil/devil/utils/file_utils.py
+../../third_party/catapult/devil/devil/utils/host_utils.py
+../../third_party/catapult/devil/devil/utils/lazy/__init__.py
+../../third_party/catapult/devil/devil/utils/lazy/weak_constant.py
+../../third_party/catapult/devil/devil/utils/logging_common.py
+../../third_party/catapult/devil/devil/utils/lsusb.py
+../../third_party/catapult/devil/devil/utils/parallelizer.py
+../../third_party/catapult/devil/devil/utils/reraiser_thread.py
+../../third_party/catapult/devil/devil/utils/reset_usb.py
+../../third_party/catapult/devil/devil/utils/run_tests_helper.py
+../../third_party/catapult/devil/devil/utils/signal_handler.py
+../../third_party/catapult/devil/devil/utils/timeout_retry.py
+../../third_party/catapult/devil/devil/utils/watchdog_timer.py
+../../third_party/catapult/devil/devil/utils/zip_utils.py
+../../third_party/catapult/third_party/six/six.py
+../../third_party/colorama/src/colorama/__init__.py
+../../third_party/colorama/src/colorama/ansi.py
+../../third_party/colorama/src/colorama/ansitowin32.py
+../../third_party/colorama/src/colorama/initialise.py
+../../third_party/colorama/src/colorama/win32.py
+../../third_party/colorama/src/colorama/winterm.py
+../../third_party/jinja2/__init__.py
+../../third_party/jinja2/_identifier.py
+../../third_party/jinja2/async_utils.py
+../../third_party/jinja2/bccache.py
+../../third_party/jinja2/compiler.py
+../../third_party/jinja2/defaults.py
+../../third_party/jinja2/environment.py
+../../third_party/jinja2/exceptions.py
+../../third_party/jinja2/filters.py
+../../third_party/jinja2/idtracking.py
+../../third_party/jinja2/lexer.py
+../../third_party/jinja2/loaders.py
+../../third_party/jinja2/nodes.py
+../../third_party/jinja2/optimizer.py
+../../third_party/jinja2/parser.py
+../../third_party/jinja2/runtime.py
+../../third_party/jinja2/tests.py
+../../third_party/jinja2/utils.py
+../../third_party/jinja2/visitor.py
+../../third_party/logdog/logdog/__init__.py
+../../third_party/logdog/logdog/bootstrap.py
+../../third_party/logdog/logdog/stream.py
+../../third_party/logdog/logdog/streamname.py
+../../third_party/logdog/logdog/varint.py
+../../third_party/markupsafe/__init__.py
+../../third_party/markupsafe/_compat.py
+../../third_party/markupsafe/_native.py
+../action_helpers.py
+../gn_helpers.py
+../print_python_deps.py
+../skia_gold_common/__init__.py
+../skia_gold_common/skia_gold_properties.py
+../skia_gold_common/skia_gold_session.py
+../skia_gold_common/skia_gold_session_manager.py
+../util/lib/__init__.py
+../util/lib/common/chrome_test_server_spawner.py
+../util/lib/common/unittest_util.py
+../util/lib/results/__init__.py
+../util/lib/results/result_sink.py
+../util/lib/results/result_types.py
+../zip_helpers.py
+devil_chromium.py
+gyp/dex.py
+gyp/util/__init__.py
+gyp/util/build_utils.py
+gyp/util/md5_check.py
+incremental_install/__init__.py
+incremental_install/installer.py
+pylib/__init__.py
+pylib/base/__init__.py
+pylib/base/base_test_result.py
+pylib/base/environment.py
+pylib/base/environment_factory.py
+pylib/base/output_manager.py
+pylib/base/output_manager_factory.py
+pylib/base/test_collection.py
+pylib/base/test_exception.py
+pylib/base/test_instance.py
+pylib/base/test_instance_factory.py
+pylib/base/test_run.py
+pylib/base/test_run_factory.py
+pylib/base/test_server.py
+pylib/constants/__init__.py
+pylib/constants/host_paths.py
+pylib/gtest/__init__.py
+pylib/gtest/gtest_test_instance.py
+pylib/instrumentation/__init__.py
+pylib/instrumentation/instrumentation_parser.py
+pylib/instrumentation/instrumentation_test_instance.py
+pylib/instrumentation/test_result.py
+pylib/junit/__init__.py
+pylib/junit/junit_test_instance.py
+pylib/local/__init__.py
+pylib/local/device/__init__.py
+pylib/local/device/local_device_environment.py
+pylib/local/device/local_device_gtest_run.py
+pylib/local/device/local_device_instrumentation_test_run.py
+pylib/local/device/local_device_monkey_test_run.py
+pylib/local/device/local_device_test_run.py
+pylib/local/emulator/__init__.py
+pylib/local/emulator/avd.py
+pylib/local/emulator/ini.py
+pylib/local/emulator/local_emulator_environment.py
+pylib/local/emulator/proto/__init__.py
+pylib/local/emulator/proto/avd_pb2.py
+pylib/local/local_test_server_spawner.py
+pylib/local/machine/__init__.py
+pylib/local/machine/local_machine_environment.py
+pylib/local/machine/local_machine_junit_test_run.py
+pylib/monkey/__init__.py
+pylib/monkey/monkey_test_instance.py
+pylib/output/__init__.py
+pylib/output/local_output_manager.py
+pylib/output/noop_output_manager.py
+pylib/output/remote_output_manager.py
+pylib/results/__init__.py
+pylib/results/flakiness_dashboard/__init__.py
+pylib/results/flakiness_dashboard/json_results_generator.py
+pylib/results/flakiness_dashboard/results_uploader.py
+pylib/results/json_results.py
+pylib/results/presentation/__init__.py
+pylib/results/presentation/standard_gtest_merge.py
+pylib/results/presentation/test_results_presentation.py
+pylib/results/report_results.py
+pylib/symbols/__init__.py
+pylib/symbols/deobfuscator.py
+pylib/symbols/expensive_line_transformer.py
+pylib/symbols/stack_symbolizer.py
+pylib/utils/__init__.py
+pylib/utils/chrome_proxy_utils.py
+pylib/utils/decorators.py
+pylib/utils/device_dependencies.py
+pylib/utils/dexdump.py
+pylib/utils/gold_utils.py
+pylib/utils/google_storage_helper.py
+pylib/utils/instrumentation_tracing.py
+pylib/utils/local_utils.py
+pylib/utils/logdog_helper.py
+pylib/utils/logging_utils.py
+pylib/utils/repo_utils.py
+pylib/utils/shared_preference_utils.py
+pylib/utils/test_filter.py
+pylib/utils/time_profile.py
+pylib/valgrind_tools.py
+test_runner.py
+tombstones.py
diff --git a/android/test_wrapper/logdog_wrapper.py b/android/test_wrapper/logdog_wrapper.py
new file mode 100755
index 000000000000..56206572e146
--- /dev/null
+++ b/android/test_wrapper/logdog_wrapper.py
@@ -0,0 +1,170 @@
+#!/usr/bin/env vpython3
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+ +"""Wrapper for adding logdog streaming support to swarming tasks.""" + +import argparse +import contextlib +import json +import logging +import os +import signal +import subprocess +import sys + +import six + +_SRC_PATH = os.path.abspath(os.path.join( + os.path.dirname(__file__), '..', '..', '..')) +sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'catapult', 'devil')) +sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'catapult', 'common', + 'py_utils')) + +from devil.utils import signal_handler +from devil.utils import timeout_retry +from py_utils import tempfile_ext + +OUTPUT = 'logdog' +COORDINATOR_HOST = 'luci-logdog.appspot.com' +LOGDOG_TERMINATION_TIMEOUT = 30 + + +def CommandParser(): + # Parses the command line arguments being passed in + if six.PY3: + parser = argparse.ArgumentParser(allow_abbrev=False) + else: + parser = argparse.ArgumentParser() + wrapped = parser.add_mutually_exclusive_group() + wrapped.add_argument( + '--target', + help='The test target to be run. If neither target nor script are set,' + ' any extra args passed to this script are assumed to be the' + ' full test command to run.') + wrapped.add_argument( + '--script', + help='The script target to be run. If neither target nor script are set,' + ' any extra args passed to this script are assumed to be the' + ' full test command to run.') + parser.add_argument('--logdog-bin-cmd', required=True, + help='The logdog bin cmd.') + return parser + + +def CreateStopTestsMethod(proc): + def StopTests(signum, _frame): + logging.error('Forwarding signal %s to test process', str(signum)) + proc.send_signal(signum) + return StopTests + + +@contextlib.contextmanager +def NoLeakingProcesses(popen): + try: + yield popen + finally: + if popen is not None: + try: + if popen.poll() is None: + popen.kill() + except OSError: + logging.warning('Failed to kill %s. Process may be leaked.', + str(popen.pid)) + + +def GetProjectFromLuciContext(): + """Return the "project" from LUCI_CONTEXT. + + LUCI_CONTEXT contains a section "realm.name" whose value follows the format + ":". This method parses and return the "project" part. + + Fallback to "chromium" if realm name is None + """ + project = 'chromium' + ctx_path = os.environ.get('LUCI_CONTEXT') + if ctx_path: + try: + with open(ctx_path) as f: + luci_ctx = json.load(f) + realm_name = luci_ctx.get('realm', {}).get('name') + if realm_name: + project = realm_name.split(':')[0] + except (OSError, IOError, ValueError): + pass + return project + + +def main(): + parser = CommandParser() + args, extra_cmd_args = parser.parse_known_args(sys.argv[1:]) + + logging.basicConfig(level=logging.INFO) + if args.target: + test_cmd = [os.path.join('bin', 'run_%s' % args.target), '-v'] + test_cmd += extra_cmd_args + elif args.script: + test_cmd = [args.script] + test_cmd += extra_cmd_args + else: + test_cmd = extra_cmd_args + + test_env = dict(os.environ) + logdog_cmd = [] + + with tempfile_ext.NamedTemporaryDirectory( + prefix='tmp_android_logdog_wrapper') as temp_directory: + if not os.path.exists(args.logdog_bin_cmd): + logging.error( + 'Logdog binary %s unavailable. 
Unable to create logdog client', + args.logdog_bin_cmd) + else: + streamserver_uri = 'unix:%s' % os.path.join(temp_directory, + 'butler.sock') + prefix = os.path.join('android', 'swarming', 'logcats', + os.environ.get('SWARMING_TASK_ID')) + project = GetProjectFromLuciContext() + + logdog_cmd = [ + args.logdog_bin_cmd, + '-project', project, + '-output', OUTPUT, + '-prefix', prefix, + '-coordinator-host', COORDINATOR_HOST, + 'serve', + '-streamserver-uri', streamserver_uri] + test_env.update({ + 'LOGDOG_STREAM_PROJECT': project, + 'LOGDOG_STREAM_PREFIX': prefix, + 'LOGDOG_STREAM_SERVER_PATH': streamserver_uri, + 'LOGDOG_COORDINATOR_HOST': COORDINATOR_HOST, + }) + + logdog_proc = None + if logdog_cmd: + logdog_proc = subprocess.Popen(logdog_cmd) + + with NoLeakingProcesses(logdog_proc): + with NoLeakingProcesses( + subprocess.Popen(test_cmd, env=test_env)) as test_proc: + with signal_handler.SignalHandler(signal.SIGTERM, + CreateStopTestsMethod(test_proc)): + result = test_proc.wait() + if logdog_proc: + def logdog_stopped(): + return logdog_proc.poll() is not None + + logdog_proc.terminate() + timeout_retry.WaitFor(logdog_stopped, wait_period=1, + max_tries=LOGDOG_TERMINATION_TIMEOUT) + + # If logdog_proc hasn't finished by this point, allow + # NoLeakingProcesses to kill it. + + + return result + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/android/test_wrapper/logdog_wrapper.pydeps b/android/test_wrapper/logdog_wrapper.pydeps new file mode 100644 index 000000000000..0e8d039b999a --- /dev/null +++ b/android/test_wrapper/logdog_wrapper.pydeps @@ -0,0 +1,12 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/test_wrapper --output build/android/test_wrapper/logdog_wrapper.pydeps build/android/test_wrapper/logdog_wrapper.py +../../../third_party/catapult/common/py_utils/py_utils/__init__.py +../../../third_party/catapult/common/py_utils/py_utils/tempfile_ext.py +../../../third_party/catapult/devil/devil/__init__.py +../../../third_party/catapult/devil/devil/base_error.py +../../../third_party/catapult/devil/devil/utils/__init__.py +../../../third_party/catapult/devil/devil/utils/reraiser_thread.py +../../../third_party/catapult/devil/devil/utils/signal_handler.py +../../../third_party/catapult/devil/devil/utils/timeout_retry.py +../../../third_party/catapult/devil/devil/utils/watchdog_timer.py +logdog_wrapper.py diff --git a/android/tests/symbolize/Makefile b/android/tests/symbolize/Makefile new file mode 100644 index 000000000000..82c9ea53fa9f --- /dev/null +++ b/android/tests/symbolize/Makefile @@ -0,0 +1,11 @@ +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +TOOLCHAIN=../../../../third_party/android_ndk/toolchains/arm-linux-androideabi-4.6/prebuilt/linux-x86_64/bin/arm-linux-androideabi- +CXX=$(TOOLCHAIN)g++ + +lib%.so: %.cc + $(CXX) -nostdlib -g -fPIC -shared $< -o $@ + +all: liba.so libb.so diff --git a/android/tests/symbolize/a.cc b/android/tests/symbolize/a.cc new file mode 100644 index 000000000000..67441185ab38 --- /dev/null +++ b/android/tests/symbolize/a.cc @@ -0,0 +1,14 @@ +// Copyright 2013 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +class A { + public: + A(); + void Foo(int i); + void Bar(const char* c); +}; + +A::A() {} +void A::Foo(int i) {} +void A::Bar(const char* c) {} diff --git a/android/tests/symbolize/b.cc b/android/tests/symbolize/b.cc new file mode 100644 index 000000000000..9279977aa5b0 --- /dev/null +++ b/android/tests/symbolize/b.cc @@ -0,0 +1,14 @@ +// Copyright 2013 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +class B { + public: + B(); + void Baz(float f); + void Qux(double d); +}; + +B::B() {} +void B::Baz(float f) {} +void B::Qux(double d) {} diff --git a/android/tests/symbolize/liba.so b/android/tests/symbolize/liba.so new file mode 100644 index 0000000000000000000000000000000000000000..79cb739121253e1028b65f8d7c13880eabed883a GIT binary patch literal 6908 zcmeHMO>9(E6h3$UI_;ZIr=9Xoa6$?~fY%NbjD;Z6S}F*Q5HO;N&6}A=JKD}PGq0po z2n0=Bpa~?zm7<9o5*OA)m$=u3VdKsiAZgUN(YRz$zwf?xJJTT$f0oV(=bgXr{+&7R z-QLUP(Gy3NQer49JH%LnaQ3d#k?5dgqz$nRvR-0vF=qH4baEtM(;lX#iy;wmlvYY~ z|JS=ju0Q4TEZk)F%Q&F&xH!i+wd55EZ6@6wPK-b6p%cS#_{e5{r_ugN{Wtf^^`y{W zA7l@Du^OB8GXv^!4u&S=_TE ziyu5l+&%dqF4j_fVAE1qKD{q@uRW0XonN*r_up^EBvRJWKZ$>|ynLBCZBZ)&tqim> z(8@q71Fa0SGSJFED+8?zv@-C2&H&G7y*M24jESSLKD*t7-2r_J%(L2$uo)w50W!FN-XvvRB2NQSlKSHrQdhw;;fLMj}$tRd^5 zrP9cvt)5GS<1~+@4<(9eIwMgzmL5#tr>{rfJCZcv7{kcoBSGY@ODuGR^z}hGY=v?7 z@7S*4R^Uk@d$t-&AG9)-KU`;2!byL?q^+`PPWVX7k99>Qn+~D8QhuApShs1%cn^$V z88p+Y;YbU~zh=0kC9fqyaw$UDr}eiP)Yj+AsKVEl>D*gtDmh zI+lfn-w$*aKi~I|xuKz)E)OWIT9yG+)+dSgIQ0gs1{URQ$ifsL>@Nov*$no95pA;q zIl7_!P(reaPxUa_iI+&sYz_8cs&8 zso_q``1#glfNA#g&1&>BiS1xYOhG0q)5T2I&JMuNu?ig5$>Rmx3N^a_%&5d7an-JR zBE4#jS{GigHmHrEEwSCm?<3Uci{LJf-&>~Lr-&YxZ@Et~ZKsSWX?%O?ca|#DCTn1` zeIF$6263gi>TYlsY_mSbpKZdAz}AbWAM-ihWWU^G`}NZ2NMBJVo_TN`#>E?8hP_B7Z)2u-)mCskiE>}F)o-EJV{PFi(aI3Xar3^G$EzF2L z<!(igLmy@}d8zPqUIhc2~#Bq-&4?;I__D9|r_)P6Ke{P5&{RC!P z_Q#s8*EJj<(=R1>_9~>o@xyHTp``r-m}7D+`tBZg_}J}q{}38aTo|3PPvRs4uD?L^ ztc<%8y2jI)0Y3(J1tIpgLcsA8Zpf&7H;vBN*>1d(4GyDz*Mytw5qXyl0y4O9*hXjQ zH{h}XhQh=SH|X?l#um)A88W!n;V?R5qrhdT1Y}}g!ZtcXE^z#A7&5UrIH(T7MlP2ji%8ZtN=4%EP`@h<|$ zP16v^Bs~PPMh)Eq?k)`ezY6P({iVQvf^|(nk{0ptea%<()AaW%tVW!*7W;__B4*UT E0E!c%1poj5 literal 0 HcmV?d00001 diff --git a/android/tests/symbolize/libb.so b/android/tests/symbolize/libb.so new file mode 100644 index 0000000000000000000000000000000000000000..7cf01d43c585ac03b5c312ab80efb694895a9dd3 GIT binary patch literal 6896 zcmeHMO>9(E6h3$UI%THQpDE$b;HZ_90IwY=7>hwVr634RikJusn>U@;cH;bH<_&2P zg`kNhU;>G8rD!5MHyCw^8+Wc)$c`8xX`)M`J7UJ~yYJo3bU-w^bWVESJwM+)_uli) zdvEUTg{k9*l~Q6TD8pj3F&Lv8v?n|!Dd~c5lWdd-O!OK42{Jk4v*{02)A@i1IZUlj zbo{HmB3GaExB@eoap?zC{>slWW>(xBq3xvm!-(-KE`k`2!bUdpJBj{}i{BoX>*+(h zA+!UC5<@!%J>~06(Ww^w8R)I}UC=d{cKNlzytV2F+jN;dGo8*%?w&g9NqaK(xv64@6B|7YS1rVs(CQ?pl$X zAKZ`MK5;)Pv6blPmX)ARKW}Y}+zYi~;(f7|NBIZ+7{5RD@|yl1HyRhr z*c`t@oeXp`(8)k21Dycx04O7&>+g?KJWV<;>~lVfq*@22;?g9+n~(2dkR`9&;Z;3I^x%=v=W7^IN!FU1^Kjt+`vqg_(x=xKt_LE3d1D@#el<0@ zV@GPt+Lzirn#SMgzSMA~>QtSAW7nMR;x6WEGE%F~jm#D7(i>XJ*7Jqjh+WE6%lX`h z>(t!ZNNu^8Ef@0VoHa*B{(AHolYg@0&FaYHnyoq&6SHqrvw3qi=x4{>sIsG zIttA6sMNFWa%GKPvWrdKuG*yqtuB{pZq;;a5%Z<_vRI|E>sSk=y2V?+>#S3)<;x|Y z(QCOV)}mcoG<^&Cl4IogDh4x3p**L1*u^{qZ*i`j6|1(0@jS`2%SEm0(^Dq=pV8-l z@!QOD;dh#R(bpmB`ep6#^B45({C<=9{We4_=?gH=XBm&*bu!O~ji1j0d9!abwaa?S z!DfgU(vM*FWjxk&v#wzPnYe7hl}l(1E(+PiVN3lEm}7D+`n(?J_7%vCPcpb5f-xp> zk^(m$AX-+(<(rmq{| mRTxtFiles; + private final File mProguardMapping; + /** These can be class or dex files. 
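+     * (Jar archives are accepted too; recordClassUsages() scans them for
+     * .dex entries.)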
*/ + private final Iterable mClasses; + private final Iterable mManifests; + private final Iterable mResourceDirs; + + private final File mReportFile; + private final StringWriter mDebugOutput; + private final PrintWriter mDebugPrinter; + + /** The computed set of unused resources */ + private List mUnused; + + /** + * Map from resource class owners (VM format class) to corresponding resource entries. + * This lets us map back from code references (obfuscated class and possibly obfuscated field + * reference) back to the corresponding resource type and name. + */ + private Map>> mResourceObfuscation = + Maps.newHashMapWithExpectedSize(30); + + /** Obfuscated name of android/support/v7/widget/SuggestionsAdapter.java */ + private String mSuggestionsAdapter; + + /** Obfuscated name of android/support/v7/internal/widget/ResourcesWrapper.java */ + private String mResourcesWrapper; + + /* A Pair class because java does not come with batteries included. */ + private static class Pair { + private U mFirst; + private V mSecond; + + Pair(U first, V second) { + this.mFirst = first; + this.mSecond = second; + } + + public U getFirst() { + return mFirst; + } + + public V getSecond() { + return mSecond; + } + } + + public UnusedResources(Iterable rTxtFiles, Iterable classes, + Iterable manifests, File mapping, Iterable resources, File reportFile) { + mRTxtFiles = rTxtFiles; + mProguardMapping = mapping; + mClasses = classes; + mManifests = manifests; + mResourceDirs = resources; + + mReportFile = reportFile; + if (reportFile != null) { + mDebugOutput = new StringWriter(8 * 1024); + mDebugPrinter = new PrintWriter(mDebugOutput); + } else { + mDebugOutput = null; + mDebugPrinter = null; + } + } + + public void close() { + if (mDebugOutput != null) { + String output = mDebugOutput.toString(); + + if (mReportFile != null) { + File dir = mReportFile.getParentFile(); + if (dir != null) { + if ((dir.exists() || dir.mkdir()) && dir.canWrite()) { + try { + Files.asCharSink(mReportFile, Charsets.UTF_8).write(output); + } catch (IOException ignore) { + } + } + } + } + } + } + + public void analyze() throws IOException, ParserConfigurationException, SAXException { + gatherResourceValues(mRTxtFiles); + recordMapping(mProguardMapping); + + for (File jarOrDir : mClasses) { + recordClassUsages(jarOrDir); + } + recordManifestUsages(mManifests); + recordResources(mResourceDirs); + dumpReferences(); + mModel.processToolsAttributes(); + mUnused = mModel.findUnused(); + } + + public void emitConfig(Path destination) throws IOException { + File destinationFile = destination.toFile(); + if (!destinationFile.exists()) { + destinationFile.getParentFile().mkdirs(); + boolean success = destinationFile.createNewFile(); + if (!success) { + throw new IOException("Could not create " + destination); + } + } + StringBuilder sb = new StringBuilder(); + Collections.sort(mUnused); + for (Resource resource : mUnused) { + sb.append(resource.type + "/" + resource.name + "#remove\n"); + } + Files.asCharSink(destinationFile, UTF_8).write(sb.toString()); + } + + private void dumpReferences() { + if (mDebugPrinter != null) { + mDebugPrinter.print(mModel.dumpReferences()); + } + } + + private void recordResources(Iterable resources) + throws IOException, SAXException, ParserConfigurationException { + for (File resDir : resources) { + File[] resourceFolders = resDir.listFiles(); + assert resourceFolders != null : "Invalid resource directory " + resDir; + for (File folder : resourceFolders) { + ResourceFolderType folderType = 
ResourceFolderType.getFolderType(folder.getName()); + if (folderType != null) { + recordResources(folderType, folder); + } + } + } + } + + private void recordResources(ResourceFolderType folderType, File folder) + throws ParserConfigurationException, SAXException, IOException { + File[] files = folder.listFiles(); + if (files != null) { + for (File file : files) { + String path = file.getPath(); + mModel.file = file; + try { + boolean isXml = endsWithIgnoreCase(path, DOT_XML); + if (isXml) { + String xml = Files.toString(file, UTF_8); + Document document = XmlUtils.parseDocument(xml, true); + mModel.visitXmlDocument(file, folderType, document); + } else { + mModel.visitBinaryResource(folderType, file); + } + } finally { + mModel.file = null; + } + } + } + } + + void recordMapping(File mapping) throws IOException { + if (mapping == null || !mapping.exists()) { + return; + } + final String arrowString = " -> "; + final String resourceString = ".R$"; + Map nameMap = null; + for (String line : Files.readLines(mapping, UTF_8)) { + // Ignore R8's mapping comments. + if (line.startsWith("#")) { + continue; + } + if (line.startsWith(" ") || line.startsWith("\t")) { + if (nameMap != null) { + // We're processing the members of a resource class: record names into the map + int n = line.length(); + int i = 0; + for (; i < n; i++) { + if (!Character.isWhitespace(line.charAt(i))) { + break; + } + } + if (i < n && line.startsWith("int", i)) { // int or int[] + int start = line.indexOf(' ', i + 3) + 1; + int arrow = line.indexOf(arrowString); + if (start > 0 && arrow != -1) { + int end = line.indexOf(' ', start + 1); + if (end != -1) { + String oldName = line.substring(start, end); + String newName = + line.substring(arrow + arrowString.length()).trim(); + if (!newName.equals(oldName)) { + nameMap.put(newName, oldName); + } + } + } + } + } + continue; + } else { + nameMap = null; + } + int index = line.indexOf(resourceString); + if (index == -1) { + // Record obfuscated names of a few known appcompat usages of + // Resources#getIdentifier that are unlikely to be used for general + // resource name reflection + if (line.startsWith("android.support.v7.widget.SuggestionsAdapter ")) { + mSuggestionsAdapter = + line.substring(line.indexOf(arrowString) + arrowString.length(), + line.indexOf(':') != -1 ? line.indexOf(':') : line.length()) + .trim() + .replace('.', '/') + + DOT_CLASS; + } else if (line.startsWith("android.support.v7.internal.widget.ResourcesWrapper ") + || line.startsWith("android.support.v7.widget.ResourcesWrapper ") + || (mResourcesWrapper == null // Recently wrapper moved + && line.startsWith( + "android.support.v7.widget.TintContextWrapper$TintResources "))) { + mResourcesWrapper = + line.substring(line.indexOf(arrowString) + arrowString.length(), + line.indexOf(':') != -1 ? 
line.indexOf(':') : line.length()) + .trim() + .replace('.', '/') + + DOT_CLASS; + } + continue; + } + int arrow = line.indexOf(arrowString, index + 3); + if (arrow == -1) { + continue; + } + String typeName = line.substring(index + resourceString.length(), arrow); + ResourceType type = ResourceType.fromClassName(typeName); + if (type == null) { + continue; + } + int end = line.indexOf(':', arrow + arrowString.length()); + if (end == -1) { + end = line.length(); + } + String target = line.substring(arrow + arrowString.length(), end).trim(); + String ownerName = target.replace('.', '/'); + + nameMap = Maps.newHashMap(); + Pair> pair = new Pair(type, nameMap); + mResourceObfuscation.put(ownerName, pair); + // For fast lookup in isResourceClass + mResourceObfuscation.put(ownerName + DOT_CLASS, pair); + } + } + + private void recordManifestUsages(File manifest) + throws IOException, ParserConfigurationException, SAXException { + String xml = Files.toString(manifest, UTF_8); + Document document = XmlUtils.parseDocument(xml, true); + mModel.visitXmlDocument(manifest, null, document); + } + + private void recordManifestUsages(Iterable manifests) + throws IOException, ParserConfigurationException, SAXException { + for (File manifest : manifests) { + recordManifestUsages(manifest); + } + } + + private void recordClassUsages(File file) throws IOException { + assert file.isFile(); + if (file.getPath().endsWith(DOT_DEX)) { + byte[] bytes = Files.toByteArray(file); + recordClassUsages(file, file.getName(), bytes); + } else if (file.getPath().endsWith(DOT_JAR)) { + ZipInputStream zis = null; + try { + FileInputStream fis = new FileInputStream(file); + try { + zis = new ZipInputStream(fis); + ZipEntry entry = zis.getNextEntry(); + while (entry != null) { + String name = entry.getName(); + if (name.endsWith(DOT_DEX)) { + byte[] bytes = ByteStreams.toByteArray(zis); + if (bytes != null) { + recordClassUsages(file, name, bytes); + } + } + + entry = zis.getNextEntry(); + } + } finally { + Closeables.close(fis, true); + } + } finally { + Closeables.close(zis, true); + } + } + } + + private String stringifyResource(Resource resource) { + return String.format("%s:%s:0x%08x", resource.type, resource.name, resource.value); + } + + private void recordClassUsages(File file, String name, byte[] bytes) { + assert name.endsWith(DOT_DEX); + ReferenceChecker callback = new ReferenceChecker() { + @Override + public boolean shouldProcess(String internalName) { + // We do not need to ignore R subclasses since R8 now removes + // unused resource id fields in R subclasses thus their + // remaining presence means real usage. + return true; + } + + @Override + public void referencedInt(int value) { + UnusedResources.this.referencedInt("dex", value, file, name); + } + + @Override + public void referencedString(String value) { + // do nothing. + } + + @Override + public void referencedStaticField(String internalName, String fieldName) { + Resource resource = getResourceFromCode(internalName, fieldName); + if (resource != null) { + ResourceUsageModel.markReachable(resource); + if (mDebugPrinter != null) { + mDebugPrinter.println("Marking " + stringifyResource(resource) + + " reachable: referenced from dex" + + " in " + file + ":" + name + " (static field access " + + internalName + "." + fieldName + ")"); + } + } + } + + @Override + public void referencedMethod( + String internalName, String methodName, String methodDescriptor) { + // Do nothing. 
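+                // Method signatures cannot carry resource ids; only int
+                // constants and R-class field accesses (handled above) can,
+                // so there is nothing to record for reachability here.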
+ } + }; + ProgramResource resource = ProgramResource.fromBytes( + new PathOrigin(file.toPath()), ProgramResource.Kind.DEX, bytes, null); + ProgramResourceProvider provider = () -> Arrays.asList(resource); + try { + Command command = + (new ResourceShrinker.Builder()).addProgramResourceProvider(provider).build(); + ResourceShrinker.run(command, callback); + } catch (CompilationFailedException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } catch (ExecutionException e) { + e.printStackTrace(); + } + } + + /** Returns whether the given class file name points to an aapt-generated compiled R class. */ + boolean isResourceClass(String name) { + if (mResourceObfuscation.containsKey(name)) { + return true; + } + int index = name.lastIndexOf('/'); + if (index != -1 && name.startsWith("R$", index + 1) && name.endsWith(DOT_CLASS)) { + String typeName = name.substring(index + 3, name.length() - DOT_CLASS.length()); + return ResourceType.fromClassName(typeName) != null; + } + return false; + } + + Resource getResourceFromCode(String owner, String name) { + Pair> pair = mResourceObfuscation.get(owner); + if (pair != null) { + ResourceType type = pair.getFirst(); + Map nameMap = pair.getSecond(); + String renamedField = nameMap.get(name); + if (renamedField != null) { + name = renamedField; + } + return mModel.getResource(type, name); + } + if (isValidResourceType(owner)) { + ResourceType type = + ResourceType.fromClassName(owner.substring(owner.lastIndexOf('$') + 1)); + if (type != null) { + return mModel.getResource(type, name); + } + } + return null; + } + + private Boolean isValidResourceType(String candidateString) { + return candidateString.contains("/") + && candidateString.substring(candidateString.lastIndexOf('/') + 1).contains("$"); + } + + private void gatherResourceValues(Iterable rTxts) throws IOException { + for (File rTxt : rTxts) { + assert rTxt.isFile(); + assert rTxt.getName().endsWith(FN_RESOURCE_TEXT); + addResourcesFromRTxtFile(rTxt); + } + } + + private void addResourcesFromRTxtFile(File file) { + try { + SymbolTable st = readFromAapt(file, null); + for (Symbol symbol : st.getSymbols().values()) { + String symbolValue = symbol.getValue(); + if (symbol.getResourceType() == ResourceType.STYLEABLE) { + if (symbolValue.trim().startsWith("{")) { + // Only add the styleable parent, styleable children are not yet supported. + mModel.addResource(symbol.getResourceType(), symbol.getName(), null); + } + } else { + if (mDebugPrinter != null) { + mDebugPrinter.println("Extracted R.txt resource: " + + symbol.getResourceType() + ":" + symbol.getName() + ":" + + String.format( + "0x%08x", Integer.parseInt(symbolValue.substring(2), 16))); + } + mModel.addResource(symbol.getResourceType(), symbol.getName(), symbolValue); + } + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + ResourceUsageModel getModel() { + return mModel; + } + + private void referencedInt(String context, int value, File file, String currentClass) { + Resource resource = mModel.getResource(value); + if (ResourceUsageModel.markReachable(resource) && mDebugPrinter != null) { + mDebugPrinter.println("Marking " + stringifyResource(resource) + + " reachable: referenced from " + context + " in " + file + ":" + + currentClass); + } + } + + private final ResourceShrinkerUsageModel mModel = new ResourceShrinkerUsageModel(); + + private class ResourceShrinkerUsageModel extends ResourceUsageModel { + public File file; + + /** + * Whether we should ignore tools attribute resource references. + *
    + * For example, for resource shrinking we want to ignore tools attributes, + * whereas for resource refactoring on the source code we do not. + * + * @return whether tools attributes should be ignored + */ + @Override + protected boolean ignoreToolsAttributes() { + return true; + } + + @Override + protected void onRootResourcesFound(List roots) { + if (mDebugPrinter != null) { + mDebugPrinter.println("\nThe root reachable resources are:"); + for (Resource root : roots) { + mDebugPrinter.println(" " + stringifyResource(root) + ","); + } + } + } + + @Override + protected Resource declareResource(ResourceType type, String name, Node node) { + Resource resource = super.declareResource(type, name, node); + resource.addLocation(file); + return resource; + } + + @Override + protected void referencedString(String string) { + // Do nothing + } + } + + public static void main(String[] args) throws Exception { + List rTxtFiles = null; // R.txt files + List classes = null; // Dex/jar w dex + List manifests = null; // manifests + File mapping = null; // mapping + List resources = null; // resources dirs + File log = null; // output log for debugging + Path configPath = null; // output config + for (int i = 0; i < args.length; i += 2) { + switch (args[i]) { + case "--rtxts": + rTxtFiles = Arrays.stream(args[i + 1].split(":")) + .map(s -> new File(s)) + .collect(Collectors.toList()); + break; + case "--dexes": + classes = Arrays.stream(args[i + 1].split(":")) + .map(s -> new File(s)) + .collect(Collectors.toList()); + break; + case "--manifests": + manifests = Arrays.stream(args[i + 1].split(":")) + .map(s -> new File(s)) + .collect(Collectors.toList()); + break; + case "--mapping": + mapping = new File(args[i + 1]); + break; + case "--resourceDirs": + resources = Arrays.stream(args[i + 1].split(":")) + .map(s -> new File(s)) + .collect(Collectors.toList()); + break; + case "--log": + log = new File(args[i + 1]); + break; + case "--outputConfig": + configPath = Paths.get(args[i + 1]); + break; + default: + throw new IllegalArgumentException(args[i] + " is not a valid arg."); + } + } + UnusedResources unusedResources = + new UnusedResources(rTxtFiles, classes, manifests, mapping, resources, log); + unusedResources.analyze(); + unusedResources.close(); + unusedResources.emitConfig(configPath); + } +} diff --git a/android/update_deps/update_third_party_deps.py b/android/update_deps/update_third_party_deps.py new file mode 100755 index 000000000000..50c0e225f095 --- /dev/null +++ b/android/update_deps/update_third_party_deps.py @@ -0,0 +1,142 @@ +#!/usr/bin/env python3 +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" +Uploads or downloads third party libraries to or from google cloud storage. + +This script will only work for Android checkouts. 
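+
+When uploading, depot_tools writes a .sha1 stamp file next to each uploaded
+file; downloading restores the directory contents from those stamps.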
+""" + +import argparse +import logging +import os +import sys + + +sys.path.append(os.path.abspath( + os.path.join(os.path.dirname(__file__), os.pardir))) +from pylib import constants +from pylib.constants import host_paths + +sys.path.append( + os.path.abspath( + os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party', 'depot_tools'))) +import download_from_google_storage +import upload_to_google_storage + + +def _AddBasicArguments(parser): + parser.add_argument( + '--sdk-root', default=constants.ANDROID_SDK_ROOT, + help='base path to the Android SDK root') + parser.add_argument( + '-v', '--verbose', action='store_true', help='print debug information') + parser.add_argument( + '-b', '--bucket-path', required=True, + help='The path of the lib file in Google Cloud Storage.') + parser.add_argument( + '-l', '--local-path', required=True, + help='The base path of the third_party directory') + + +def _CheckPaths(bucket_path, local_path): + if bucket_path.startswith('gs://'): + bucket_url = bucket_path + else: + bucket_url = 'gs://%s' % bucket_path + local_path = os.path.join(host_paths.DIR_SOURCE_ROOT, local_path) + if not os.path.isdir(local_path): + raise IOError( + 'The library local path is not a valid directory: %s' % local_path) + return bucket_url, local_path + + +def _CheckFileList(local_path, file_list): + local_path = os.path.abspath(local_path) + abs_path_list = [os.path.abspath(f) for f in file_list] + for f in abs_path_list: + if os.path.commonprefix([f, local_path]) != local_path: + raise IOError( + '%s in the arguments is not descendant of the specified directory %s' + % (f, local_path)) + return abs_path_list + + +def _PurgeSymlinks(local_path): + for dirpath, _, filenames in os.walk(local_path): + for f in filenames: + path = os.path.join(dirpath, f) + if os.path.islink(path): + os.remove(path) + + +def Upload(arguments): + """Upload files in a third_party directory to google storage""" + bucket_url, local_path = _CheckPaths(arguments.bucket_path, + arguments.local_path) + file_list = _CheckFileList(local_path, arguments.file_list) + return upload_to_google_storage.upload_to_google_storage( + input_filenames=file_list, + base_url=bucket_url, + gsutil=arguments.gsutil, + force=False, + use_md5=False, + num_threads=1, + skip_hashing=False, + gzip=None) + + +def Download(arguments): + """Download files based on sha1 files in a third_party dir from gcs""" + bucket_url, local_path = _CheckPaths(arguments.bucket_path, + arguments.local_path) + _PurgeSymlinks(local_path) + return download_from_google_storage.download_from_google_storage( + local_path, + bucket_url, + gsutil=arguments.gsutil, + num_threads=1, + directory=True, + recursive=True, + force=False, + output=None, + ignore_errors=False, + sha1_file=None, + verbose=arguments.verbose, + auto_platform=False, + extract=False) + + +def main(argv): + parser = argparse.ArgumentParser() + subparsers = parser.add_subparsers(title='commands') + download_parser = subparsers.add_parser( + 'download', help='download the library from the cloud storage') + _AddBasicArguments(download_parser) + download_parser.set_defaults(func=Download) + + upload_parser = subparsers.add_parser( + 'upload', help='find all jar files in a third_party directory and ' + + 'upload them to cloud storage') + _AddBasicArguments(upload_parser) + upload_parser.set_defaults(func=Upload) + upload_parser.add_argument( + '-f', '--file-list', nargs='+', required=True, + help='A list of base paths for files in third_party to upload.') + + arguments = 
parser.parse_args(argv) + if not os.path.isdir(arguments.sdk_root): + logging.debug('Did not find the Android SDK root directory at "%s".', + arguments.sdk_root) + logging.info('Skipping, not on an android checkout.') + return 0 + + arguments.gsutil = download_from_google_storage.Gsutil( + download_from_google_storage.GSUTIL_DEFAULT_PATH) + return arguments.func(arguments) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/android/update_verification.py b/android/update_verification.py new file mode 100755 index 000000000000..55a403e855db --- /dev/null +++ b/android/update_verification.py @@ -0,0 +1,116 @@ +#!/usr/bin/env vpython3 +# +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Runs semi-automated update testing on a non-rooted device. + +This script will help verify that app data is preserved during an update. +To use this script first run it with the create_app_data option. + +./update_verification.py create_app_data --old-apk --app-data + +The script will then install the old apk, prompt you to create some app data +(bookmarks, etc.), and then save the app data in the path you gave it. + +Next, once you have some app data saved, run this script with the test_update +option. + +./update_verification.py test_update --old-apk --new-apk +--app-data + +This will install the old apk, load the saved app data, install the new apk, +and ask the user to verify that all of the app data was preserved. +""" + +import argparse +import logging +import sys + +# import raw_input when converted to python3 +from six.moves import input # pylint: disable=redefined-builtin +import devil_chromium + +from devil.android import apk_helper +from devil.android import device_denylist +from devil.android import device_errors +from devil.android import device_utils +from devil.utils import run_tests_helper + + +def CreateAppData(device, old_apk, app_data, package_name): + device.Install(old_apk) + input('Set the application state. Once ready, press enter and ' + 'select "Backup my data" on the device.') + device.adb.Backup(app_data, packages=[package_name]) + logging.critical('Application data saved to %s', app_data) + +def TestUpdate(device, old_apk, new_apk, app_data, package_name): + device.Install(old_apk) + device.adb.Restore(app_data) + # Restore command is not synchronous + input('Select "Restore my data" on the device. Then press enter to ' + 'continue.') + if not device.IsApplicationInstalled(package_name): + raise Exception('Expected package %s to already be installed. ' + 'Package name might have changed!' % package_name) + + logging.info('Verifying that %s can be overinstalled.', new_apk) + device.adb.Install(new_apk, reinstall=True) + logging.critical('Successfully updated to the new apk. 
Please verify that ' + 'the application data is preserved.') + +def main(): + parser = argparse.ArgumentParser( + description="Script to do semi-automated upgrade testing.") + parser.add_argument('-v', '--verbose', action='count', + help='Print verbose log information.') + parser.add_argument('--denylist-file', help='Device denylist JSON file.') + command_parsers = parser.add_subparsers(dest='command') + + subparser = command_parsers.add_parser('create_app_data') + subparser.add_argument('--old-apk', required=True, + help='Path to apk to update from.') + subparser.add_argument('--app-data', required=True, + help='Path to where the app data backup should be ' + 'saved to.') + subparser.add_argument('--package-name', + help='Chrome apk package name.') + + subparser = command_parsers.add_parser('test_update') + subparser.add_argument('--old-apk', required=True, + help='Path to apk to update from.') + subparser.add_argument('--new-apk', required=True, + help='Path to apk to update to.') + subparser.add_argument('--app-data', required=True, + help='Path to where the app data backup is saved.') + subparser.add_argument('--package-name', + help='Chrome apk package name.') + + args = parser.parse_args() + run_tests_helper.SetLogLevel(args.verbose) + + devil_chromium.Initialize() + + denylist = (device_denylist.Denylist(args.denylist_file) + if args.denylist_file else None) + + devices = device_utils.DeviceUtils.HealthyDevices(denylist) + if not devices: + raise device_errors.NoDevicesError() + device = devices[0] + logging.info('Using device %s for testing.', str(device)) + + package_name = (args.package_name if args.package_name + else apk_helper.GetPackageName(args.old_apk)) + if args.command == 'create_app_data': + CreateAppData(device, args.old_apk, args.app_data, package_name) + elif args.command == 'test_update': + TestUpdate( + device, args.old_apk, args.new_apk, args.app_data, package_name) + else: + raise Exception('Unknown test command: %s' % args.command) + +if __name__ == '__main__': + sys.exit(main()) diff --git a/android/video_recorder.py b/android/video_recorder.py new file mode 100755 index 000000000000..39387797536e --- /dev/null +++ b/android/video_recorder.py @@ -0,0 +1,13 @@ +#!/usr/bin/env vpython3 +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import sys + +import devil_chromium +from devil.android.tools import video_recorder + +if __name__ == '__main__': + devil_chromium.Initialize() + sys.exit(video_recorder.main()) diff --git a/apple/OWNERS b/apple/OWNERS new file mode 100644 index 000000000000..07d900ebed13 --- /dev/null +++ b/apple/OWNERS @@ -0,0 +1,4 @@ +mark@chromium.org +rohitrao@chromium.org +rsesek@chromium.org +sdefresne@chromium.org diff --git a/apple/README.md b/apple/README.md new file mode 100644 index 000000000000..f60185dc9706 --- /dev/null +++ b/apple/README.md @@ -0,0 +1,12 @@ +# About + +`//build/apple` contains: + * GN templates and configurations shared by Apple platforms + * Python build scripts shared by Apple platforms + +This directory should only contain templates, configurations and scripts +that are used exclusively on Apple platforms (currently iOS and macOS). +They must also be independent of the specific platform. + +If a template, configuration or script is limited to only iOS or macOS, +then they should instead be located in `//build/ios` or `//build/mac`. 
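For reference, a minimal usage sketch of the `apple_info_plist` template defined in `apple/apple_info_plist.gni`, the next file in this patch (the target name, plist path, and bundle id here are hypothetical):

    apple_info_plist("foo_info_plist") {
      executable_name = "Foo"
      format = "binary1"
      plist_templates = [ "//foo/app/Info.plist" ]
      extra_substitutions = [ "BUNDLE_IDENTIFIER=org.chromium.Foo" ]
      output_name = "$target_gen_dir/Foo-Info.plist"
    }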
diff --git a/apple/apple_info_plist.gni b/apple/apple_info_plist.gni new file mode 100644 index 000000000000..bf66dbd6b78c --- /dev/null +++ b/apple/apple_info_plist.gni @@ -0,0 +1,60 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/apple/compile_plist.gni") + +# The base template used to generate Info.plist files for iOS and Mac apps and +# frameworks. +# +# Arguments +# +# plist_templates: +# string array, paths to plist files which will be used for the bundle. +# +# executable_name: +# string, name of the generated target used for the product +# and executable name as specified in the output Info.plist. +# +# format: +# string, the format to `plutil -convert` the plist to when +# generating the output. +# +# extra_substitutions: +# (optional) string array, 'key=value' pairs for extra fields which are +# specified in a source Info.plist template. +# +# output_name: +# (optional) string, name of the generated plist file, default to +# "$target_gen_dir/$target_name.plist". +template("apple_info_plist") { + assert(defined(invoker.executable_name), + "The executable_name must be specified for $target_name") + executable_name = invoker.executable_name + + compile_plist(target_name) { + forward_variables_from(invoker, + [ + "plist_templates", + "testonly", + "deps", + "visibility", + "format", + ]) + + if (defined(invoker.output_name)) { + output_name = invoker.output_name + } else { + output_name = "$target_gen_dir/$target_name.plist" + } + + substitutions = [ + "EXECUTABLE_NAME=$executable_name", + "GCC_VERSION=com.apple.compilers.llvm.clang.1_0", + "PRODUCT_NAME=$executable_name", + ] + if (defined(invoker.extra_substitutions)) { + substitutions += invoker.extra_substitutions + } + } +} diff --git a/apple/compile_entitlements.gni b/apple/compile_entitlements.gni new file mode 100644 index 000000000000..1f84a115d0b1 --- /dev/null +++ b/apple/compile_entitlements.gni @@ -0,0 +1,51 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/apple/compile_plist.gni") + +# Template to merge multiple .entitlements files performing variable +# substitutions. +# +# Arguments +# +# entitlements_templates: +# string array, paths to entitlements files which will be used for the +# bundle. +# +# substitutions: +# string array, 'key=value' pairs used to replace ${key} by value +# when generating the output plist file. +# +# output_name: +# string, name of the generated entitlements file. +template("compile_entitlements") { + assert(defined(invoker.entitlements_templates), + "A list of template plist files must be specified for $target_name") + + compile_plist(target_name) { + forward_variables_from(invoker, + "*", + [ + "entitlements_templates", + "format", + "plist_templates", + ]) + + plist_templates = invoker.entitlements_templates + + # Entitlements files are always encoded in xml1. + format = "xml1" + + # Entitlements files use unsubstitued variables, so define substitutions + # to leave those variables untouched. 
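+    # For example, "$(AppIdentifierPrefix)" in a template is substituted with
+    # itself, so Xcode-style variables survive into the merged output.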
+ if (!defined(substitutions)) { + substitutions = [] + } + + substitutions += [ + "AppIdentifierPrefix=\$(AppIdentifierPrefix)", + "CFBundleIdentifier=\$(CFBundleIdentifier)", + ] + } +} diff --git a/apple/compile_plist.gni b/apple/compile_plist.gni new file mode 100644 index 000000000000..df8de0c21c1b --- /dev/null +++ b/apple/compile_plist.gni @@ -0,0 +1,76 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Template to merge multiple plist files and perform variable substitutions. +# +# Arguments +# +# plist_templates: +# string array, paths to plist files which will be used for the bundle. +# +# format: +# string, the format to `plutil -convert` the plist to when +# generating the output. +# +# substitutions: +# string array, 'key=value' pairs used to replace ${key} by value +# when generating the output plist file. +# +# output_name: +# string, name of the generated plist file. +template("compile_plist") { + assert(defined(invoker.plist_templates), + "A list of template plist files must be specified for $target_name") + assert(defined(invoker.format), + "The plist format must be specified for $target_name") + assert(defined(invoker.substitutions), + "A list of key=value pairs must be specified for $target_name") + assert(defined(invoker.output_name), + "The name of the output file must be specified for $target_name") + + _output_name = invoker.output_name + _merged_name = get_path_info(_output_name, "dir") + "/" + + get_path_info(_output_name, "name") + "_merged." + + get_path_info(_output_name, "extension") + + _merge_target = target_name + "_merge" + + action(_merge_target) { + forward_variables_from(invoker, + [ + "deps", + "testonly", + ]) + + script = "//build/apple/plist_util.py" + sources = invoker.plist_templates + outputs = [ _merged_name ] + args = [ + "merge", + "-f=" + invoker.format, + "-o=" + rebase_path(_merged_name, root_build_dir), + ] + rebase_path(invoker.plist_templates, root_build_dir) + } + + action(target_name) { + forward_variables_from(invoker, + [ + "testonly", + "visibility", + ]) + script = "//build/apple/plist_util.py" + sources = [ _merged_name ] + outputs = [ _output_name ] + args = [ + "substitute", + "-f=" + invoker.format, + "-o=" + rebase_path(_output_name, root_build_dir), + "-t=" + rebase_path(_merged_name, root_build_dir), + ] + foreach(_substitution, invoker.substitutions) { + args += [ "-s=$_substitution" ] + } + deps = [ ":$_merge_target" ] + } +} diff --git a/apple/convert_plist.gni b/apple/convert_plist.gni new file mode 100644 index 000000000000..740bfc77b508 --- /dev/null +++ b/apple/convert_plist.gni @@ -0,0 +1,41 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Convert plist file to given format. +# +# Arguments +# +# source: +# string, path to the plist file to convert +# +# output: +# string, path to the converted plist, must be under $root_build_dir +# +# format: +# string, the format to convert the plist to. Either "binary1" or "xml1". 
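+#
+# Example (hypothetical target):
+#
+#   convert_plist("foo_plist_binary") {
+#     source = "Foo.plist"
+#     output = "$root_build_dir/Foo.plist"
+#     format = "binary1"
+#   }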
+template("convert_plist") { + assert(defined(invoker.source), "source must be defined for $target_name") + assert(defined(invoker.output), "output must be defined for $target_name") + assert(defined(invoker.format), "format must be defined for $target_name") + + action(target_name) { + forward_variables_from(invoker, + [ + "visibility", + "testonly", + "deps", + ]) + + script = "//build/apple/plist_util.py" + sources = [ invoker.source ] + outputs = [ invoker.output ] + args = [ + "merge", + "--format=${invoker.format}", + "-o", + rebase_path(invoker.output, root_build_dir), + rebase_path(invoker.source, root_build_dir), + ] + } +} diff --git a/apple/plist_util.py b/apple/plist_util.py new file mode 100644 index 000000000000..016a06ac4a5e --- /dev/null +++ b/apple/plist_util.py @@ -0,0 +1,236 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import codecs +import plistlib +import os +import re +import subprocess +import sys +import tempfile +import shlex + +# Xcode substitutes variables like ${PRODUCT_NAME} or $(PRODUCT_NAME) when +# compiling Info.plist. It also supports supports modifiers like :identifier +# or :rfc1034identifier. SUBSTITUTION_REGEXP_LIST is a list of regular +# expressions matching a variable substitution pattern with an optional +# modifier, while INVALID_CHARACTER_REGEXP matches all characters that are +# not valid in an "identifier" value (used when applying the modifier). +INVALID_CHARACTER_REGEXP = re.compile(r'[_/\s]') +SUBSTITUTION_REGEXP_LIST = ( + re.compile(r'\$\{(?P[^}]*?)(?P:[^}]*)?\}'), + re.compile(r'\$\((?P[^}]*?)(?P:[^}]*)?\)'), +) + + +class SubstitutionError(Exception): + def __init__(self, key): + super(SubstitutionError, self).__init__() + self.key = key + + def __str__(self): + return "SubstitutionError: {}".format(self.key) + + +def InterpolateString(value, substitutions): + """Interpolates variable references into |value| using |substitutions|. + + Inputs: + value: a string + substitutions: a mapping of variable names to values + + Returns: + A new string with all variables references ${VARIABLES} replaced by their + value in |substitutions|. Raises SubstitutionError if a variable has no + substitution. + """ + + def repl(match): + variable = match.group('id') + if variable not in substitutions: + raise SubstitutionError(variable) + # Some values need to be identifier and thus the variables references may + # contains :modifier attributes to indicate how they should be converted + # to identifiers ("identifier" replaces all invalid characters by '_' and + # "rfc1034identifier" replaces them by "-" to make valid URI too). + modifier = match.group('modifier') + if modifier == ':identifier': + return INVALID_CHARACTER_REGEXP.sub('_', substitutions[variable]) + elif modifier == ':rfc1034identifier': + return INVALID_CHARACTER_REGEXP.sub('-', substitutions[variable]) + else: + return substitutions[variable] + + for substitution_regexp in SUBSTITUTION_REGEXP_LIST: + value = substitution_regexp.sub(repl, value) + return value + + +def Interpolate(value, substitutions): + """Interpolates variable references into |value| using |substitutions|. + + Inputs: + value: a value, can be a dictionary, list, string or other + substitutions: a mapping of variable names to values + + Returns: + A new value with all variables references ${VARIABLES} replaced by their + value in |substitutions|. 
Raises SubstitutionError if a variable has no + substitution. + """ + if isinstance(value, dict): + return {k: Interpolate(v, substitutions) for k, v in value.items()} + if isinstance(value, list): + return [Interpolate(v, substitutions) for v in value] + if isinstance(value, str): + return InterpolateString(value, substitutions) + return value + + +def LoadPList(path): + """Loads Plist at |path| and returns it as a dictionary.""" + with open(path, 'rb') as f: + return plistlib.load(f) + + +def SavePList(path, format, data): + """Saves |data| as a Plist to |path| in the specified |format|.""" + # The open() call does not replace the destination file but updates it + # in place, so if more than one hardlink points to destination all of them + # will be modified. This is not what is expected, so delete destination file + # if it does exist. + try: + os.unlink(path) + except FileNotFoundError: + pass + with open(path, 'wb') as f: + plist_format = {'binary1': plistlib.FMT_BINARY, 'xml1': plistlib.FMT_XML} + plistlib.dump(data, f, fmt=plist_format[format]) + + +def MergePList(plist1, plist2): + """Merges |plist1| with |plist2| recursively. + + Creates a new dictionary representing a Property List (.plist) files by + merging the two dictionary |plist1| and |plist2| recursively (only for + dictionary values). List value will be concatenated. + + Args: + plist1: a dictionary representing a Property List (.plist) file + plist2: a dictionary representing a Property List (.plist) file + + Returns: + A new dictionary representing a Property List (.plist) file by merging + |plist1| with |plist2|. If any value is a dictionary, they are merged + recursively, otherwise |plist2| value is used. If values are list, they + are concatenated. + """ + result = plist1.copy() + for key, value in plist2.items(): + if isinstance(value, dict): + old_value = result.get(key) + if isinstance(old_value, dict): + value = MergePList(old_value, value) + if isinstance(value, list): + value = plist1.get(key, []) + plist2.get(key, []) + result[key] = value + return result + + +class Action(object): + """Class implementing one action supported by the script.""" + + @classmethod + def Register(cls, subparsers): + parser = subparsers.add_parser(cls.name, help=cls.help) + parser.set_defaults(func=cls._Execute) + cls._Register(parser) + + +class MergeAction(Action): + """Class to merge multiple plist files.""" + + name = 'merge' + help = 'merge multiple plist files' + + @staticmethod + def _Register(parser): + parser.add_argument('-o', + '--output', + required=True, + help='path to the output plist file') + parser.add_argument('-f', + '--format', + required=True, + choices=('xml1', 'binary1'), + help='format of the plist file to generate') + parser.add_argument( + '-x', + '--xcode-version', + help='version of Xcode, ignored (can be used to force rebuild)') + parser.add_argument('path', nargs="+", help='path to plist files to merge') + + @staticmethod + def _Execute(args): + data = {} + for filename in args.path: + data = MergePList(data, LoadPList(filename)) + SavePList(args.output, args.format, data) + + +class SubstituteAction(Action): + """Class implementing the variable substitution in a plist file.""" + + name = 'substitute' + help = 'perform pattern substitution in a plist file' + + @staticmethod + def _Register(parser): + parser.add_argument('-o', + '--output', + required=True, + help='path to the output plist file') + parser.add_argument('-t', + '--template', + required=True, + help='path to the template file') + 
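+    # -s/--substitution may be repeated; if the same key is given twice, the
+    # last value wins (see _Execute below).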
parser.add_argument('-s', + '--substitution', + action='append', + default=[], + help='substitution rule in the format key=value') + parser.add_argument('-f', + '--format', + required=True, + choices=('xml1', 'binary1'), + help='format of the plist file to generate') + parser.add_argument( + '-x', + '--xcode-version', + help='version of Xcode, ignored (can be used to force rebuild)') + + @staticmethod + def _Execute(args): + substitutions = {} + for substitution in args.substitution: + key, value = substitution.split('=', 1) + substitutions[key] = value + data = Interpolate(LoadPList(args.template), substitutions) + SavePList(args.output, args.format, data) + + +def Main(): + parser = argparse.ArgumentParser(description='manipulate plist files') + subparsers = parser.add_subparsers() + + for action in [MergeAction, SubstituteAction]: + action.Register(subparsers) + + args = parser.parse_args() + args.func(args) + + +if __name__ == '__main__': + sys.exit(Main()) diff --git a/apple/tweak_info_plist.gni b/apple/tweak_info_plist.gni new file mode 100644 index 000000000000..347c5d58d840 --- /dev/null +++ b/apple/tweak_info_plist.gni @@ -0,0 +1,86 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/util/lastchange.gni") + +# Template to run the tweak_info_plist.py script on a plist. +# +# Arguments: +# +# info_plist: +# (optional), string, the plist to tweak. +# +# info_plists: +# (optional), list of string, the plist files to merge and tweak. +# +# args: +# (optional), list of string, the arguments to pass to the +# tweak_info_plist.py script. +# +# Callers should use get_target_outputs() to get the output name. One of +# info_plist or info_plists must be specified. 
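+#
+# Example (hypothetical target; args are forwarded to tweak_info_plist.py):
+#
+#   tweak_info_plist("foo_plist") {
+#     info_plist = "foo/Info.plist"
+#     args = [
+#       "--breakpad=0",
+#       "--keystone=0",
+#       "--scm=1",
+#     ]
+#   }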
+template("tweak_info_plist") { + _output_name = "$target_gen_dir/${target_name}_tweaked.plist" + + if (defined(invoker.info_plists)) { + assert(!defined(invoker.info_plist), + "Cannot have both info_plist and info_plists for $target_name") + + _source_name = "$target_gen_dir/${target_name}_merged.plist" + _deps = [ ":" + target_name + "_merge_plist" ] + + action(target_name + "_merge_plist") { + forward_variables_from(invoker, + [ + "testonly", + "deps", + ]) + script = "//build/apple/plist_util.py" + sources = invoker.info_plists + outputs = [ _source_name ] + args = [ + "merge", + "-f=xml1", + "-o=" + rebase_path(_source_name, root_build_dir), + ] + rebase_path(invoker.info_plists, root_build_dir) + } + } else { + assert(defined(invoker.info_plist), + "The info_plist must be specified in $target_name") + + _source_name = invoker.info_plist + _deps = [] + if (defined(invoker.deps)) { + _deps += invoker.deps + } + } + + action(target_name) { + forward_variables_from(invoker, + [ + "args", + "testonly", + ]) + script = "//build/apple/tweak_info_plist.py" + inputs = [ + script, + "//build/util/version.py", + lastchange_file, + "//chrome/VERSION", + ] + sources = [ _source_name ] + outputs = [ _output_name ] + if (!defined(args)) { + args = [] + } + args += [ + "--plist", + rebase_path(_source_name, root_build_dir), + "--output", + rebase_path(_output_name, root_build_dir), + "--platform=$current_os", + ] + deps = _deps + } +} diff --git a/apple/tweak_info_plist.py b/apple/tweak_info_plist.py new file mode 100755 index 000000000000..8aa28b002108 --- /dev/null +++ b/apple/tweak_info_plist.py @@ -0,0 +1,450 @@ +#!/usr/bin/env python3 + +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# +# Xcode supports build variable substitutions and CPP; sadly, that doesn't work +# because: +# +# 1. Xcode wants to do the Info.plist work before it runs any build phases, +# this means if we were to generate a .h file for INFOPLIST_PREFIX_HEADER +# we'd have to put it in another target so it runs in time. +# 2. Xcode also doesn't check to see if the header being used as a prefix for +# the Info.plist has changed. So even if we updated it, it's only looking +# at the modtime of the info.plist to see if that's changed. +# +# So, we work around all of this by making a script build phase that will run +# during the app build, and simply update the info.plist in place. This way +# by the time the app target is done, the info.plist is correct. +# + + +import optparse +import os +import plistlib +import re +import subprocess +import sys +import tempfile + +TOP = os.path.dirname(os.path.dirname(os.path.dirname(__file__))) + + +def _ConvertPlist(source_plist, output_plist, fmt): + """Convert |source_plist| to |fmt| and save as |output_plist|.""" + assert sys.version_info.major == 2, "Use plistlib directly in Python 3" + return subprocess.call( + ['plutil', '-convert', fmt, '-o', output_plist, source_plist]) + + +def _GetOutput(args): + """Runs a subprocess and waits for termination. Returns (stdout, returncode) + of the process. 
stderr is attached to the parent.""" + proc = subprocess.Popen(args, stdout=subprocess.PIPE) + stdout, _ = proc.communicate() + return stdout.decode('UTF-8'), proc.returncode + + +def _RemoveKeys(plist, *keys): + """Removes a varargs of keys from the plist.""" + for key in keys: + try: + del plist[key] + except KeyError: + pass + + +def _ApplyVersionOverrides(version, keys, overrides, separator='.'): + """Applies version overrides. + + Given a |version| string as "a.b.c.d" (assuming a default separator) with + version components named by |keys| then overrides any value that is present + in |overrides|. + + >>> _ApplyVersionOverrides('a.b', ['major', 'minor'], {'minor': 'd'}) + 'a.d' + """ + if not overrides: + return version + version_values = version.split(separator) + for i, (key, value) in enumerate(zip(keys, version_values)): + if key in overrides: + version_values[i] = overrides[key] + return separator.join(version_values) + + +def _GetVersion(version_format, values, overrides=None): + """Generates a version number according to |version_format| using the values + from |values| or |overrides| if given.""" + result = version_format + for key in values: + if overrides and key in overrides: + value = overrides[key] + else: + value = values[key] + result = result.replace('@%s@' % key, value) + return result + + +def _AddVersionKeys(plist, version_format_for_key, version=None, + overrides=None): + """Adds the product version number into the plist. Returns True on success and + False on error. The error will be printed to stderr.""" + if not version: + # Pull in the Chrome version number. + VERSION_TOOL = os.path.join(TOP, 'build/util/version.py') + VERSION_FILE = os.path.join(TOP, 'chrome/VERSION') + (stdout, retval) = _GetOutput([ + VERSION_TOOL, '-f', VERSION_FILE, '-t', + '@MAJOR@.@MINOR@.@BUILD@.@PATCH@' + ]) + + # If the command finished with a non-zero return code, then report the + # error up. + if retval != 0: + return False + + version = stdout.strip() + + # Parse the given version number, that should be in MAJOR.MINOR.BUILD.PATCH + # format (where each value is a number). Note that str.isdigit() returns + # True if the string is composed only of digits (and thus match \d+ regexp). + groups = version.split('.') + if len(groups) != 4 or not all(element.isdigit() for element in groups): + print('Invalid version string specified: "%s"' % version, file=sys.stderr) + return False + values = dict(zip(('MAJOR', 'MINOR', 'BUILD', 'PATCH'), groups)) + + for key in version_format_for_key: + plist[key] = _GetVersion(version_format_for_key[key], values, overrides) + + # Return with no error. + return True + + +def _DoSCMKeys(plist, add_keys): + """Adds the SCM information, visible in about:version, to property list. If + |add_keys| is True, it will insert the keys, otherwise it will remove them.""" + scm_revision = None + if add_keys: + # Pull in the Chrome revision number. + VERSION_TOOL = os.path.join(TOP, 'build/util/version.py') + LASTCHANGE_FILE = os.path.join(TOP, 'build/util/LASTCHANGE') + (stdout, retval) = _GetOutput( + [VERSION_TOOL, '-f', LASTCHANGE_FILE, '-t', '@LASTCHANGE@']) + if retval: + return False + scm_revision = stdout.rstrip() + + # See if the operation failed. + _RemoveKeys(plist, 'SCMRevision') + if scm_revision != None: + plist['SCMRevision'] = scm_revision + elif add_keys: + print('Could not determine SCM revision. This may be OK.', file=sys.stderr) + + return True + + +def _AddBreakpadKeys(plist, branding, platform, staging): + """Adds the Breakpad keys. 
This must be called AFTER _AddVersionKeys() and + also requires the |branding| argument.""" + plist['BreakpadReportInterval'] = '3600' # Deliberately a string. + plist['BreakpadProduct'] = '%s_%s' % (branding, platform) + plist['BreakpadProductDisplay'] = branding + if staging: + plist['BreakpadURL'] = 'https://clients2.google.com/cr/staging_report' + else: + plist['BreakpadURL'] = 'https://clients2.google.com/cr/report' + + # These are both deliberately strings and not boolean. + plist['BreakpadSendAndExit'] = 'YES' + plist['BreakpadSkipConfirm'] = 'YES' + + +def _RemoveBreakpadKeys(plist): + """Removes any set Breakpad keys.""" + _RemoveKeys(plist, 'BreakpadURL', 'BreakpadReportInterval', 'BreakpadProduct', + 'BreakpadProductDisplay', 'BreakpadVersion', + 'BreakpadSendAndExit', 'BreakpadSkipConfirm') + + +def _TagSuffixes(): + # Keep this list sorted in the order that tag suffix components are to + # appear in a tag value. That is to say, it should be sorted per ASCII. + components = ('full', ) + assert tuple(sorted(components)) == components + + components_len = len(components) + combinations = 1 << components_len + tag_suffixes = [] + for combination in range(0, combinations): + tag_suffix = '' + for component_index in range(0, components_len): + if combination & (1 << component_index): + tag_suffix += '-' + components[component_index] + tag_suffixes.append(tag_suffix) + return tag_suffixes + + +def _AddKeystoneKeys(plist, bundle_identifier, base_tag): + """Adds the Keystone keys. This must be called AFTER _AddVersionKeys() and + also requires the |bundle_identifier| argument (com.example.product).""" + plist['KSVersion'] = plist['CFBundleShortVersionString'] + plist['KSProductID'] = bundle_identifier + plist['KSUpdateURL'] = 'https://tools.google.com/service/update2' + + _RemoveKeys(plist, 'KSChannelID') + if base_tag != '': + plist['KSChannelID'] = base_tag + for tag_suffix in _TagSuffixes(): + if tag_suffix: + plist['KSChannelID' + tag_suffix] = base_tag + tag_suffix + + +def _RemoveKeystoneKeys(plist): + """Removes any set Keystone keys.""" + _RemoveKeys(plist, 'KSVersion', 'KSProductID', 'KSUpdateURL') + + tag_keys = ['KSChannelID'] + for tag_suffix in _TagSuffixes(): + tag_keys.append('KSChannelID' + tag_suffix) + _RemoveKeys(plist, *tag_keys) + + +def _AddGTMKeys(plist, platform): + """Adds the GTM metadata keys. 
This must be called AFTER _AddVersionKeys().""" + plist['GTMUserAgentID'] = plist['CFBundleName'] + if platform == 'ios': + plist['GTMUserAgentVersion'] = plist['CFBundleVersion'] + else: + plist['GTMUserAgentVersion'] = plist['CFBundleShortVersionString'] + + +def _RemoveGTMKeys(plist): + """Removes any set GTM metadata keys.""" + _RemoveKeys(plist, 'GTMUserAgentID', 'GTMUserAgentVersion') + + +def _AddPrivilegedHelperId(plist, privileged_helper_id): + plist['SMPrivilegedExecutables'] = { + privileged_helper_id: 'identifier ' + privileged_helper_id + } + + +def _RemovePrivilegedHelperId(plist): + _RemoveKeys(plist, 'SMPrivilegedExecutables') + + +def Main(argv): + parser = optparse.OptionParser('%prog [options]') + parser.add_option('--plist', + dest='plist_path', + action='store', + type='string', + default=None, + help='The path of the plist to tweak.') + parser.add_option('--output', dest='plist_output', action='store', + type='string', default=None, help='If specified, the path to output ' + \ + 'the tweaked plist, rather than overwriting the input.') + parser.add_option('--breakpad', + dest='use_breakpad', + action='store', + type='int', + default=False, + help='Enable Breakpad [1 or 0]') + parser.add_option( + '--breakpad_staging', + dest='use_breakpad_staging', + action='store_true', + default=False, + help='Use staging breakpad to upload reports. Ignored if --breakpad=0.') + parser.add_option('--keystone', + dest='use_keystone', + action='store', + type='int', + default=False, + help='Enable Keystone [1 or 0]') + parser.add_option('--keystone-base-tag', + default='', + help='Base Keystone tag to set') + parser.add_option('--scm', + dest='add_scm_info', + action='store', + type='int', + default=True, + help='Add SCM metadata [1 or 0]') + parser.add_option('--branding', + dest='branding', + action='store', + type='string', + default=None, + help='The branding of the binary') + parser.add_option('--bundle_id', + dest='bundle_identifier', + action='store', + type='string', + default=None, + help='The bundle id of the binary') + parser.add_option('--platform', + choices=('ios', 'mac'), + default='mac', + help='The target platform of the bundle') + parser.add_option('--add-gtm-metadata', + dest='add_gtm_info', + action='store', + type='int', + default=False, + help='Add GTM metadata [1 or 0]') + parser.add_option( + '--version-overrides', + action='append', + help='Key-value pair to override specific component of version ' + 'like key=value (can be passed multiple time to configure ' + 'more than one override)') + parser.add_option('--format', + choices=('binary1', 'xml1'), + default='xml1', + help='Format to use when writing property list ' + '(default: %(default)s)') + parser.add_option('--version', + dest='version', + action='store', + type='string', + default=None, + help='The version string [major.minor.build.patch]') + parser.add_option('--privileged_helper_id', + dest='privileged_helper_id', + action='store', + type='string', + default=None, + help='The id of the privileged helper executable.') + (options, args) = parser.parse_args(argv) + + if len(args) > 0: + print(parser.get_usage(), file=sys.stderr) + return 1 + + if not options.plist_path: + print('No --plist specified.', file=sys.stderr) + return 1 + + # Read the plist into its parsed format. Convert the file to 'xml1' as + # plistlib only supports that format in Python 2.7. 
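+  # (On Python 3, plistlib.load() autodetects xml1 vs binary1 input, so no
+  # conversion step is needed.)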
+ with tempfile.NamedTemporaryFile() as temp_info_plist: + if sys.version_info.major == 2: + retcode = _ConvertPlist(options.plist_path, temp_info_plist.name, 'xml1') + if retcode != 0: + return retcode + plist = plistlib.readPlist(temp_info_plist.name) + else: + with open(options.plist_path, 'rb') as f: + plist = plistlib.load(f) + + # Convert overrides. + overrides = {} + if options.version_overrides: + for pair in options.version_overrides: + if '=' not in pair: + print('Invalid value for --version-overrides:', pair, file=sys.stderr) + return 1 + key, value = pair.split('=', 1) + overrides[key] = value + if key not in ('MAJOR', 'MINOR', 'BUILD', 'PATCH'): + print('Unsupported key for --version-overrides:', key, file=sys.stderr) + return 1 + + if options.platform == 'mac': + version_format_for_key = { + # Add public version info so "Get Info" works. + 'CFBundleShortVersionString': '@MAJOR@.@MINOR@.@BUILD@.@PATCH@', + + # Honor the 429496.72.95 limit. The maximum comes from splitting + # 2^32 - 1 into 6, 2, 2 digits. The limitation was present in Tiger; + # it may have been fixed in a later OS release, but that hasn't been + # tested (it's easy enough to find out with "lsregister -dump"). + # http://lists.apple.com/archives/carbon-dev/2006/Jun/msg00139.html + # BUILD will always be an increasing value, so BUILD.PATCH gives us + # something unique that meets what LS wants. + 'CFBundleVersion': '@BUILD@.@PATCH@', + } + else: + version_format_for_key = { + 'CFBundleShortVersionString': '@MAJOR@.@BUILD@.@PATCH@', + 'CFBundleVersion': '@MAJOR@.@MINOR@.@BUILD@.@PATCH@' + } + + if options.use_breakpad: + version_format_for_key['BreakpadVersion'] = \ + '@MAJOR@.@MINOR@.@BUILD@.@PATCH@' + + # Insert the product version. + if not _AddVersionKeys(plist, + version_format_for_key, + version=options.version, + overrides=overrides): + return 2 + + # Add Breakpad if configured to do so. + if options.use_breakpad: + if options.branding is None: + print('Use of Breakpad requires branding.', file=sys.stderr) + return 1 + # Map "target_os" passed from gn via the --platform parameter + # to the platform as known by breakpad. + platform = {'mac': 'Mac', 'ios': 'iOS'}[options.platform] + _AddBreakpadKeys(plist, options.branding, platform, + options.use_breakpad_staging) + else: + _RemoveBreakpadKeys(plist) + + # Add Keystone if configured to do so. + if options.use_keystone: + if options.bundle_identifier is None: + print('Use of Keystone requires the bundle id.', file=sys.stderr) + return 1 + _AddKeystoneKeys(plist, options.bundle_identifier, + options.keystone_base_tag) + else: + _RemoveKeystoneKeys(plist) + + # Adds or removes any SCM keys. + if not _DoSCMKeys(plist, options.add_scm_info): + return 3 + + # Add GTM metadata keys. + if options.add_gtm_info: + _AddGTMKeys(plist, options.platform) + else: + _RemoveGTMKeys(plist) + + # Add SMPrivilegedExecutables keys. + if options.privileged_helper_id: + _AddPrivilegedHelperId(plist, options.privileged_helper_id) + else: + _RemovePrivilegedHelperId(plist) + + output_path = options.plist_path + if options.plist_output is not None: + output_path = options.plist_output + + # Now that all keys have been mutated, rewrite the file. + # Convert Info.plist to the format requested by the --format flag. Any + # format would work on Mac, but iOS requires a specific format.
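+ # A typical end-to-end invocation of this script looks like (values are + # illustrative only): + # tweak_info_plist.py --plist Info.plist --breakpad 1 \ + # --branding Chromium --platform mac --version 1.2.3.4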
+ if sys.version_info.major == 2: + with tempfile.NamedTemporaryFile() as temp_info_plist: + plistlib.writePlist(plist, temp_info_plist.name) + return _ConvertPlist(temp_info_plist.name, output_path, options.format) + with open(output_path, 'wb') as f: + plist_format = {'binary1': plistlib.FMT_BINARY, 'xml1': plistlib.FMT_XML} + plistlib.dump(plist, f, fmt=plist_format[options.format]) + + +if __name__ == '__main__': + # TODO(https://crbug.com/941669): Temporary workaround until all scripts use + # python3 by default. + if sys.version_info[0] < 3: + os.execvp('python3', ['python3'] + sys.argv) + sys.exit(Main(sys.argv[1:])) diff --git a/apple/write_pkg_info.py b/apple/write_pkg_info.py new file mode 100644 index 000000000000..2f59c2f732fa --- /dev/null +++ b/apple/write_pkg_info.py @@ -0,0 +1,52 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import os +import plist_util +import sys + +# This script creates a PkgInfo file for an OS X .app bundle from the bundle's +# Info.plist. +# Usage: python write_pkg_info.py --plist Foo.app/Contents/Info.plist \ +# --output Foo.app/Contents/PkgInfo + + +def Main(): + parser = argparse.ArgumentParser( + description='A script to write PkgInfo files for .app bundles.') + parser.add_argument('--plist', + required=True, + help='Path to the Info.plist for the .app.') + parser.add_argument('--output', + required=True, + help='Path to the desired output file.') + args = parser.parse_args() + + # Remove the output if it exists already. + try: + os.unlink(args.output) + except FileNotFoundError: + pass + + plist = plist_util.LoadPList(args.plist) + package_type = plist['CFBundlePackageType'] + if package_type != 'APPL': + raise ValueError('Expected CFBundlePackageType to be %s, got %s' % \ + ('APPL', package_type)) + + # The format of PkgInfo is eight characters: the four-character bundle type + # followed by the four-character bundle signature. If the signature is + # missing, four '?' characters are used instead. + signature_code = plist.get('CFBundleSignature', '????') + if len(signature_code) != 4: + raise ValueError('CFBundleSignature should be exactly four characters, ' + + 'got %s' % signature_code) + + with open(args.output, 'w') as fp: + fp.write('%s%s' % (package_type, signature_code)) + return 0 + + +if __name__ == '__main__': + sys.exit(Main()) diff --git a/apple/xcrun.py b/apple/xcrun.py new file mode 100755 index 000000000000..011dd477fabd --- /dev/null +++ b/apple/xcrun.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python3 +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +""" +Wrapper around xcrun adding support for a --developer-dir parameter to set +the DEVELOPER_DIR environment variable, and for converting paths from relative +to absolute (since this is required by most of the tools run via xcrun).
+""" + +import argparse +import os +import subprocess +import sys + + +def xcrun(command, developer_dir): + environ = dict(os.environ) + if developer_dir: + environ['DEVELOPER_DIR'] = os.path.abspath(developer_dir) + + processed_args = ['/usr/bin/xcrun'] + for arg in command: + if os.path.exists(arg): + arg = os.path.abspath(arg) + processed_args.append(arg) + + process = subprocess.Popen(processed_args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + env=environ) + + stdout, stderr = process.communicate() + sys.stdout.write(stdout) + if process.returncode: + sys.stderr.write(stderr) + sys.exit(process.returncode) + + +def main(args): + parser = argparse.ArgumentParser(add_help=False) + parser.add_argument( + '--developer-dir', + help='path to developer dir to use for the invocation of xcrun') + + parsed, remaining_args = parser.parse_known_args(args) + xcrun(remaining_args, parsed.developer_dir) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/args/OWNERS b/args/OWNERS new file mode 100644 index 000000000000..d218b6bdbd0d --- /dev/null +++ b/args/OWNERS @@ -0,0 +1 @@ +per-file headless.gn=file://headless/OWNERS diff --git a/args/README.txt b/args/README.txt new file mode 100644 index 000000000000..b82fb04ab28a --- /dev/null +++ b/args/README.txt @@ -0,0 +1,27 @@ +This directory is here to hold .gni files that contain sets of GN build +arguments for given configurations. + +Some projects or bots may have build configurations with specific combinations +of flags. Rather than making a new global flag for your specific project and +adding it all over the build to each arg it should affect, you can add a .gni +file here with the variables. + +For example, for project foo you may put in build/args/foo.gni: + + target_os = "android" + use_pulseaudio = false + use_ozone = true + system_libdir = "foo" + +Users wanting to build this configuration would run: + + $ gn args out/mybuild + +And add the following line to their args for that build directory: + + import("//build/args/foo.gni") + # You can set any other args here like normal. + is_component_build = false + +This way everybody can agree on a set of flags for a project, and their builds +stay in sync as the flags in foo.gni are modified. diff --git a/args/chromeos/README.md b/args/chromeos/README.md new file mode 100644 index 000000000000..284225279d09 --- /dev/null +++ b/args/chromeos/README.md @@ -0,0 +1,57 @@ +This directory is used to store GN arg mapping for Chrome OS boards. The values +of the args are determined by processing the [chromeos-chrome ebuild] for a +given board and a given ChromeOS version (stored in the [CHROMEOS_LKGM] file). + +Files in this directory are populated by running `gclient sync` with specific +arguments set in the .gclient file. Specifically: +* The file must have a top-level variable set: `target_os = ["chromeos"]` +* The `"custom_vars"` parameter of the chromium/src.git solution must include + the parameter: `"cros_boards": "{BOARD_NAMES}"` where `{BOARD_NAMES}` is a + colon-separated list of boards you'd like to checkout. +* If you'd like to a checkout a QEMU-bootable image for a given board, include + it in the `cros_boards_with_qemu_images` var rather than the `cros_boards` + var. 
+ +A typical .gclient file is a sibling of the src/ directory, and might look like +this: +``` +solutions = [ + { + "url": "https://chromium.googlesource.com/chromium/src.git", + "managed": False, + "name": "src", + "custom_deps": {}, + "custom_vars" : { + "checkout_src_internal": True, + "cros_boards": "eve:kevin", + # If a QEMU-bootable image is desired for any board, move it from + # the previous var to the following: + "cros_boards_with_qemu_images": "amd64-generic", + }, + }, +] +target_os = ["chromeos"] +``` + +To use these files in a build, simply add the following line to your GN args: +``` +import("//build/args/chromeos/${some_board}.gni") +``` + +That will produce a Chrome OS build of Chrome very similar to what is shipped +for that device. You can also supply additional args or even override ones +supplied in the .gni file after the `import()` line. For example, the following +args will produce a debug build of Chrome for board=eve using goma: +``` +import("//build/args/chromeos/eve.gni") + +is_debug = true +use_goma = true +goma_dir = "/path/to/goma/" +``` + +TODO(bpastene): Make 'cros_boards' a first class citizen in gclient and replace +it with 'target_boards' instead. + +[chromeos-chrome ebuild]: https://chromium.googlesource.com/chromiumos/overlays/chromiumos-overlay/+/HEAD/chromeos-base/chromeos-chrome/chromeos-chrome-9999.ebuild +[CHROMEOS_LKGM]: https://chromium.googlesource.com/chromium/src/+/HEAD/chromeos/CHROMEOS_LKGM diff --git a/args/headless.gn b/args/headless.gn new file mode 100644 index 000000000000..8834eb1bb4ff --- /dev/null +++ b/args/headless.gn @@ -0,0 +1,56 @@ +# GN args template for the Headless Chrome library +# +# Add an import to args.gn in the out directory and run gn gen on the +# directory to use it. E.g. for out directory out/foo: +# echo 'import("//build/args/headless.gn")' > out/foo/args.gn +# gn gen out/foo +# +# Use gn args to add your own build preferences. + +use_ozone = true +ozone_auto_platforms = false +ozone_platform = "headless" +ozone_platform_headless = true +angle_enable_vulkan = true +angle_enable_swiftshader = true + +# Embed resource.pak into binary to simplify deployment. +headless_use_embedded_resources = true + +# Disable headless commands support. +headless_enable_commands = false + +# Don't use Prefs component, disabling access to Local State prefs. +headless_use_prefs = false + +# Don't use Policy component, disabling all policies. +headless_use_policy = false + +# Remove a dependency on the system fontconfig library. +use_bundled_fontconfig = true + +# In order to simplify deployment we build the ICU data file +# into the binary. +icu_use_data_file = false + +# Use embedded data instead of external files for headless in order +# to simplify deployment. +v8_use_external_startup_data = false + +enable_nacl = false +enable_print_preview = false +enable_remoting = false +use_alsa = false +use_bluez = false +use_cups = false +use_dbus = false +use_gio = false +use_kerberos = false +use_libpci = false +use_pulseaudio = false +use_udev = false +rtc_use_pipewire = false +v8_enable_lazy_source_positions = false +use_glib = false +use_gtk = false +use_pangocairo = false diff --git a/build-ctags.sh b/build-ctags.sh new file mode 100755 index 000000000000..d7756a2ba663 --- /dev/null +++ b/build-ctags.sh @@ -0,0 +1,49 @@ +#!/bin/bash + +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file.
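+ +# Usage: ./build-ctags.sh [dir ...] +# Builds a tags file for the top-level tree, plus one for each extra directory +# named on the command line.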
+ +if [[ a"`ctags --version | head -1 | grep \"^Exuberant Ctags\"`" == "a" ]]; then + cat <<EOF + You must be using Exuberant Ctags, not just standard GNU ctags. If you are on + Debian or a related flavor of Linux, you may want to try running + apt-get install exuberant-ctags. +EOF + exit +fi + +CHROME_SRC_DIR="$PWD" + +fail() { + echo "Failed to create ctags for $1" + exit 1 +} + +ctags_cmd() { + echo "ctags --languages=C++ $1 --exclude=.git -R -f .tmp_tags" +} + +build_dir() { + local extraexcludes="" + if [[ a"$1" == "a--extra-excludes" ]]; then + extraexcludes="--exclude=third_party --exclude=build --exclude=out" + shift + fi + + cd "$CHROME_SRC_DIR/$1" || fail $1 + # Redirect error messages so they aren't seen because they are almost always + # errors about components that you just happen to have not built (NaCl, for + # example). + $(ctags_cmd "$extraexcludes") 2> /dev/null || fail $1 + mv -f .tmp_tags tags +} + +# We always build the top level but leave all submodules as optional. +build_dir --extra-excludes "" "top level" + +# Build any other directories that are listed on the command line. +for dir in $@; do + build_dir "$1" + shift +done diff --git a/build_config.h b/build_config.h new file mode 100644 index 000000000000..6db5d9bca059 --- /dev/null +++ b/build_config.h @@ -0,0 +1,385 @@ +// Copyright 2012 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file doesn't belong to any GN target by design, for faster builds and +// less developer overhead. + +// This file adds build flags about the OS we're currently building on. They are +// defined directly in this file instead of via a `buildflag_header` target in a +// GN file for faster builds. They are defined using the corresponding OS defines +// (e.g. OS_WIN) which are also defined in this file (except for OS_CHROMEOS, +// which is set by the build system). These defines are deprecated and should +// NOT be used directly. For example: +// Please Use: #if BUILDFLAG(IS_WIN) +// Deprecated: #if defined(OS_WIN) +// +// Operating System: +// IS_AIX / IS_ANDROID / IS_ASMJS / IS_CHROMEOS / IS_FREEBSD / IS_FUCHSIA / +// IS_IOS / IS_IOS_MACCATALYST / IS_LINUX / IS_MAC / IS_NACL / IS_NETBSD / +// IS_OPENBSD / IS_QNX / IS_SOLARIS / IS_WIN +// Operating System family: +// IS_APPLE: IOS or MAC or IOS_MACCATALYST +// IS_BSD: FREEBSD or NETBSD or OPENBSD +// IS_POSIX: AIX or ANDROID or ASMJS or CHROMEOS or FREEBSD or IOS or LINUX +// or MAC or NACL or NETBSD or OPENBSD or QNX or SOLARIS + +// This file also adds defines specific to the platform, architecture etc. +// +// Platform: +// IS_OZONE +// +// Compiler: +// COMPILER_MSVC / COMPILER_GCC +// +// Processor: +// ARCH_CPU_ARM64 / ARCH_CPU_ARMEL / ARCH_CPU_LOONG32 / ARCH_CPU_LOONG64 / +// ARCH_CPU_MIPS / ARCH_CPU_MIPS64 / ARCH_CPU_MIPS64EL / ARCH_CPU_MIPSEL / +// ARCH_CPU_PPC64 / ARCH_CPU_S390 / ARCH_CPU_S390X / ARCH_CPU_X86 / +// ARCH_CPU_X86_64 / ARCH_CPU_RISCV64 +// Processor family: +// ARCH_CPU_ARM_FAMILY: ARMEL or ARM64 +// ARCH_CPU_LOONG_FAMILY: LOONG32 or LOONG64 +// ARCH_CPU_MIPS_FAMILY: MIPS64EL or MIPSEL or MIPS64 or MIPS +// ARCH_CPU_PPC64_FAMILY: PPC64 +// ARCH_CPU_S390_FAMILY: S390 or S390X +// ARCH_CPU_X86_FAMILY: X86 or X86_64 +// ARCH_CPU_RISCV_FAMILY: RISCV64 +// Processor features: +// ARCH_CPU_31_BITS / ARCH_CPU_32_BITS / ARCH_CPU_64_BITS +// ARCH_CPU_BIG_ENDIAN / ARCH_CPU_LITTLE_ENDIAN + +#ifndef BUILD_BUILD_CONFIG_H_ +#define BUILD_BUILD_CONFIG_H_ + +#include "build/buildflag.h" // IWYU pragma: export + +// A set of macros to use for platform detection. +#if defined(__native_client__) +// __native_client__ must be first, so that other OS_ defines are not set. +#define OS_NACL 1 +#elif defined(ANDROID) +#define OS_ANDROID 1 +#elif defined(__APPLE__) +// Only include TargetConditionals after testing ANDROID as some Android builds +// on the Mac have this header available and it's not needed unless the target +// is really an Apple platform. +#include <TargetConditionals.h> +#if defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE +#define OS_IOS 1 +// Catalyst is the technology that allows running iOS apps on macOS. These +// builds are both OS_IOS and OS_IOS_MACCATALYST.
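+// (TARGET_OS_MACCATALYST is tested with defined() before being evaluated +// because SDKs that predate Catalyst do not define it at all.)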
+#if defined(TARGET_OS_MACCATALYST) && TARGET_OS_MACCATALYST +#define OS_IOS_MACCATALYST +#endif // defined(TARGET_OS_MACCATALYST) && TARGET_OS_MACCATALYST +#else +#define OS_MAC 1 +#endif // defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE +#elif defined(__linux__) +#if !defined(OS_CHROMEOS) +// Do not define OS_LINUX on Chrome OS builds. +// The OS_CHROMEOS macro is defined in GN. +#define OS_LINUX 1 +#endif // !defined(OS_CHROMEOS) +// Include a system header to pull in features.h for glibc/uclibc macros. +#include <unistd.h> +#if defined(__GLIBC__) && !defined(__UCLIBC__) +// We really are using glibc, not uClibc pretending to be glibc. +#define LIBC_GLIBC 1 +#endif +#elif defined(_WIN32) +#define OS_WIN 1 +#elif defined(__Fuchsia__) +#define OS_FUCHSIA 1 +#elif defined(__FreeBSD__) +#define OS_FREEBSD 1 +#elif defined(__NetBSD__) +#define OS_NETBSD 1 +#elif defined(__OpenBSD__) +#define OS_OPENBSD 1 +#elif defined(__sun) +#define OS_SOLARIS 1 +#elif defined(__QNXNTO__) +#define OS_QNX 1 +#elif defined(_AIX) +#define OS_AIX 1 +#elif defined(__asmjs__) || defined(__wasm__) +#define OS_ASMJS 1 +#elif defined(__MVS__) +#define OS_ZOS 1 +#else +#error Please add support for your platform in build/build_config.h +#endif +// NOTE: Adding a new port? Please follow +// https://chromium.googlesource.com/chromium/src/+/main/docs/new_port_policy.md + +#if defined(OS_MAC) || defined(OS_IOS) +#define OS_APPLE 1 +#endif + +// For access to standard BSD features, use OS_BSD instead of a +// more specific macro. +#if defined(OS_FREEBSD) || defined(OS_NETBSD) || defined(OS_OPENBSD) +#define OS_BSD 1 +#endif + +// For access to standard POSIXish features, use OS_POSIX instead of a +// more specific macro. +#if defined(OS_AIX) || defined(OS_ANDROID) || defined(OS_ASMJS) || \ + defined(OS_FREEBSD) || defined(OS_IOS) || defined(OS_LINUX) || \ + defined(OS_CHROMEOS) || defined(OS_MAC) || defined(OS_NACL) || \ + defined(OS_NETBSD) || defined(OS_OPENBSD) || defined(OS_QNX) || \ + defined(OS_SOLARIS) || defined(OS_ZOS) +#define OS_POSIX 1 +#endif + +// OS build flags +#if defined(OS_AIX) +#define BUILDFLAG_INTERNAL_IS_AIX() (1) +#else +#define BUILDFLAG_INTERNAL_IS_AIX() (0) +#endif + +#if defined(OS_ANDROID) +#define BUILDFLAG_INTERNAL_IS_ANDROID() (1) +#else +#define BUILDFLAG_INTERNAL_IS_ANDROID() (0) +#endif + +#if defined(OS_APPLE) +#define BUILDFLAG_INTERNAL_IS_APPLE() (1) +#else +#define BUILDFLAG_INTERNAL_IS_APPLE() (0) +#endif + +#if defined(OS_ASMJS) +#define BUILDFLAG_INTERNAL_IS_ASMJS() (1) +#else +#define BUILDFLAG_INTERNAL_IS_ASMJS() (0) +#endif + +#if defined(OS_BSD) +#define BUILDFLAG_INTERNAL_IS_BSD() (1) +#else +#define BUILDFLAG_INTERNAL_IS_BSD() (0) +#endif + +#if defined(OS_CHROMEOS) +#define BUILDFLAG_INTERNAL_IS_CHROMEOS() (1) +#else +#define BUILDFLAG_INTERNAL_IS_CHROMEOS() (0) +#endif + +#if defined(OS_FREEBSD) +#define BUILDFLAG_INTERNAL_IS_FREEBSD() (1) +#else +#define BUILDFLAG_INTERNAL_IS_FREEBSD() (0) +#endif + +#if defined(OS_FUCHSIA) +#define BUILDFLAG_INTERNAL_IS_FUCHSIA() (1) +#else +#define BUILDFLAG_INTERNAL_IS_FUCHSIA() (0) +#endif + +#if defined(OS_IOS) +#define BUILDFLAG_INTERNAL_IS_IOS() (1) +#else +#define BUILDFLAG_INTERNAL_IS_IOS() (0) +#endif + +#if defined(OS_IOS_MACCATALYST) +#define BUILDFLAG_INTERNAL_IS_IOS_MACCATALYST() (1) +#else +#define BUILDFLAG_INTERNAL_IS_IOS_MACCATALYST() (0) +#endif + +#if defined(OS_LINUX) +#define BUILDFLAG_INTERNAL_IS_LINUX() (1) +#else +#define BUILDFLAG_INTERNAL_IS_LINUX() (0) +#endif + +#if defined(OS_MAC) +#define BUILDFLAG_INTERNAL_IS_MAC() (1)
+#else +#define BUILDFLAG_INTERNAL_IS_MAC() (0) +#endif + +#if defined(OS_NACL) +#define BUILDFLAG_INTERNAL_IS_NACL() (1) +#else +#define BUILDFLAG_INTERNAL_IS_NACL() (0) +#endif + +#if defined(OS_NETBSD) +#define BUILDFLAG_INTERNAL_IS_NETBSD() (1) +#else +#define BUILDFLAG_INTERNAL_IS_NETBSD() (0) +#endif + +#if defined(OS_OPENBSD) +#define BUILDFLAG_INTERNAL_IS_OPENBSD() (1) +#else +#define BUILDFLAG_INTERNAL_IS_OPENBSD() (0) +#endif + +#if defined(OS_POSIX) +#define BUILDFLAG_INTERNAL_IS_POSIX() (1) +#else +#define BUILDFLAG_INTERNAL_IS_POSIX() (0) +#endif + +#if defined(OS_QNX) +#define BUILDFLAG_INTERNAL_IS_QNX() (1) +#else +#define BUILDFLAG_INTERNAL_IS_QNX() (0) +#endif + +#if defined(OS_SOLARIS) +#define BUILDFLAG_INTERNAL_IS_SOLARIS() (1) +#else +#define BUILDFLAG_INTERNAL_IS_SOLARIS() (0) +#endif + +#if defined(OS_WIN) +#define BUILDFLAG_INTERNAL_IS_WIN() (1) +#else +#define BUILDFLAG_INTERNAL_IS_WIN() (0) +#endif + +#if defined(USE_OZONE) +#define BUILDFLAG_INTERNAL_IS_OZONE() (1) +#else +#define BUILDFLAG_INTERNAL_IS_OZONE() (0) +#endif + +// Compiler detection. Note: clang masquerades as GCC on POSIX and as MSVC on +// Windows. +#if defined(__GNUC__) +#define COMPILER_GCC 1 +#elif defined(_MSC_VER) +#define COMPILER_MSVC 1 +#else +#error Please add support for your compiler in build/build_config.h +#endif + +// Processor architecture detection. For more info on what's defined, see: +// http://msdn.microsoft.com/en-us/library/b0084kay.aspx +// http://www.agner.org/optimize/calling_conventions.pdf +// or with gcc, run: "echo | gcc -E -dM -" +#if defined(_M_X64) || defined(__x86_64__) +#define ARCH_CPU_X86_FAMILY 1 +#define ARCH_CPU_X86_64 1 +#define ARCH_CPU_64_BITS 1 +#define ARCH_CPU_LITTLE_ENDIAN 1 +#elif defined(_M_IX86) || defined(__i386__) +#define ARCH_CPU_X86_FAMILY 1 +#define ARCH_CPU_X86 1 +#define ARCH_CPU_32_BITS 1 +#define ARCH_CPU_LITTLE_ENDIAN 1 +#elif defined(__s390x__) +#define ARCH_CPU_S390_FAMILY 1 +#define ARCH_CPU_S390X 1 +#define ARCH_CPU_64_BITS 1 +#define ARCH_CPU_BIG_ENDIAN 1 +#elif defined(__s390__) +#define ARCH_CPU_S390_FAMILY 1 +#define ARCH_CPU_S390 1 +#define ARCH_CPU_31_BITS 1 +#define ARCH_CPU_BIG_ENDIAN 1 +#elif (defined(__PPC64__) || defined(__PPC__)) && defined(__BIG_ENDIAN__) +#define ARCH_CPU_PPC64_FAMILY 1 +#define ARCH_CPU_PPC64 1 +#define ARCH_CPU_64_BITS 1 +#define ARCH_CPU_BIG_ENDIAN 1 +#elif defined(__PPC64__) +#define ARCH_CPU_PPC64_FAMILY 1 +#define ARCH_CPU_PPC64 1 +#define ARCH_CPU_64_BITS 1 +#define ARCH_CPU_LITTLE_ENDIAN 1 +#elif defined(__ARMEL__) +#define ARCH_CPU_ARM_FAMILY 1 +#define ARCH_CPU_ARMEL 1 +#define ARCH_CPU_32_BITS 1 +#define ARCH_CPU_LITTLE_ENDIAN 1 +#elif defined(__aarch64__) || defined(_M_ARM64) +#define ARCH_CPU_ARM_FAMILY 1 +#define ARCH_CPU_ARM64 1 +#define ARCH_CPU_64_BITS 1 +#define ARCH_CPU_LITTLE_ENDIAN 1 +#elif defined(__pnacl__) || defined(__asmjs__) || defined(__wasm__) +#define ARCH_CPU_32_BITS 1 +#define ARCH_CPU_LITTLE_ENDIAN 1 +#elif defined(__MIPSEL__) +#if defined(__LP64__) +#define ARCH_CPU_MIPS_FAMILY 1 +#define ARCH_CPU_MIPS64EL 1 +#define ARCH_CPU_64_BITS 1 +#define ARCH_CPU_LITTLE_ENDIAN 1 +#else +#define ARCH_CPU_MIPS_FAMILY 1 +#define ARCH_CPU_MIPSEL 1 +#define ARCH_CPU_32_BITS 1 +#define ARCH_CPU_LITTLE_ENDIAN 1 +#endif +#elif defined(__MIPSEB__) +#if defined(__LP64__) +#define ARCH_CPU_MIPS_FAMILY 1 +#define ARCH_CPU_MIPS64 1 +#define ARCH_CPU_64_BITS 1 +#define ARCH_CPU_BIG_ENDIAN 1 +#else +#define ARCH_CPU_MIPS_FAMILY 1 +#define ARCH_CPU_MIPS 1 +#define ARCH_CPU_32_BITS 1 +#define 
ARCH_CPU_BIG_ENDIAN 1 +#endif +#elif defined(__loongarch32) +#define ARCH_CPU_LOONG_FAMILY 1 +#define ARCH_CPU_LOONG32 1 +#define ARCH_CPU_32_BITS 1 +#define ARCH_CPU_LITTLE_ENDIAN 1 +#elif defined(__loongarch64) +#define ARCH_CPU_LOONG_FAMILY 1 +#define ARCH_CPU_LOONG64 1 +#define ARCH_CPU_64_BITS 1 +#define ARCH_CPU_LITTLE_ENDIAN 1 +#elif defined(__riscv) && (__riscv_xlen == 64) +#define ARCH_CPU_RISCV_FAMILY 1 +#define ARCH_CPU_RISCV64 1 +#define ARCH_CPU_64_BITS 1 +#define ARCH_CPU_LITTLE_ENDIAN 1 +#else +#error Please add support for your architecture in build/build_config.h +#endif + +// Type detection for wchar_t. +#if defined(OS_WIN) +#define WCHAR_T_IS_UTF16 +#elif defined(OS_FUCHSIA) +#define WCHAR_T_IS_UTF32 +#elif defined(OS_POSIX) && defined(COMPILER_GCC) && defined(__WCHAR_MAX__) && \ + (__WCHAR_MAX__ == 0x7fffffff || __WCHAR_MAX__ == 0xffffffff) +#define WCHAR_T_IS_UTF32 +#elif defined(OS_POSIX) && defined(COMPILER_GCC) && defined(__WCHAR_MAX__) && \ + (__WCHAR_MAX__ == 0x7fff || __WCHAR_MAX__ == 0xffff) +// On POSIX, we'll detect short wchar_t, but projects aren't guaranteed to +// compile in this mode (in particular, Chrome doesn't). This is intended for +// other projects using base that manage their own dependencies and make sure +// short wchar_t works for them. +#define WCHAR_T_IS_UTF16 +#else +#error Please add support for your compiler in build/build_config.h +#endif + +#if defined(OS_ANDROID) +// The compiler thinks std::string::const_iterator and "const char*" are +// equivalent types. +#define STD_STRING_ITERATOR_IS_CHAR_POINTER +// The compiler thinks std::u16string::const_iterator and "char16*" are +// equivalent types. +#define BASE_STRING16_ITERATOR_IS_CHAR16_POINTER +#endif + +#endif // BUILD_BUILD_CONFIG_H_ diff --git a/buildflag.h b/buildflag.h new file mode 100644 index 000000000000..634697986cea --- /dev/null +++ b/buildflag.h @@ -0,0 +1,47 @@ +// Copyright 2015 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef BUILD_BUILDFLAG_H_ +#define BUILD_BUILDFLAG_H_ + +// These macros un-mangle the names of the build flags in a way that looks +// natural, and give errors if the flag is not defined. Normally in the +// preprocessor it's easy to make mistakes that interpret "you haven't done +// the setup to know what the flag is" as "flag is off". Normally you would +// include the generated header rather than include this file directly. +// +// This is for use with generated headers. See build/buildflag_header.gni. + +// This dance of two macros concatenates two preprocessor args using ## doubly +// indirectly, because using ## directly would prevent macros in the +// parameters from being expanded. +#define BUILDFLAG_CAT_INDIRECT(a, b) a ## b +#define BUILDFLAG_CAT(a, b) BUILDFLAG_CAT_INDIRECT(a, b) + +// Accessor for build flags. +// +// To test for a value, if the build file specifies: +// +// ENABLE_FOO=true +// +// Then you would check at build time in source code with: +// +// #include "foo_flags.h" // The header the build file specified. +// +// #if BUILDFLAG(ENABLE_FOO) +// ... +// #endif +// +// There will be no #define called ENABLE_FOO so if you accidentally test for +// whether that is defined, it will always be negative.
You can also use +// the value in expressions: +// +// const char kSpamServerName[] = BUILDFLAG(SPAM_SERVER_NAME); +// +// Because the flag is accessed as a preprocessor macro with (), a compile +// error will be generated if the proper header defining the internal flag +// value has not been included. +#define BUILDFLAG(flag) (BUILDFLAG_CAT(BUILDFLAG_INTERNAL_, flag)()) + +#endif // BUILD_BUILDFLAG_H_ diff --git a/buildflag_header.gni b/buildflag_header.gni new file mode 100644 index 000000000000..f7b42f724dd7 --- /dev/null +++ b/buildflag_header.gni @@ -0,0 +1,137 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Generates a header with preprocessor defines specified by the build file. +# +# The flags are converted to function-style defines with mangled names and +# code uses an accessor macro to access the values. This is to try to +# minimize bugs where code checks whether something is defined or not, and +# the proper header isn't included, meaning the answer will always be silently +# false or might vary across the code base. +# +# In the GN template, specify build flags in the template as a list +# of strings that encode key/value pairs like this: +# +# flags = [ "ENABLE_FOO=1", "ENABLE_BAR=$enable_bar" ] +# +# The GN values "true" and "false" will be mapped to 1 and 0, respectively, so +# boolean #if flags can be expressed naturally. This means you can't directly +# make a define that generates a C++ value of true or false for use in code. +# If you REALLY need this, you can also use the strings "(true)" and "(false)" +# to prevent the rewriting. + +# To check the value of the flag in C code: +# +# #include "path/to/here/header_file.h" +# +# #if BUILDFLAG(ENABLE_FOO) +# ... +# #endif +# +# const char kSpamServerUrl[] = BUILDFLAG(SPAM_SERVER_URL); +# +# There will be no #define called ENABLE_FOO so if you accidentally test for +# that in an ifdef it will always be negative. +# +# +# Template parameters +# +# flags [required, list of strings] +# Flag values as described above. +# +# header [required, string] +# File name for generated header. By default, this will go in the +# generated file directory for this target, and you would include it +# with: +# #include "<path_to_this_BUILD_file>/<header>" +# +# header_dir [optional, string] +# Override the default location of the generated header. The string will +# be treated as a subdirectory of the root_gen_dir. For example: +# header_dir = "foo/bar" +# Then you can include the header as: +# #include "foo/bar/baz.h" +# +# deps, public_deps, testonly, visibility +# Normal meaning. +# +# +# Grit defines +# +# If one .grd file uses a flag, just add to the grit target: +# +# defines = [ +# "enable_doom_melon=$enable_doom_melon", +# ] +# +# If multiple .grd files use it, you'll want to put the defines in a .gni file +# so it can be shared. Generally this .gni file should include all grit defines +# for a given module (for some definition of "module"). Then do: +# +# defines = ui_grit_defines +# +# If you forget to do this, the flag will be implicitly false in the .grd file +# and those resources won't be compiled. You'll know because the resource +# #define won't be generated and any code that uses it won't compile. If you +# see a missing IDS_* string, this is probably the reason. +# +# +# Example +# +# buildflag_header("foo_buildflags") { +# header = "foo_buildflags.h" +# +# flags = [ +# # This uses the GN build flag enable_doom_melon as the definition. +# "ENABLE_DOOM_MELON=$enable_doom_melon", +# +# # This force-enables the flag. +# "ENABLE_SPACE_LASER=true", +# +# # This will expand to the quoted C string when used in source code. +# "SPAM_SERVER_URL=\"http://www.example.com/\"", +# ] +# } +template("buildflag_header") { + action(target_name) { + script = "//build/write_buildflag_header.py" + + if (defined(invoker.header_dir)) { + header_file = "${invoker.header_dir}/${invoker.header}" + } else { + # Compute the path from the root to this file. + header_file = rebase_path(".", "//") + "/${invoker.header}" + } + + outputs = [ "$root_gen_dir/$header_file" ] + + # Always write --flags to the file so it's not empty. Empty will confuse GN + # into thinking the response file isn't used. + response_file_contents = [ "--flags" ] + if (defined(invoker.flags)) { + response_file_contents += invoker.flags + } + + args = [ + "--output", + header_file, # Not rebased, Python script puts it inside gen-dir. + "--rulename", + get_label_info(":$target_name", "label_no_toolchain"), + "--gen-dir", + rebase_path(root_gen_dir, root_build_dir), + "--definitions", + "{{response_file_name}}", + ] + + forward_variables_from(invoker, + [ + "deps", + "public_deps", + "testonly", + "visibility", + ]) + + public_deps = [ "//build:buildflag_header_h" ] + } +} diff --git a/check_gn_headers.py b/check_gn_headers.py new file mode 100755 index 000000000000..6bfb878a0a2e --- /dev/null +++ b/check_gn_headers.py @@ -0,0 +1,311 @@ +#!/usr/bin/env python3 +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Find header files missing in GN. + +This script gets all the header files from ninja_deps, which come from the +true dependencies generated by the compiler, and reports the ones that don't +exist in GN.
+""" + +import argparse +import json +import os +import re +import shutil +import subprocess +import sys +import tempfile +from multiprocessing import Process, Queue + +SRC_DIR = os.path.abspath( + os.path.join(os.path.abspath(os.path.dirname(__file__)), os.path.pardir)) +DEPOT_TOOLS_DIR = os.path.join(SRC_DIR, 'third_party', 'depot_tools') + + +def GetHeadersFromNinja(out_dir, skip_obj, q): + """Return all the header files from ninja_deps""" + + def NinjaSource(): + cmd = [ + os.path.join(SRC_DIR, 'third_party', 'ninja', 'ninja'), '-C', out_dir, + '-t', 'deps' + ] + # A negative bufsize means to use the system default, which usually + # means fully buffered. + popen = subprocess.Popen(cmd, stdout=subprocess.PIPE, bufsize=-1) + for line in iter(popen.stdout.readline, ''): + yield line.rstrip() + + popen.stdout.close() + return_code = popen.wait() + if return_code: + raise subprocess.CalledProcessError(return_code, cmd) + + ans, err = set(), None + try: + ans = ParseNinjaDepsOutput(NinjaSource(), out_dir, skip_obj) + except Exception as e: + err = str(e) + q.put((ans, err)) + + +def ParseNinjaDepsOutput(ninja_out, out_dir, skip_obj): + """Parse ninja output and get the header files""" + all_headers = {} + + # Ninja always uses "/", even on Windows. + prefix = '../../' + + is_valid = False + obj_file = '' + for line in ninja_out: + if line.startswith(' '): + if not is_valid: + continue + if line.endswith('.h') or line.endswith('.hh'): + f = line.strip() + if f.startswith(prefix): + f = f[6:] # Remove the '../../' prefix + # build/ only contains build-specific files like build_config.h + # and buildflag.h, and system header files, so they should be + # skipped. + if f.startswith(out_dir) or f.startswith('out'): + continue + if not f.startswith('build'): + all_headers.setdefault(f, []) + if not skip_obj: + all_headers[f].append(obj_file) + else: + is_valid = line.endswith('(VALID)') + obj_file = line.split(':')[0] + + return all_headers + + +def GetHeadersFromGN(out_dir, q): + """Return all the header files from GN""" + + tmp = None + ans, err = set(), None + try: + # Argument |dir| is needed to make sure it's on the same drive on Windows. + # dir='' means dir='.', but doesn't introduce an unneeded prefix. + tmp = tempfile.mkdtemp(dir='') + shutil.copy2(os.path.join(out_dir, 'args.gn'), + os.path.join(tmp, 'args.gn')) + # Do "gn gen" in a temp dir to prevent dirtying |out_dir|. + gn_exe = 'gn.bat' if sys.platform == 'win32' else 'gn' + subprocess.check_call([ + os.path.join(DEPOT_TOOLS_DIR, gn_exe), 'gen', tmp, '--ide=json', '-q']) + gn_json = json.load(open(os.path.join(tmp, 'project.json'))) + ans = ParseGNProjectJSON(gn_json, out_dir, tmp) + except Exception as e: + err = str(e) + finally: + if tmp: + shutil.rmtree(tmp) + q.put((ans, err)) + + +def ParseGNProjectJSON(gn, out_dir, tmp_out): + """Parse GN output and get the header files""" + all_headers = set() + + for _target, properties in gn['targets'].items(): + sources = properties.get('sources', []) + public = properties.get('public', []) + # Exclude '"public": "*"'. + if type(public) is list: + sources += public + for f in sources: + if f.endswith('.h') or f.endswith('.hh'): + if f.startswith('//'): + f = f[2:] # Strip the '//' prefix. 
+ if f.startswith(tmp_out): + f = out_dir + f[len(tmp_out):] + all_headers.add(f) + + return all_headers + + +def GetDepsPrefixes(q): + """Return all the folders controlled by DEPS file""" + prefixes, err = set(), None + try: + gclient_exe = 'gclient.bat' if sys.platform == 'win32' else 'gclient' + # The -c snippet runs in whatever interpreter gclient spawns for + # 'python', so use the print() form, which is valid in both 2 and 3. + gclient_out = subprocess.check_output([ + os.path.join(DEPOT_TOOLS_DIR, gclient_exe), + 'recurse', '--no-progress', '-j1', + 'python', '-c', 'import os;print(os.environ["GCLIENT_DEP_PATH"])'], + universal_newlines=True) + for i in gclient_out.split('\n'): + if i.startswith('src/'): + i = i[4:] + prefixes.add(i) + except Exception as e: + err = str(e) + q.put((prefixes, err)) + + +def IsBuildClean(out_dir): + cmd = [os.path.join(DEPOT_TOOLS_DIR, 'ninja'), '-C', out_dir, '-n'] + try: + # universal_newlines so the output is str, not bytes, under Python 3. + out = subprocess.check_output(cmd, universal_newlines=True) + return 'no work to do.' in out + except Exception as e: + print(e) + return False + + +def ParseWhiteList(whitelist): + out = set() + for line in whitelist.split('\n'): + line = re.sub(r'#.*', '', line).strip() + if line: + out.add(line) + return out + + +def FilterOutDepsedRepo(files, deps): + return {f for f in files if not any(f.startswith(d) for d in deps)} + + +def GetNonExistingFiles(lst): + out = set() + for f in lst: + if not os.path.isfile(f): + out.add(f) + return out + + +def main(): + + def DumpJson(data): + if args.json: + with open(args.json, 'w') as f: + json.dump(data, f) + + def PrintError(msg): + DumpJson([]) + parser.error(msg) + + parser = argparse.ArgumentParser(description=''' + NOTE: Use ninja to build all targets in OUT_DIR before running + this script.''') + parser.add_argument('--out-dir', metavar='OUT_DIR', default='out/Release', + help='output directory of the build') + parser.add_argument('--json', + help='JSON output filename for missing headers') + parser.add_argument('--whitelist', help='file containing whitelist') + parser.add_argument('--skip-dirty-check', action='store_true', + help='skip checking whether the build is dirty') + parser.add_argument('--verbose', action='store_true', + help='print more diagnostic info') + + args, _extras = parser.parse_known_args() + + if not os.path.isdir(args.out_dir): + parser.error('OUT_DIR "%s" does not exist.' % args.out_dir) + + if not args.skip_dirty_check and not IsBuildClean(args.out_dir): + dirty_msg = 'OUT_DIR looks dirty. You need to build all there.' + if args.json: + # Assume running on the bots. Silently skip this step. + # This is possible because the "analyze" step can be wrong due to + # underspecified header files. See crbug.com/725877 + print(dirty_msg) + DumpJson([]) + return 0 + else: + # Assume running interactively.
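+ # (parser.error() prints the message to stderr and exits with status 2.)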
+ parser.error(dirty_msg) + + d_q = Queue() + d_p = Process(target=GetHeadersFromNinja, args=(args.out_dir, True, d_q,)) + d_p.start() + + gn_q = Queue() + gn_p = Process(target=GetHeadersFromGN, args=(args.out_dir, gn_q,)) + gn_p.start() + + deps_q = Queue() + deps_p = Process(target=GetDepsPrefixes, args=(deps_q,)) + deps_p.start() + + d, d_err = d_q.get() + gn, gn_err = gn_q.get() + missing = set(d.keys()) - gn + nonexisting = GetNonExistingFiles(gn) + + deps, deps_err = deps_q.get() + missing = FilterOutDepsedRepo(missing, deps) + nonexisting = FilterOutDepsedRepo(nonexisting, deps) + + d_p.join() + gn_p.join() + deps_p.join() + + if d_err: + PrintError(d_err) + if gn_err: + PrintError(gn_err) + if deps_err: + PrintError(deps_err) + if len(GetNonExistingFiles(d)) > 0: + print('Non-existing files in ninja deps:', GetNonExistingFiles(d)) + PrintError('Found non-existing files in ninja deps. You should ' + + 'build all in OUT_DIR.') + if len(d) == 0: + PrintError('OUT_DIR looks empty. You should build all there.') + if any((('/gen/' in i) for i in nonexisting)): + PrintError('OUT_DIR looks wrong. You should build all there.') + + if args.whitelist: + whitelist = ParseWhiteList(open(args.whitelist).read()) + missing -= whitelist + nonexisting -= whitelist + + missing = sorted(missing) + nonexisting = sorted(nonexisting) + + DumpJson(sorted(missing + nonexisting)) + + if len(missing) == 0 and len(nonexisting) == 0: + return 0 + + if len(missing) > 0: + print('\nThe following files should be included in gn files:') + for i in missing: + print(i) + + if len(nonexisting) > 0: + print('\nThe following non-existing files should be removed from gn files:') + for i in nonexisting: + print(i) + + if args.verbose: + # Only get detailed obj dependency here since it is slower. + GetHeadersFromNinja(args.out_dir, False, d_q) + d, d_err = d_q.get() + print('\nDetailed dependency info:') + for f in missing: + print(f) + for cc in d[f]: + print(' ', cc) + + print('\nMissing headers sorted by number of affected object files:') + count = {k: len(v) for (k, v) in d.items()} + for f in sorted(count, key=count.get, reverse=True): + if f in missing: + print(count[f], f) + + if args.json: + # Assume running on the bots. Temporarily return 0 before + # https://crbug.com/937847 is fixed. + return 0 + return 1 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/check_gn_headers_unittest.py b/check_gn_headers_unittest.py new file mode 100755 index 000000000000..954d95bfc959 --- /dev/null +++ b/check_gn_headers_unittest.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python3 +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
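+ +# Unit tests for check_gn_headers.py; run directly, e.g.: +# ./check_gn_headers_unittest.py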
+ +import logging +import json +import unittest +import check_gn_headers + + +ninja_input = r''' +obj/a.o: #deps 1, deps mtime 123 (VALID) + ../../a.cc + ../../dir/path/b.h + ../../c.hh + +obj/b.o: #deps 1, deps mtime 123 (STALE) + ../../b.cc + ../../dir2/path/b.h + ../../c2.hh + +obj/c.o: #deps 1, deps mtime 123 (VALID) + ../../c.cc + ../../build/a.h + gen/b.h + ../../out/Release/gen/no.h + ../../dir3/path/b.h + ../../c3.hh +''' + + +gn_input = json.loads(r''' +{ + "others": [], + "targets": { + "//:All": { + }, + "//:base": { + "public": [ "//base/p.h" ], + "sources": [ "//base/a.cc", "//base/a.h", "//base/b.hh" ], + "visibility": [ "*" ] + }, + "//:star_public": { + "public": "*", + "sources": [ "//base/c.h", "//tmp/gen/a.h" ], + "visibility": [ "*" ] + } + } +} +''') + + +whitelist = r''' + white-front.c +a/b/c/white-end.c # comment + dir/white-both.c #more comment + +# empty line above +a/b/c +''' + + +class CheckGnHeadersTest(unittest.TestCase): + def testNinja(self): + headers = check_gn_headers.ParseNinjaDepsOutput( + ninja_input.split('\n'), 'out/Release', False) + expected = { + 'dir/path/b.h': ['obj/a.o'], + 'c.hh': ['obj/a.o'], + 'dir3/path/b.h': ['obj/c.o'], + 'c3.hh': ['obj/c.o'], + } + self.assertEqual(headers, expected) + + def testGn(self): + headers = check_gn_headers.ParseGNProjectJSON(gn_input, + 'out/Release', 'tmp') + expected = set([ + 'base/a.h', + 'base/b.hh', + 'base/c.h', + 'base/p.h', + 'out/Release/gen/a.h', + ]) + self.assertEqual(headers, expected) + + def testWhitelist(self): + output = check_gn_headers.ParseWhiteList(whitelist) + expected = set([ + 'white-front.c', + 'a/b/c/white-end.c', + 'dir/white-both.c', + 'a/b/c', + ]) + self.assertEqual(output, expected) + + +if __name__ == '__main__': + logging.getLogger().setLevel(logging.DEBUG) + unittest.main(verbosity=2) diff --git a/check_gn_headers_whitelist.txt b/check_gn_headers_whitelist.txt new file mode 100644 index 000000000000..dfefd7d2458f --- /dev/null +++ b/check_gn_headers_whitelist.txt @@ -0,0 +1,232 @@ +# Do not add files to this whitelist unless you are adding a new OS or +# changing the GN arguments on bots. 
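+# +# Format: one path per line, relative to src/; '#' starts a comment and blank +# lines are ignored (see ParseWhiteList in check_gn_headers.py).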
+ +ash/accelerators/accelerator_table.h +ash/ash_export.h +ash/constants/ash_switches.h +ash/metrics/task_switch_metrics_recorder.h +ash/metrics/task_switch_source.h +ash/metrics/user_metrics_recorder.h +ash/public/cpp/ash_public_export.h +ash/public/cpp/shelf_types.h +ash/session/session_observer.h +ash/shell.h +ash/wm/system_modal_container_event_filter_delegate.h +cc/cc_export.h +cc/input/browser_controls_state.h +cc/input/event_listener_properties.h +cc/input/scrollbar.h +cc/layers/performance_properties.h +chrome/browser/android/android_theme_resources.h +chrome/browser/android/resource_id.h +chrome/browser/ash/login/signin/oauth2_login_manager.h +chrome/browser/ash/login/signin/oauth2_token_fetcher.h +chrome/browser/ash/profiles/profile_helper.h +chrome/browser/ash/settings/cros_settings.h +chrome/browser/certificate_provider/certificate_provider.h +chrome/browser/certificate_provider/certificate_provider_service.h +chrome/browser/certificate_provider/certificate_provider_service_factory.h +chrome/browser/certificate_provider/certificate_requests.h +chrome/browser/certificate_provider/pin_dialog_manager.h +chrome/browser/certificate_provider/sign_requests.h +chrome/browser/certificate_provider/thread_safe_certificate_map.h +chrome/browser/component_updater/component_installer_errors.h +chrome/browser/download/download_file_icon_extractor.h +chrome/browser/extensions/api/networking_cast_private/chrome_networking_cast_private_delegate.h +chrome/browser/mac/bluetooth_utility.h +chrome/browser/media/router/mojo/media_route_provider_util_win.h +chrome/browser/media/webrtc/desktop_media_list_ash.h +chrome/browser/media/webrtc/desktop_media_list_observer.h +chrome/browser/media/webrtc/rtp_dump_type.h +chrome/browser/media_galleries/media_file_system_context.h +chrome/browser/notifications/displayed_notifications_dispatch_callback.h +chrome/browser/ui/app_icon_loader_delegate.h +chrome/browser/ash/app_list/app_list_syncable_service_factory.h +chrome/browser/ui/ash/ash_util.h +chrome/browser/ui/ash/multi_user/multi_user_util.h +chrome/browser/ui/network_profile_bubble.h +chrome/browser/ui/views/frame/browser_frame_header_ash.h +chrome/install_static/chromium_install_modes.h +chrome/install_static/install_constants.h +chrome/install_static/install_details.h +chrome/install_static/install_modes.h +chrome/install_static/install_util.h +chrome/install_static/test/scoped_install_details.h +chrome/installer/util/google_update_settings.h +components/cdm/browser/cdm_message_filter_android.h +components/device_event_log/device_event_log_export.h +components/login/login_export.h +components/media_router/common/providers/cast/certificate/cast_crl_root_ca_cert_der-inc.h +components/nacl/browser/nacl_browser_delegate.h +components/nacl/renderer/ppb_nacl_private.h +components/policy/core/browser/configuration_policy_handler_parameters.h +components/policy/proto/policy_proto_export.h +components/rlz/rlz_tracker_delegate.h +components/session_manager/session_manager_types.h +components/sessions/core/sessions_export.h +components/sync/engine/connection_status.h +components/sync/engine/net/network_time_update_callback.h +components/translate/core/browser/translate_infobar_delegate.h +components/user_manager/user.h +components/user_manager/user_image/user_image.h +components/user_manager/user_manager.h +components/wifi/wifi_export.h +components/wifi/wifi_service.h +content/browser/background_fetch/background_fetch_constants.h +content/common/mac/attributed_string_coder.h 
+content/public/browser/context_factory.h +content/public/browser/media_observer.h +content/public/common/gpu_stream_constants.h +content/renderer/external_popup_menu.h +content/shell/android/shell_descriptors.h +extensions/browser/api/clipboard/clipboard_api.h +extensions/browser/api/webcam_private/webcam.h +extensions/browser/api/webcam_private/webcam_private_api.h +extensions/browser/entry_info.h +extensions/browser/extension_event_histogram_value.h +extensions/browser/extension_function_histogram_value.h +google_apis/gcm/base/encryptor.h +google_apis/gcm/base/gcm_export.h +gpu/GLES2/gl2chromium.h +gpu/GLES2/gl2chromium_autogen.h +gpu/GLES2/gl2extchromium.h +gpu/command_buffer/client/context_support.h +gpu/command_buffer/client/gles2_implementation_unittest_autogen.h +gpu/command_buffer/client/gles2_interface_autogen.h +gpu/command_buffer/client/gles2_interface_stub_autogen.h +gpu/command_buffer/client/gles2_interface_stub_impl_autogen.h +gpu/command_buffer/client/gpu_control_client.h +gpu/command_buffer/client/ref_counted.h +gpu/command_buffer/client/shared_memory_limits.h +gpu/command_buffer/common/command_buffer_shared.h +gpu/command_buffer/common/gles2_cmd_utils_autogen.h +gpu/command_buffer/common/gles2_cmd_utils_implementation_autogen.h +gpu/command_buffer/common/gpu_memory_allocation.h +gpu/command_buffer/service/gles2_cmd_decoder_unittest_extensions_autogen.h +gpu/command_buffer/service/memory_tracking.h +gpu/config/gpu_lists_version.h +gpu/gles2_conform_support/gtf/gtf_stubs.h +gpu/gpu_export.h +ipc/ipc_channel_proxy_unittest_messages.h +ipc/ipc_message_null_macros.h +media/audio/audio_logging.h +media/base/routing_token_callback.h +media/base/video_renderer_sink.h +media/cast/common/mod_util.h +media/cast/net/rtcp/rtcp_session.h +media/filters/ffmpeg_aac_bitstream_converter.h +media/filters/ffmpeg_h264_to_annex_b_bitstream_converter.h +media/filters/h264_to_annex_b_bitstream_converter.h +media/formats/mp4/avc.h +media/formats/mp4/bitstream_converter.h +media/formats/mp4/fourccs.h +media/formats/mp4/rcheck.h +media/formats/mpeg/adts_stream_parser.h +media/formats/mpeg/mpeg1_audio_stream_parser.h +media/formats/mpeg/mpeg_audio_stream_parser_base.h +media/gpu/media_gpu_export.h +mojo/core/broker_messages.h +mojo/core/system_impl_export.h +mojo/public/cpp/bindings/strong_associated_binding_set.h +mojo/public/cpp/bindings/tests/mojo_test_blink_export.h +mojo/public/cpp/test_support/test_support.h +net/base/winsock_init.h +net/cert/cert_type.h +net/cert/cert_verify_proc_android.h +net/cert/scoped_nss_types.h +net/dns/notify_watcher_mac.h +net/http/http_status_code_list.h +ppapi/cpp/pass_ref.h +ppapi/lib/gl/include/GLES2/gl2.h +ppapi/lib/gl/include/GLES2/gl2ext.h +ppapi/lib/gl/include/GLES2/gl2platform.h +ppapi/lib/gl/include/KHR/khrplatform.h +ppapi/nacl_irt/irt_manifest.h +ppapi/nacl_irt/public/irt_ppapi.h +ppapi/native_client/src/shared/ppapi_proxy/ppruntime.h +ppapi/native_client/src/untrusted/pnacl_irt_shim/irt_shim_ppapi.h +ppapi/native_client/src/untrusted/pnacl_irt_shim/pnacl_shim.h +ppapi/native_client/src/untrusted/pnacl_irt_shim/shim_ppapi.h +ppapi/proxy/dispatch_reply_message.h +ppapi/proxy/plugin_proxy_delegate.h +ppapi/proxy/plugin_resource_callback.h +ppapi/proxy/ppapi_proxy_export.h +ppapi/proxy/resource_message_filter.h +ppapi/proxy/video_decoder_constants.h +ppapi/shared_impl/api_id.h +ppapi/shared_impl/dir_contents.h +ppapi/shared_impl/ppapi_shared_export.h +ppapi/shared_impl/singleton_resource_id.h +remoting/base/chromoting_event_log_writer.h 
+remoting/base/logging.h +remoting/client/display/gl_renderer_delegate.h +remoting/client/display/gl_texture_ids.h +remoting/codec/webrtc_video_encoder.h +remoting/host/linux/x11_keyboard.h +remoting/host/worker_process_ipc_delegate.h +remoting/protocol/audio_source.h +remoting/protocol/audio_stream.h +remoting/protocol/cursor_shape_stub.h +remoting/protocol/message_channel_factory.h +remoting/protocol/test_event_matchers.h +remoting/protocol/video_feedback_stub.h +remoting/protocol/video_stream.h +sandbox/linux/system_headers/capability.h +skia/ext/convolver_mips_dspr2.h +skia/ext/skia_commit_hash.h +third_party/hunspell/src/hunspell/hunvisapi.h +third_party/khronos/EGL/egl.h +third_party/khronos/EGL/eglext.h +third_party/khronos/EGL/eglplatform.h +third_party/khronos/GLES2/gl2.h +third_party/khronos/GLES2/gl2ext.h +third_party/khronos/GLES2/gl2platform.h +third_party/khronos/GLES3/gl3.h +third_party/khronos/GLES3/gl3platform.h +third_party/khronos/KHR/khrplatform.h +third_party/leveldatabase/chromium_logger.h +third_party/libaddressinput/chromium/addressinput_util.h +third_party/libphonenumber/phonenumber_api.h +third_party/libudev/libudev0.h +third_party/libudev/libudev1.h +third_party/libvpx/source/config/linux/x64/vp8_rtcd.h +third_party/libvpx/source/config/linux/x64/vp9_rtcd.h +third_party/libvpx/source/config/linux/x64/vpx_config.h +third_party/libvpx/source/config/linux/x64/vpx_dsp_rtcd.h +third_party/libvpx/source/config/linux/x64/vpx_scale_rtcd.h +third_party/libvpx/source/config/nacl/vp8_rtcd.h +third_party/libvpx/source/config/nacl/vp9_rtcd.h +third_party/libvpx/source/config/nacl/vpx_config.h +third_party/libvpx/source/config/nacl/vpx_dsp_rtcd.h +third_party/libvpx/source/config/nacl/vpx_scale_rtcd.h +third_party/libvpx/source/config/vpx_version.h +third_party/opus/src/src/opus_private.h +third_party/opus/src/tests/test_opus_common.h +third_party/protobuf/src/google/protobuf/compiler/csharp/csharp_names.h +third_party/qcms/src/halffloat.h +third_party/qcms/src/tests/qcms_test_util.h +third_party/qcms/src/tests/timing.h +third_party/snappy/linux/config.h +third_party/speech-dispatcher/libspeechd.h +third_party/sqlite/sqlite3.h +third_party/wayland/include/config.h +third_party/wayland/include/src/wayland-version.h +third_party/woff2/src/port.h +third_party/yasm/source/config/linux/config.h +third_party/yasm/source/config/linux/libyasm-stdint.h +third_party/zlib/contrib/minizip/crypt.h +tools/ipc_fuzzer/message_lib/all_message_null_macros.h +ui/base/clipboard/clipboard_test_template.h +ui/events/keycodes/keyboard_codes_posix.h +ui/gfx/overlay_transform.h +ui/gfx/scoped_ns_graphics_context_save_gstate_mac.h +ui/gfx/swap_result.h +ui/gfx/sys_color_change_listener.h +ui/gl/GL/glextchromium.h +ui/gl/gl_bindings_api_autogen_egl.h +ui/gl/gl_bindings_api_autogen_gl.h +ui/gl/gl_bindings_api_autogen_glx.h +ui/gl/gpu_preference.h +ui/gl/gpu_switching_observer.h +ui/gl/progress_reporter.h +ui/ozone/public/ozone_switches.h diff --git a/check_return_value.py b/check_return_value.py new file mode 100755 index 000000000000..2337e962da1e --- /dev/null +++ b/check_return_value.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python3 +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
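+ +# Example (commands illustrative): `check_return_value.py true` prints "1", +# while `check_return_value.py false` prints "0".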
+ +"""This program wraps an arbitrary command and prints "1" if the command ran +successfully.""" + + +import os +import subprocess +import sys + +devnull = open(os.devnull, 'wb') +if not subprocess.call(sys.argv[1:], stdout=devnull, stderr=devnull): + print(1) +else: + print(0) diff --git a/chromeos/.style.yapf b/chromeos/.style.yapf new file mode 100644 index 000000000000..fdd07237cbe3 --- /dev/null +++ b/chromeos/.style.yapf @@ -0,0 +1,2 @@ +[style] +based_on_style = yapf diff --git a/chromeos/OWNERS b/chromeos/OWNERS new file mode 100644 index 000000000000..e1058c853e86 --- /dev/null +++ b/chromeos/OWNERS @@ -0,0 +1 @@ +bpastene@chromium.org diff --git a/chromeos/PRESUBMIT.py b/chromeos/PRESUBMIT.py new file mode 100644 index 000000000000..b9734e6aa5fb --- /dev/null +++ b/chromeos/PRESUBMIT.py @@ -0,0 +1,38 @@ +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Presubmit script for build/chromeos/. + +See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for +details on the presubmit API built into depot_tools. +""" + + +USE_PYTHON3 = True + + +def CommonChecks(input_api, output_api): + results = [] + # These tests don't run on Windows and give verbose and cryptic failure + # messages. Linting the code on a platform where it will not run is also not + # valuable and gives spurious errors. + if input_api.sys.platform != 'win32': + results += input_api.canned_checks.RunPylint( + input_api, output_api, pylintrc='pylintrc', version='2.6') + tests = input_api.canned_checks.GetUnitTestsInDirectory( + input_api, + output_api, + '.', [r'^.+_test\.py$'], + run_on_python2=False, + run_on_python3=True, + skip_shebang_check=True) + results += input_api.RunTests(tests) + return results + + +def CheckChangeOnUpload(input_api, output_api): + return CommonChecks(input_api, output_api) + + +def CheckChangeOnCommit(input_api, output_api): + return CommonChecks(input_api, output_api) diff --git a/chromeos/generate_skylab_deps.py b/chromeos/generate_skylab_deps.py new file mode 100755 index 000000000000..a929245ecf70 --- /dev/null +++ b/chromeos/generate_skylab_deps.py @@ -0,0 +1,206 @@ +#!/usr/bin/env python3 +# +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import json +import os +import re +import sys + +# The basic shell script for client test run in Skylab. The arguments listed +# here will be fed by autotest at the run time. +# +# * test-launcher-summary-output: the path for the json result. It will be +# assigned by autotest, who will upload it to GCS upon test completion. +# * test-launcher-shard-index: the index for this test run. +# * test-launcher-total-shards: the total test shards. +# * test_args: arbitrary runtime arguments configured in test_suites.pyl, +# attached after '--'. +BASIC_SHELL_SCRIPT = """ +#!/bin/sh + +while [[ $# -gt 0 ]]; do + case "$1" in + --test-launcher-summary-output) + summary_output=$2 + shift 2 + ;; + + --test-launcher-shard-index) + shard_index=$2 + shift 2 + ;; + + --test-launcher-total-shards) + total_shards=$2 + shift 2 + ;; + + --) + test_args=$2 + break + ;; + + *) + break + ;; + esac +done + +if [ ! -d $(dirname $summary_output) ] ; then + mkdir -p $(dirname $summary_output) +fi + +cd `dirname $0` && cd .. +""" + + +def build_test_script(args): + # Build the shell script that will be used on the device to invoke the test. 
+ # Stored here as a list of lines. + device_test_script_contents = BASIC_SHELL_SCRIPT.split('\n') + + test_invocation = ('LD_LIBRARY_PATH=./ ./%s ' + ' --test-launcher-summary-output=$summary_output' + ' --test-launcher-shard-index=$shard_index' + ' --test-launcher-total-shards=$total_shards' + ' $test_args' % args.test_exe) + + device_test_script_contents.append(test_invocation) + with open(args.output, 'w') as w: + w.write('\n'.join(device_test_script_contents)) + os.chmod(args.output, 0o755) + + +def build_filter_file(args): + # TODO(b/227381644): This expression is hard to follow and should be + # simplified. This would require a change on the cros infra side as well + tast_expr_dict = {} + default_disabled_tests = [] + if args.disabled_tests is not None: + default_disabled_tests = [ + '!"name:{0}"'.format(test) for test in args.disabled_tests + ] + + default_enabled_test_term = '' + if args.enabled_tests is not None: + default_enabled_test_term = (' || ').join( + ['"name:{0}"'.format(test) for test in args.enabled_tests]) + + # Generate the default expression to be used when there is no known key + tast_expr = args.tast_expr if args.tast_expr else "" + + if default_disabled_tests: + default_disabled_term = " && ".join(default_disabled_tests) + tast_expr = "{0} && {1}".format(tast_expr, default_disabled_term) if \ + tast_expr else default_disabled_term + + if default_enabled_test_term: + tast_expr = "{0} && ({1})".format( + tast_expr, + default_enabled_test_term) if tast_expr else default_enabled_test_term + + tast_expr_dict['default'] = "({0})".format(tast_expr) + + # Generate an expression for each collection in the gni file + if args.tast_control is not None: + with open(args.tast_control, 'r') as tast_control_file: + gni = tast_control_file.read() + filter_lists = re.findall(r'(.*) = \[([^\]]*)\]', gni) + for filter_list in filter_lists: + tast_expr = args.tast_expr if args.tast_expr else "" + + milestone_disabled_tests = { + '!"name:{0}"'.format(test) + for test in re.findall(r'"([^"]+)"', filter_list[1]) + } + + milestone_disabled_tests.update(default_disabled_tests) + + if milestone_disabled_tests: + tast_expr = "{0} && {1}".format( + tast_expr, " && ".join(milestone_disabled_tests) + ) if tast_expr else " && ".join(milestone_disabled_tests) + + if default_enabled_test_term: + tast_expr = "{0} && ({1})".format( + tast_expr, default_enabled_test_term + ) if tast_expr else default_enabled_test_term + + if tast_expr: + tast_expr_dict[filter_list[0]] = "({0})".format(tast_expr) + + if len(tast_expr_dict) > 0: + with open(args.output, "w") as file: + json.dump(tast_expr_dict, file, indent=2) + os.chmod(args.output, 0o644) + + +def main(): + parser = argparse.ArgumentParser() + subparsers = parser.add_subparsers(dest='command') + + script_gen_parser = subparsers.add_parser('generate-runner') + script_gen_parser.add_argument( + '--test-exe', + type=str, + required=True, + help='Path to test executable to run inside the device.') + script_gen_parser.add_argument('--verbose', '-v', action='store_true') + script_gen_parser.add_argument( + '--output', + required=True, + type=str, + help='Path to create the runner script.') + script_gen_parser.set_defaults(func=build_test_script) + + filter_gen_parser = subparsers.add_parser('generate-filter') + filter_gen_parser.add_argument( + '--tast-expr', + type=str, + required=False, + help='Tast expression to determine tests to run. 
This creates the '
+      'initial set of tests that can be further filtered.')
+  filter_gen_parser.add_argument(
+      '--enabled-tests',
+      type=str,
+      required=False,
+      action='append',
+      help='Names of tests to enable (tests not listed will not run).')
+  filter_gen_parser.add_argument(
+      '--disabled-tests',
+      type=str,
+      required=False,
+      action='append',
+      help='Names of tests to disable from running.')
+  filter_gen_parser.add_argument(
+      '--tast-control',
+      type=str,
+      required=False,
+      help='Filename for the tast_control file containing version skew '
+      'test filters to generate.')
+  filter_gen_parser.add_argument(
+      '--output',
+      required=True,
+      type=str,
+      help='Path to create the plain text filter file.')
+  filter_gen_parser.set_defaults(func=build_filter_file)
+
+  args = parser.parse_args()
+
+  if (args.command == "generate-filter" and args.disabled_tests is None and
+      args.enabled_tests is None and args.tast_expr is None):
+    parser.error(
+        '--disabled-tests, --enabled-tests, or --tast-expr must be provided '
+        'to generate-filter')
+
+  args.func(args)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/chromeos/generate_skylab_deps_test.py b/chromeos/generate_skylab_deps_test.py
new file mode 100755
index 000000000000..9a30825f0db1
--- /dev/null
+++ b/chromeos/generate_skylab_deps_test.py
@@ -0,0 +1,178 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+from unittest import mock
+
+import generate_skylab_deps
+
+TAST_CONTROL = '''
+# Ignore comments
+tast_disabled_tests_from_chrome_all = [
+  "example.all.test1",
+]
+tast_disabled_tests_from_chrome_m100 = [
+  "example.m100.test1",
+]
+tast_disabled_tests_from_lacros_all = []
+'''
+
+TAST_EXPR = '"group:mainline" && "dep:chrome" && !informational'
+
+REQUIRED_ARGS = ['script', 'generate-filter', '--output', 'output.filter']
+
+
+class GenerateSkylabDepsTest(unittest.TestCase):
+
+  def testTastExpr(self):
+    file_mock = mock.mock_open(read_data=TAST_CONTROL)
+    args = REQUIRED_ARGS + ['--tast-expr', TAST_EXPR]
+
+    with mock.patch('sys.argv', args),\
+        mock.patch('builtins.open', file_mock),\
+        mock.patch('os.chmod'),\
+        mock.patch("json.dump", mock.MagicMock()) as dump:
+      generate_skylab_deps.main()
+      filter_dict = dump.call_args[0][0]
+      self.assertEqual(filter_dict['default'], '(%s)' % TAST_EXPR)
+
+  def testTastExprAndDisableTests(self):
+    file_mock = mock.mock_open(read_data=TAST_CONTROL)
+    args = REQUIRED_ARGS + [
+        '--tast-expr', TAST_EXPR, '--disabled-tests', 'disabled.test1',
+        '--disabled-tests', 'disabled.test2'
+    ]
+
+    with mock.patch('sys.argv', args),\
+        mock.patch('builtins.open', file_mock),\
+        mock.patch('os.chmod'),\
+        mock.patch("json.dump", mock.MagicMock()) as dump:
+      generate_skylab_deps.main()
+      filter_dict = dump.call_args[0][0]
+      self.assertEqual(
+          filter_dict['default'],
+          '(%s && !"name:disabled.test1" && !"name:disabled.test2")' %
+          TAST_EXPR)
+
+  def testEnableTests(self):
+    file_mock = mock.mock_open(read_data=TAST_CONTROL)
+    args = REQUIRED_ARGS + [
+        '--enabled-tests', 'enabled.test1', '--enabled-tests', 'enabled.test2'
+    ]
+
+    with mock.patch('sys.argv', args),\
+        mock.patch('builtins.open', file_mock),\
+        mock.patch('os.chmod'),\
+        mock.patch("json.dump", mock.MagicMock()) as dump:
+      generate_skylab_deps.main()
+      filter_dict = dump.call_args[0][0]
+      self.assertEqual(filter_dict['default'],
+                       '("name:enabled.test1" || "name:enabled.test2")')
+
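+  # The tests below exercise --tast-control: each disable list parsed from
+  # the gni file (e.g. tast_disabled_tests_from_chrome_m100) produces its own
+  # keyed entry in the output dict, alongside the 'default' expression.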
+  def testTastControlWithTastExpr(self):
+    file_mock = mock.mock_open(read_data=TAST_CONTROL)
+    args = REQUIRED_ARGS + [
+        '--tast-expr',
+        TAST_EXPR,
+        '--tast-control',
+        'mocked_input',
+    ]
+
+    with mock.patch('sys.argv', args),\
+        mock.patch('builtins.open', file_mock),\
+        mock.patch('os.chmod'),\
+        mock.patch("json.dump", mock.MagicMock()) as dump:
+      generate_skylab_deps.main()
+      filter_dict = dump.call_args[0][0]
+      self.assertEqual(filter_dict['default'], '(%s)' % TAST_EXPR)
+      self.assertEqual(filter_dict['tast_disabled_tests_from_chrome_m100'],
+                       '(%s && !"name:example.m100.test1")' % TAST_EXPR)
+
+  def testTastControlWithTastExprAndDisabledTests(self):
+    file_mock = mock.mock_open(read_data=TAST_CONTROL)
+    args = REQUIRED_ARGS + [
+        '--tast-expr', TAST_EXPR, '--tast-control', 'mocked_input',
+        '--disabled-tests', 'disabled.test1', '--disabled-tests',
+        'disabled.test2'
+    ]
+
+    with mock.patch('sys.argv', args),\
+        mock.patch('builtins.open', file_mock),\
+        mock.patch('os.chmod'),\
+        mock.patch("json.dump", mock.MagicMock()) as dump:
+      generate_skylab_deps.main()
+      filter_dict = dump.call_args[0][0]
+      self.assertEqual(
+          filter_dict['default'],
+          '("group:mainline" && "dep:chrome" && !informational && !'\
+          '"name:disabled.test1" && !"name:disabled.test2")'
+      )
+
+      # The order of a list built from a set is indeterminate.
+      self.assertIn('"group:mainline" && "dep:chrome" && !informational',
+                    filter_dict['tast_disabled_tests_from_chrome_m100'])
+      self.assertIn('&& !"name:disabled.test1"',
+                    filter_dict['tast_disabled_tests_from_chrome_m100'])
+      self.assertIn('&& !"name:disabled.test2"',
+                    filter_dict['tast_disabled_tests_from_chrome_m100'])
+      self.assertIn('&& !"name:example.m100.test1"',
+                    filter_dict['tast_disabled_tests_from_chrome_m100'])
+
+  def testTastControlWithTastExprAndEnabledTests(self):
+    file_mock = mock.mock_open(read_data=TAST_CONTROL)
+    args = REQUIRED_ARGS + [
+        '--tast-expr', TAST_EXPR, '--tast-control', 'mocked_input',
+        '--enabled-tests', 'enabled.test1', '--enabled-tests', 'enabled.test2'
+    ]
+
+    with mock.patch('sys.argv', args),\
+        mock.patch('builtins.open', file_mock),\
+        mock.patch('os.chmod'),\
+        mock.patch("json.dump", mock.MagicMock()) as dump:
+      generate_skylab_deps.main()
+      filter_dict = dump.call_args[0][0]
+      self.assertEqual(
+          filter_dict['default'],
+          '("group:mainline" && "dep:chrome" && !informational && '\
+          '("name:enabled.test1" || "name:enabled.test2"))'
+      )
+      self.assertEqual(
+          filter_dict['tast_disabled_tests_from_chrome_m100'],
+          '("group:mainline" && "dep:chrome" && !informational && '\
+          '!"name:example.m100.test1" && ("name:enabled.test1" '\
+          '|| "name:enabled.test2"))'
+      )
+
+  def testTastControlWithEnabledTests(self):
+    file_mock = mock.mock_open(read_data=TAST_CONTROL)
+    args = REQUIRED_ARGS + [
+        '--tast-control',
+        'mocked_input',
+        '--enabled-tests',
+        'enabled.test1',
+        '--enabled-tests',
+        'enabled.test2',
+    ]
+
+    with mock.patch('sys.argv', args),\
+        mock.patch('builtins.open', file_mock),\
+        mock.patch('os.chmod'),\
+        mock.patch("json.dump", mock.MagicMock()) as dump:
+      generate_skylab_deps.main()
+      filter_dict = dump.call_args[0][0]
+      # Should not include the 'all' collection from TAST_CONTROL, since that
+      # would need to be passed in via --disabled-tests to be included.
+      self.assertEqual(filter_dict['default'],
+                       '("name:enabled.test1" || "name:enabled.test2")')
+      self.assertEqual(
+          filter_dict['tast_disabled_tests_from_chrome_m100'],
+          '(!"name:example.m100.test1" && '\
+          '("name:enabled.test1" || "name:enabled.test2"))'
+      )
+
+
+if __name__
== '__main__': + unittest.main() diff --git a/chromeos/pylintrc b/chromeos/pylintrc new file mode 100644 index 000000000000..2a721bf2709d --- /dev/null +++ b/chromeos/pylintrc @@ -0,0 +1,15 @@ +[FORMAT] + +max-line-length=80 + +[MESSAGES CONTROL] + +disable=abstract-class-not-used,bad-continuation,bad-indentation,duplicate-code,fixme,invalid-name,locally-disabled,locally-enabled,missing-docstring,star-args,too-few-public-methods,too-many-arguments,too-many-branches,too-many-instance-attributes,too-many-lines,too-many-locals,too-many-public-methods,too-many-statements,wrong-import-position + +[REPORTS] + +reports=no + +[VARIABLES] + +dummy-variables-rgx=^_.*$|dummy diff --git a/chromeos/test_runner.py b/chromeos/test_runner.py new file mode 100755 index 000000000000..14c31e1c4a55 --- /dev/null +++ b/chromeos/test_runner.py @@ -0,0 +1,990 @@ +#!/usr/bin/env vpython3 +# +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import collections +import json +import logging +import os +import re +import shutil +import signal +import socket +import sys +import tempfile +import six + +# The following non-std imports are fetched via vpython. See the list at +# //.vpython +import dateutil.parser # pylint: disable=import-error +import jsonlines # pylint: disable=import-error +import psutil # pylint: disable=import-error + +CHROMIUM_SRC_PATH = os.path.abspath( + os.path.join(os.path.dirname(__file__), '..', '..')) + +# Use the android test-runner's gtest results support library for generating +# output json ourselves. +sys.path.insert(0, os.path.join(CHROMIUM_SRC_PATH, 'build', 'android')) +from pylib.base import base_test_result # pylint: disable=import-error +from pylib.results import json_results # pylint: disable=import-error + +sys.path.insert(0, os.path.join(CHROMIUM_SRC_PATH, 'build', 'util')) +# TODO(crbug.com/1421441): Re-enable the 'no-name-in-module' check. +from lib.results import result_sink # pylint: disable=import-error,no-name-in-module + +assert not six.PY2, 'Py2 not supported for this file.' + +import subprocess # pylint: disable=import-error,wrong-import-order + +DEFAULT_CROS_CACHE = os.path.abspath( + os.path.join(CHROMIUM_SRC_PATH, 'build', 'cros_cache')) +CHROMITE_PATH = os.path.abspath( + os.path.join(CHROMIUM_SRC_PATH, 'third_party', 'chromite')) +CROS_RUN_TEST_PATH = os.path.abspath( + os.path.join(CHROMITE_PATH, 'bin', 'cros_run_test')) + +LACROS_LAUNCHER_SCRIPT_PATH = os.path.abspath( + os.path.join(CHROMIUM_SRC_PATH, 'build', 'lacros', + 'mojo_connection_lacros_launcher.py')) + +# This is a special hostname that resolves to a different DUT in the lab +# depending on which lab machine you're on. +LAB_DUT_HOSTNAME = 'variable_chromeos_device_hostname' + +SYSTEM_LOG_LOCATIONS = [ + '/home/chronos/crash/', + '/var/log/chrome/', + '/var/log/messages', + '/var/log/ui/', +] + +TAST_DEBUG_DOC = 'https://bit.ly/2LgvIXz' + + +class TestFormatError(Exception): + pass + + +class RemoteTest: + + # This is a basic shell script that can be appended to in order to invoke the + # test on the device. + BASIC_SHELL_SCRIPT = [ + '#!/bin/sh', + + # /home and /tmp are mounted with "noexec" in the device, but some of our + # tools and tests use those dirs as a workspace (eg: vpython downloads + # python binaries to ~/.vpython-root and /tmp/vpython_bootstrap). + # /usr/local/tmp doesn't have this restriction, so change the location of + # the home and temp dirs for the duration of the test. 
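+      # Note: subclasses append to a copy of this list (see
+      # GTestTest.build_test_command) before writing the final script to disk.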
+      'export HOME=/usr/local/tmp',
+      'export TMPDIR=/usr/local/tmp',
+  ]
+
+  def __init__(self, args, unknown_args):
+    self._additional_args = unknown_args
+    self._path_to_outdir = args.path_to_outdir
+    self._test_launcher_summary_output = args.test_launcher_summary_output
+    self._logs_dir = args.logs_dir
+    self._use_vm = args.use_vm
+    self._rdb_client = result_sink.TryInitClient()
+
+    self._retries = 0
+    self._timeout = None
+    self._test_launcher_shard_index = args.test_launcher_shard_index
+    self._test_launcher_total_shards = args.test_launcher_total_shards
+
+    # The location on disk of a shell script that can be optionally used to
+    # invoke the test on the device. If it's not set, we assume self._test_cmd
+    # contains the test invocation.
+    self._on_device_script = None
+
+    self._test_cmd = [
+        CROS_RUN_TEST_PATH,
+        '--board',
+        args.board,
+        '--cache-dir',
+        args.cros_cache,
+    ]
+    if args.use_vm:
+      self._test_cmd += [
+          '--start',
+          # Don't persist any filesystem changes after the VM shuts down.
+          '--copy-on-write',
+      ]
+    else:
+      self._test_cmd += [
+          '--device', args.device if args.device else LAB_DUT_HOSTNAME
+      ]
+    if args.logs_dir:
+      for log in SYSTEM_LOG_LOCATIONS:
+        self._test_cmd += ['--results-src', log]
+      self._test_cmd += [
+          '--results-dest-dir',
+          os.path.join(args.logs_dir, 'system_logs')
+      ]
+    if args.flash:
+      self._test_cmd += ['--flash']
+    if args.public_image:
+      self._test_cmd += ['--public-image']
+
+    self._test_env = setup_env()
+
+  @property
+  def suite_name(self):
+    raise NotImplementedError('Child classes need to define suite name.')
+
+  @property
+  def test_cmd(self):
+    return self._test_cmd
+
+  def write_test_script_to_disk(self, script_contents):
+    # Since we're using an on_device_script to invoke the test, we'll need to
+    # set cwd.
+    self._test_cmd += [
+        '--remote-cmd',
+        '--cwd',
+        os.path.relpath(self._path_to_outdir, CHROMIUM_SRC_PATH),
+    ]
+    logging.info('Running the following command on the device:')
+    logging.info('\n%s', '\n'.join(script_contents))
+    fd, tmp_path = tempfile.mkstemp(suffix='.sh', dir=self._path_to_outdir)
+    os.fchmod(fd, 0o755)
+    with os.fdopen(fd, 'w') as f:
+      f.write('\n'.join(script_contents) + '\n')
+    return tmp_path
+
+  def run_test(self):
+    # Traps SIGTERM and kills all child processes of cros_run_test when it's
+    # caught. This will allow us to capture logs from the device if a test
+    # hangs and gets timeout-killed by swarming. See also:
+    # https://chromium.googlesource.com/infra/luci/luci-py/+/main/appengine/swarming/doc/Bot.md#graceful-termination_aka-the-sigterm-and-sigkill-dance
+    test_proc = None
+
+    def _kill_child_procs(trapped_signal, _):
+      logging.warning('Received signal %d. Killing child processes of test.',
+                      trapped_signal)
+      if not test_proc or not test_proc.pid:
+        # This shouldn't happen?
+        logging.error('Test process not running.')
+        return
+      for child in psutil.Process(test_proc.pid).children():
+        logging.warning('Killing process %s', child)
+        child.kill()
+
+    signal.signal(signal.SIGTERM, _kill_child_procs)
+
+    for i in range(self._retries + 1):
+      logging.info('########################################')
+      logging.info('Test attempt #%d', i)
+      logging.info('########################################')
+      test_proc = subprocess.Popen(
+          self._test_cmd,
+          stdout=sys.stdout,
+          stderr=sys.stderr,
+          env=self._test_env)
+      try:
+        test_proc.wait(timeout=self._timeout)
+      except subprocess.TimeoutExpired:  # pylint: disable=no-member
+        logging.error('Test timed out. Sending SIGTERM.')
+        # SIGTERM the proc and wait 10s for it to close.
+        test_proc.terminate()
+        try:
+          test_proc.wait(timeout=10)
+        except subprocess.TimeoutExpired:  # pylint: disable=no-member
+          # If it hasn't closed in 10s, SIGKILL it.
+          logging.error('Test did not exit in time. Sending SIGKILL.')
+          test_proc.kill()
+          test_proc.wait()
+      logging.info('Test exited with %d.', test_proc.returncode)
+      if test_proc.returncode == 0:
+        break
+
+    self.post_run(test_proc.returncode)
+    # Allow post_run to override test proc return code. (Useful when the host
+    # side Tast bin returns 0 even for failed tests.)
+    return test_proc.returncode
+
+  def post_run(self, _):
+    if self._on_device_script:
+      os.remove(self._on_device_script)
+
+  @staticmethod
+  def get_artifacts(path):
+    """Crawls a given directory for file artifacts to attach to a test.
+
+    Args:
+      path: Path to a directory to search for artifacts.
+    Returns:
+      A dict mapping name of the artifact to its absolute filepath.
+    """
+    artifacts = {}
+    for dirpath, _, filenames in os.walk(path):
+      for f in filenames:
+        artifact_path = os.path.join(dirpath, f)
+        artifact_id = os.path.relpath(artifact_path, path)
+        # Some artifacts will have non-Latin characters in the filename, eg:
+        # 'ui_tree_Chinese Pinyin-你好.txt'. ResultDB's API rejects such
+        # characters as an artifact ID, so force the file name down into ascii.
+        # For more info, see:
+        # https://source.chromium.org/chromium/infra/infra/+/main:go/src/go.chromium.org/luci/resultdb/proto/v1/artifact.proto;drc=3bff13b8037ca76ec19f9810033d914af7ec67cb;l=46
+        artifact_id = artifact_id.encode('ascii', 'replace').decode()
+        artifact_id = artifact_id.replace('\\', '?')
+        artifacts[artifact_id] = {
+            'filePath': artifact_path,
+        }
+    return artifacts
+
+
+class TastTest(RemoteTest):
+
+  def __init__(self, args, unknown_args):
+    super().__init__(args, unknown_args)
+
+    self._suite_name = args.suite_name
+    self._tast_vars = args.tast_vars
+    self._tast_retries = args.tast_retries
+    self._tests = args.tests
+    # The CQ passes in '--gtest_filter' when specifying tests to skip. Store it
+    # here and parse it later to integrate it into Tast executions.
+    self._gtest_style_filter = args.gtest_filter
+    self._attr_expr = args.attr_expr
+    self._should_strip = args.strip_chrome
+    self._deploy_lacros = args.deploy_lacros
+    self._deploy_chrome = args.deploy_chrome
+
+    if not self._logs_dir:
+      # The host-side Tast bin returns 0 when tests fail, so we need to capture
+      # and parse its json results to reliably determine if tests fail.
+      raise TestFormatError(
+          'When using the host-side Tast bin, "--logs-dir" must be passed in '
+          'order to parse its results.')
+
+    # If the first test filter is negative, it should be safe to assume all of
+    # them are, so just test the first filter.
+    if self._gtest_style_filter and self._gtest_style_filter[0] == '-':
+      raise TestFormatError('Negative test filters not supported for Tast.')
+
+  @property
+  def suite_name(self):
+    return self._suite_name
+
+  def build_test_command(self):
+    unsupported_args = [
+        '--test-launcher-retry-limit',
+        '--test-launcher-batch-limit',
+        '--gtest_repeat',
+    ]
+    for unsupported_arg in unsupported_args:
+      if any(arg.startswith(unsupported_arg) for arg in self._additional_args):
+        logging.info(
+            '%s not supported for Tast tests. The arg will be ignored.',
+            unsupported_arg)
+        self._additional_args = [
+            arg for arg in self._additional_args
+            if not arg.startswith(unsupported_arg)
+        ]
+
+    # Lacros deployment mounts itself by default.
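+    # Whenever ash-chrome is deployed (on its own, or alongside lacros via
+    # --deploy-chrome), it gets an explicit "--deploy --mount" below.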
+ if self._deploy_lacros: + self._test_cmd.extend([ + '--deploy-lacros', '--lacros-launcher-script', + LACROS_LAUNCHER_SCRIPT_PATH + ]) + if self._deploy_chrome: + self._test_cmd.extend(['--deploy', '--mount']) + else: + self._test_cmd.extend(['--deploy', '--mount']) + self._test_cmd += [ + '--build-dir', + os.path.relpath(self._path_to_outdir, CHROMIUM_SRC_PATH) + ] + self._additional_args + + # Capture tast's results in the logs dir as well. + if self._logs_dir: + self._test_cmd += [ + '--results-dir', + self._logs_dir, + ] + self._test_cmd += [ + '--tast-total-shards=%d' % self._test_launcher_total_shards, + '--tast-shard-index=%d' % self._test_launcher_shard_index, + ] + # If we're using a test filter, replace the contents of the Tast + # conditional with a long list of "name:test" expressions, one for each + # test in the filter. + if self._gtest_style_filter: + if self._attr_expr or self._tests: + logging.warning( + 'Presence of --gtest_filter will cause the specified Tast expr' + ' or test list to be ignored.') + names = [] + for test in self._gtest_style_filter.split(':'): + names.append('"name:%s"' % test) + self._attr_expr = '(' + ' || '.join(names) + ')' + + if self._attr_expr: + # Don't use pipes.quote() here. Something funky happens with the arg + # as it gets passed down from cros_run_test to tast. (Tast picks up the + # escaping single quotes and complains that the attribute expression + # "must be within parentheses".) + self._test_cmd.append('--tast=%s' % self._attr_expr) + else: + self._test_cmd.append('--tast') + self._test_cmd.extend(self._tests) + + for v in self._tast_vars or []: + self._test_cmd.extend(['--tast-var', v]) + + if self._tast_retries: + self._test_cmd.append('--tast-retries=%d' % self._tast_retries) + + # Mounting ash-chrome gives it enough disk space to not need stripping, + # but only for one not instrumented with code coverage. + # Lacros uses --nostrip by default, so there is no need to specify. + if not self._deploy_lacros and not self._should_strip: + self._test_cmd.append('--nostrip') + + def post_run(self, return_code): + tast_results_path = os.path.join(self._logs_dir, 'streamed_results.jsonl') + if not os.path.exists(tast_results_path): + logging.error( + 'Tast results not found at %s. Falling back to generic result ' + 'reporting.', tast_results_path) + return super().post_run(return_code) + + # See the link below for the format of the results: + # https://godoc.org/chromium.googlesource.com/chromiumos/platform/tast.git/src/chromiumos/cmd/tast/run#TestResult + with jsonlines.open(tast_results_path) as reader: + tast_results = collections.deque(reader) + + suite_results = base_test_result.TestRunResults() + for test in tast_results: + errors = test['errors'] + start, end = test['start'], test['end'] + # Use dateutil to parse the timestamps since datetime can't handle + # nanosecond precision. + duration = dateutil.parser.parse(end) - dateutil.parser.parse(start) + # If the duration is negative, Tast has likely reported an incorrect + # duration. See https://issuetracker.google.com/issues/187973541. Round + # up to 0 in that case to avoid confusing RDB. 
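+      # (For example, a -0.5s timedelta would otherwise surface as -500ms.)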
+      duration_ms = max(duration.total_seconds() * 1000, 0)
+      if bool(test['skipReason']):
+        result = base_test_result.ResultType.SKIP
+      elif errors:
+        result = base_test_result.ResultType.FAIL
+      else:
+        result = base_test_result.ResultType.PASS
+      primary_error_message = None
+      error_log = ''
+      if errors:
+        # See the link below for the format of these errors:
+        # https://source.chromium.org/chromiumos/chromiumos/codesearch/+/main:src/platform/tast/src/chromiumos/tast/cmd/tast/internal/run/resultsjson/resultsjson.go
+        primary_error_message = errors[0]['reason']
+        for err in errors:
+          error_log += err['stack'] + '\n'
+      debug_link = ("If you're unsure why this test failed, consult the steps "
+                    'outlined <a href="%s">here</a>.' % TAST_DEBUG_DOC)
+      base_result = base_test_result.BaseTestResult(
+          test['name'], result, duration=duration_ms, log=error_log)
+      suite_results.AddResult(base_result)
+      self._maybe_handle_perf_results(test['name'])
+
+      if self._rdb_client:
+        # Walk the contents of the test's "outDir" and attach any file found
+        # inside as an RDB 'artifact'. (This could include system logs, screen
+        # shots, etc.)
+        artifacts = self.get_artifacts(test['outDir'])
+        self._rdb_client.Post(
+            test['name'],
+            result,
+            duration_ms,
+            error_log,
+            None,
+            artifacts=artifacts,
+            failure_reason=primary_error_message,
+            html_artifact=debug_link)
+
+    if self._rdb_client and self._logs_dir:
+      # Attach artifacts from the device that don't apply to a single test.
+      artifacts = self.get_artifacts(
+          os.path.join(self._logs_dir, 'system_logs'))
+      artifacts.update(
+          self.get_artifacts(os.path.join(self._logs_dir, 'crashes')))
+      self._rdb_client.ReportInvocationLevelArtifacts(artifacts)
+
+    if self._test_launcher_summary_output:
+      with open(self._test_launcher_summary_output, 'w') as f:
+        json.dump(json_results.GenerateResultsDict([suite_results]), f)
+
+    if not suite_results.DidRunPass():
+      return 1
+    if return_code:
+      logging.warning(
+          'No failed tests found, but exit code of %d was returned from '
+          'cros_run_test.', return_code)
+      return return_code
+    return 0
+
+  def _maybe_handle_perf_results(self, test_name):
+    """Prepares any perf results from |test_name| for process_perf_results.
+
+    - process_perf_results looks for top level directories containing a
+      perf_results.json file and a test_results.json file. The directory names
+      are used as the benchmark names.
+    - If a perf_results.json or results-chart.json file exists in the
+      |test_name| results directory, a top level directory is created and the
+      perf results file is copied to perf_results.json.
+    - A trivial test_results.json file is also created to indicate that the
+      test succeeded (this function would not be called otherwise).
+    - When process_perf_results is run, it will find the expected files in the
+      named directory and upload the benchmark results.
+    """
+
+    perf_results = os.path.join(self._logs_dir, 'tests', test_name,
+                                'perf_results.json')
+    # TODO(stevenjb): Remove check for crosbolt results-chart.json file.
+    if not os.path.exists(perf_results):
+      perf_results = os.path.join(self._logs_dir, 'tests', test_name,
+                                  'results-chart.json')
+    if os.path.exists(perf_results):
+      benchmark_dir = os.path.join(self._logs_dir, test_name)
+      if not os.path.isdir(benchmark_dir):
+        os.makedirs(benchmark_dir)
+      shutil.copyfile(perf_results,
+                      os.path.join(benchmark_dir, 'perf_results.json'))
+      # process_perf_results.py expects a test_results.json file.
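+      # The resulting layout is, e.g.:
+      #   <logs_dir>/<test_name>/perf_results.json
+      #   <logs_dir>/<test_name>/test_results.json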
+ test_results = {'valid': True, 'failures': []} + with open(os.path.join(benchmark_dir, 'test_results.json'), 'w') as out: + json.dump(test_results, out) + + +class GTestTest(RemoteTest): + + # The following list corresponds to paths that should not be copied over to + # the device during tests. In other words, these files are only ever used on + # the host. + _FILE_IGNORELIST = [ + re.compile(r'.*build/android.*'), + re.compile(r'.*build/chromeos.*'), + re.compile(r'.*build/cros_cache.*'), + # The following matches anything under //testing/ that isn't under + # //testing/buildbot/filters/. + re.compile(r'.*testing/(?!buildbot/filters).*'), + re.compile(r'.*third_party/chromite.*'), + ] + + def __init__(self, args, unknown_args): + super().__init__(args, unknown_args) + + self._test_exe = args.test_exe + self._runtime_deps_path = args.runtime_deps_path + self._vpython_dir = args.vpython_dir + + self._on_device_script = None + self._env_vars = args.env_var + self._stop_ui = args.stop_ui + self._trace_dir = args.trace_dir + + @property + def suite_name(self): + return self._test_exe + + def build_test_command(self): + # To keep things easy for us, ensure both types of output locations are + # the same. + if self._test_launcher_summary_output and self._logs_dir: + json_out_dir = os.path.dirname(self._test_launcher_summary_output) or '.' + if os.path.abspath(json_out_dir) != os.path.abspath(self._logs_dir): + raise TestFormatError( + '--test-launcher-summary-output and --logs-dir must point to ' + 'the same directory.') + + if self._test_launcher_summary_output: + result_dir, result_file = os.path.split( + self._test_launcher_summary_output) + # If args.test_launcher_summary_output is a file in cwd, result_dir will + # be an empty string, so replace it with '.' when this is the case so + # cros_run_test can correctly handle it. + if not result_dir: + result_dir = '.' + device_result_file = '/tmp/%s' % result_file + self._test_cmd += [ + '--results-src', + device_result_file, + '--results-dest-dir', + result_dir, + ] + + if self._trace_dir and self._logs_dir: + trace_path = os.path.dirname(self._trace_dir) or '.' + if os.path.abspath(trace_path) != os.path.abspath(self._logs_dir): + raise TestFormatError( + '--trace-dir and --logs-dir must point to the same directory.') + + if self._trace_dir: + trace_path, trace_dirname = os.path.split(self._trace_dir) + device_trace_dir = '/tmp/%s' % trace_dirname + self._test_cmd += [ + '--results-src', + device_trace_dir, + '--results-dest-dir', + trace_path, + ] + + # Build the shell script that will be used on the device to invoke the test. + # Stored here as a list of lines. + device_test_script_contents = self.BASIC_SHELL_SCRIPT[:] + for var_name, var_val in self._env_vars: + device_test_script_contents += ['export %s=%s' % (var_name, var_val)] + + if self._vpython_dir: + vpython_path = os.path.join(self._path_to_outdir, self._vpython_dir, + 'vpython3') + cpython_path = os.path.join(self._path_to_outdir, self._vpython_dir, + 'bin', 'python3') + if not os.path.exists(vpython_path) or not os.path.exists(cpython_path): + raise TestFormatError( + '--vpython-dir must point to a dir with both ' + 'infra/3pp/tools/cpython3 and infra/tools/luci/vpython installed.') + vpython_spec_path = os.path.relpath( + os.path.join(CHROMIUM_SRC_PATH, '.vpython3'), self._path_to_outdir) + # Initialize the vpython cache. This can take 10-20s, and some tests + # can't afford to wait that long on the first invocation. 
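+      # The following lines run on the device as part of the generated
+      # script: they put the deployed vpython dirs on PATH and then warm the
+      # vpython package cache before the test binary is invoked.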
+ device_test_script_contents.extend([ + 'export PATH=$PWD/%s:$PWD/%s/bin/:$PATH' % + (self._vpython_dir, self._vpython_dir), + 'vpython3 -vpython-spec %s -vpython-tool install' % + (vpython_spec_path), + ]) + + test_invocation = ('LD_LIBRARY_PATH=./ ./%s --test-launcher-shard-index=%d ' + '--test-launcher-total-shards=%d' % + (self._test_exe, self._test_launcher_shard_index, + self._test_launcher_total_shards)) + if self._test_launcher_summary_output: + test_invocation += ' --test-launcher-summary-output=%s' % ( + device_result_file) + + if self._trace_dir: + device_test_script_contents.extend([ + 'rm -rf %s' % device_trace_dir, + 'sudo -E -u chronos -- /bin/bash -c "mkdir -p %s"' % device_trace_dir, + ]) + test_invocation += ' --trace-dir=%s' % device_trace_dir + + if self._additional_args: + test_invocation += ' %s' % ' '.join(self._additional_args) + + if self._stop_ui: + device_test_script_contents += [ + 'stop ui', + ] + # The UI service on the device owns the chronos user session, so shutting + # it down as chronos kills the entire execution of the test. So we'll have + # to run as root up until the test invocation. + test_invocation = ( + 'sudo -E -u chronos -- /bin/bash -c "%s"' % test_invocation) + # And we'll need to chown everything since cros_run_test's "--as-chronos" + # option normally does that for us. + device_test_script_contents.append('chown -R chronos: ../..') + else: + self._test_cmd += [ + # Some tests fail as root, so run as the less privileged user + # 'chronos'. + '--as-chronos', + ] + + device_test_script_contents.append(test_invocation) + + self._on_device_script = self.write_test_script_to_disk( + device_test_script_contents) + + runtime_files = [os.path.relpath(self._on_device_script)] + runtime_files += self._read_runtime_files() + if self._vpython_dir: + # --vpython-dir is relative to the out dir, but --files expects paths + # relative to src dir, so fix the path up a bit. + runtime_files.append( + os.path.relpath( + os.path.abspath( + os.path.join(self._path_to_outdir, self._vpython_dir)), + CHROMIUM_SRC_PATH)) + + for f in runtime_files: + self._test_cmd.extend(['--files', f]) + + self._test_cmd += [ + '--', + './' + os.path.relpath(self._on_device_script, self._path_to_outdir) + ] + + def _read_runtime_files(self): + if not self._runtime_deps_path: + return [] + + abs_runtime_deps_path = os.path.abspath( + os.path.join(self._path_to_outdir, self._runtime_deps_path)) + with open(abs_runtime_deps_path) as runtime_deps_file: + files = [l.strip() for l in runtime_deps_file if l] + rel_file_paths = [] + for f in files: + rel_file_path = os.path.relpath( + os.path.abspath(os.path.join(self._path_to_outdir, f))) + if not any(regex.match(rel_file_path) for regex in self._FILE_IGNORELIST): + rel_file_paths.append(rel_file_path) + return rel_file_paths + + def post_run(self, _): + if self._on_device_script: + os.remove(self._on_device_script) + + if self._test_launcher_summary_output and self._rdb_client: + logging.error('Native ResultDB integration is not supported for GTests. ' + 'Upload results via result_adapter instead. ' + 'See crbug.com/1330441.') + + +def device_test(args, unknown_args): + # cros_run_test has trouble with relative paths that go up directories, + # so cd to src/, which should be the root of all data deps. + os.chdir(CHROMIUM_SRC_PATH) + + # TODO: Remove the above when depot_tool's pylint is updated to include the + # fix to https://github.com/PyCQA/pylint/issues/710. 
+  if args.test_type == 'tast':
+    test = TastTest(args, unknown_args)
+  else:
+    test = GTestTest(args, unknown_args)
+
+  test.build_test_command()
+  logging.info('Running the following command on the device:')
+  logging.info(' '.join(test.test_cmd))
+
+  return test.run_test()
+
+
+def host_cmd(args, cmd_args):
+  if not cmd_args:
+    raise TestFormatError('Must specify command to run on the host.')
+  if args.deploy_chrome and not args.path_to_outdir:
+    raise TestFormatError(
+        '--path-to-outdir must be specified if --deploy-chrome is passed.')
+
+  cros_run_test_cmd = [
+      CROS_RUN_TEST_PATH,
+      '--board',
+      args.board,
+      '--cache-dir',
+      os.path.join(CHROMIUM_SRC_PATH, args.cros_cache),
+  ]
+  if args.use_vm:
+    cros_run_test_cmd += [
+        '--start',
+        # Don't persist any filesystem changes after the VM shuts down.
+        '--copy-on-write',
+    ]
+  else:
+    cros_run_test_cmd += [
+        '--device', args.device if args.device else LAB_DUT_HOSTNAME
+    ]
+  if args.verbose:
+    cros_run_test_cmd.append('--debug')
+  if args.flash:
+    cros_run_test_cmd.append('--flash')
+  if args.public_image:
+    cros_run_test_cmd += ['--public-image']
+
+  if args.logs_dir:
+    for log in SYSTEM_LOG_LOCATIONS:
+      cros_run_test_cmd += ['--results-src', log]
+    cros_run_test_cmd += [
+        '--results-dest-dir',
+        os.path.join(args.logs_dir, 'system_logs')
+    ]
+
+  test_env = setup_env()
+  if args.deploy_chrome or args.deploy_lacros:
+    if args.deploy_lacros:
+      cros_run_test_cmd.extend([
+          '--deploy-lacros', '--lacros-launcher-script',
+          LACROS_LAUNCHER_SCRIPT_PATH
+      ])
+      if args.deploy_chrome:
+        # Mounting ash-chrome gives it enough disk space to not need stripping
+        # most of the time.
+        cros_run_test_cmd.extend(['--deploy', '--mount'])
+    else:
+      # Mounting ash-chrome gives it enough disk space to not need stripping
+      # most of the time.
+      cros_run_test_cmd.extend(['--deploy', '--mount'])
+
+    if not args.strip_chrome:
+      cros_run_test_cmd.append('--nostrip')
+
+    cros_run_test_cmd += [
+        '--build-dir',
+        os.path.join(CHROMIUM_SRC_PATH, args.path_to_outdir)
+    ]
+
+  cros_run_test_cmd += [
+      '--host-cmd',
+      '--',
+  ] + cmd_args
+
+  logging.info('Running the following command:')
+  logging.info(' '.join(cros_run_test_cmd))
+
+  return subprocess.call(
+      cros_run_test_cmd, stdout=sys.stdout, stderr=sys.stderr, env=test_env)
+
+
+def setup_env():
+  """Returns a copy of the current env with some needed vars added."""
+  env = os.environ.copy()
+  # Some chromite scripts expect chromite/bin to be on PATH.
+  env['PATH'] = env['PATH'] + ':' + os.path.join(CHROMITE_PATH, 'bin')
+  # deploy_chrome needs a set of GN args used to build chrome to determine if
+  # certain libraries need to be pushed to the device. It looks for the args
+  # via an env var. To trigger the default deploying behavior, give it a dummy
+  # set of args.
+  # TODO(crbug.com/823996): Make the GN-dependent deps controllable via cmd
+  # line args.
+ if not env.get('GN_ARGS'): + env['GN_ARGS'] = 'enable_nacl = true' + if not env.get('USE'): + env['USE'] = 'highdpi' + return env + + +def add_common_args(*parsers): + for parser in parsers: + parser.add_argument('--verbose', '-v', action='store_true') + parser.add_argument( + '--board', type=str, required=True, help='Type of CrOS device.') + parser.add_argument( + '--deploy-chrome', + action='store_true', + help='Will deploy a locally built ash-chrome binary to the device ' + 'before running the host-cmd.') + parser.add_argument( + '--deploy-lacros', action='store_true', help='Deploy a lacros-chrome.') + parser.add_argument( + '--cros-cache', + type=str, + default=DEFAULT_CROS_CACHE, + help='Path to cros cache.') + parser.add_argument( + '--path-to-outdir', + type=str, + required=True, + help='Path to output directory, all of whose contents will be ' + 'deployed to the device.') + parser.add_argument( + '--runtime-deps-path', + type=str, + help='Runtime data dependency file from GN.') + parser.add_argument( + '--vpython-dir', + type=str, + help='Location on host of a directory containing a vpython binary to ' + 'deploy to the device before the test starts. The location of ' + 'this dir will be added onto PATH in the device. WARNING: The ' + 'arch of the device might not match the arch of the host, so ' + 'avoid using "${platform}" when downloading vpython via CIPD.') + parser.add_argument( + '--logs-dir', + type=str, + dest='logs_dir', + help='Will copy everything under /var/log/ from the device after the ' + 'test into the specified dir.') + # Shard args are parsed here since we might also specify them via env vars. + parser.add_argument( + '--test-launcher-shard-index', + type=int, + default=os.environ.get('GTEST_SHARD_INDEX', 0), + help='Index of the external shard to run.') + parser.add_argument( + '--test-launcher-total-shards', + type=int, + default=os.environ.get('GTEST_TOTAL_SHARDS', 1), + help='Total number of external shards.') + parser.add_argument( + '--flash', + action='store_true', + help='Will flash the device to the current SDK version before running ' + 'the test.') + parser.add_argument( + '--public-image', + action='store_true', + help='Will flash a public "full" image to the device.') + parser.add_argument( + '--magic-vm-cache', + help='Path to the magic CrOS VM cache dir. See the comment above ' + '"magic_cros_vm_cache" in mixins.pyl for more info.') + + vm_or_device_group = parser.add_mutually_exclusive_group() + vm_or_device_group.add_argument( + '--use-vm', + action='store_true', + help='Will run the test in the VM instead of a device.') + vm_or_device_group.add_argument( + '--device', + type=str, + help='Hostname (or IP) of device to run the test on. This arg is not ' + 'required if --use-vm is set.') + + +def main(): + parser = argparse.ArgumentParser() + subparsers = parser.add_subparsers(dest='test_type') + # Host-side test args. + host_cmd_parser = subparsers.add_parser( + 'host-cmd', + help='Runs a host-side test. Pass the host-side command to run after ' + '"--". 
If --use-vm is passed, hostname and port for the device '
+      'will be 127.0.0.1:9222.')
+  host_cmd_parser.set_defaults(func=host_cmd)
+  host_cmd_parser.add_argument(
+      '--strip-chrome',
+      action='store_true',
+      help='Strips symbols from ash-chrome or lacros-chrome before deploying '
+      'to the device.')
+
+  gtest_parser = subparsers.add_parser(
+      'gtest', help='Runs a device-side gtest.')
+  gtest_parser.set_defaults(func=device_test)
+  gtest_parser.add_argument(
+      '--test-exe',
+      type=str,
+      required=True,
+      help='Path to test executable to run inside the device.')
+
+  # GTest args. Some are passed down to the test binary in the device. Others
+  # are parsed here since they might need tweaking or special handling.
+  gtest_parser.add_argument(
+      '--test-launcher-summary-output',
+      type=str,
+      help='When set, will pass the same option down to the test and retrieve '
+      'its result file at the specified location.')
+  gtest_parser.add_argument(
+      '--stop-ui',
+      action='store_true',
+      help='Will stop the UI service in the device before running the test.')
+  gtest_parser.add_argument(
+      '--trace-dir',
+      type=str,
+      help='When set, will pass down to the test to generate the trace and '
+      'retrieve the trace files to the specified location.')
+  gtest_parser.add_argument(
+      '--env-var',
+      nargs=2,
+      action='append',
+      default=[],
+      help='Env var to set on the device for the duration of the test. '
+      'Expected format is "--env-var SOME_VAR_NAME some_var_value". Specify '
+      'multiple times for more than one var.')
+
+  # Tast test args.
+  # pylint: disable=line-too-long
+  tast_test_parser = subparsers.add_parser(
+      'tast',
+      help='Runs a device-side set of Tast tests. For more details, see: '
+      'https://chromium.googlesource.com/chromiumos/platform/tast/+/main/docs/running_tests.md'
+  )
+  tast_test_parser.set_defaults(func=device_test)
+  tast_test_parser.add_argument(
+      '--suite-name',
+      type=str,
+      required=True,
+      help='Name to apply to the set of Tast tests to run. This has no effect '
+      'on what is executed, but is used mainly for test results reporting '
+      'and tracking (eg: flakiness dashboard).')
+  tast_test_parser.add_argument(
+      '--test-launcher-summary-output',
+      type=str,
+      help='Generates a simple GTest-style JSON result file for the test run.')
+  tast_test_parser.add_argument(
+      '--attr-expr',
+      type=str,
+      help='A boolean expression whose matching tests will run '
+      '(eg: ("dep:chrome")).')
+  tast_test_parser.add_argument(
+      '--strip-chrome',
+      action='store_true',
+      help='Strips symbols from ash-chrome before deploying to the device.')
+  tast_test_parser.add_argument(
+      '--tast-var',
+      action='append',
+      dest='tast_vars',
+      help='Runtime variables for Tast tests, expected to be given as '
+      '"key=value" pairs.')
+  tast_test_parser.add_argument(
+      '--tast-retries',
+      type=int,
+      dest='tast_retries',
+      help='Number of retries for failed Tast tests on the same DUT.')
+  tast_test_parser.add_argument(
+      '--test',
+      '-t',
+      action='append',
+      dest='tests',
+      help='A Tast test to run in the device (eg: "login.Chrome").')
+  tast_test_parser.add_argument(
+      '--gtest_filter',
+      type=str,
+      help="Similar to GTest's arg of the same name, this will filter out the "
+      "specified tests from the Tast run. However, due to the nature of Tast's "
However, due to the nature of Tast's " + 'cmd-line API, this will overwrite the value(s) of "--test" above.') + + add_common_args(gtest_parser, tast_test_parser, host_cmd_parser) + args, unknown_args = parser.parse_known_args() + # Re-add N-1 -v/--verbose flags to the args we'll pass to whatever we are + # running. The assumption is that only one verbosity incrase would be meant + # for this script since it's a boolean value instead of increasing verbosity + # with more instances. + verbose_flags = [a for a in sys.argv if a in ('-v', '--verbose')] + if verbose_flags: + unknown_args += verbose_flags[1:] + + logging.basicConfig(level=logging.DEBUG if args.verbose else logging.WARN) + + if not args.use_vm and not args.device: + logging.warning( + 'The test runner is now assuming running in the lab environment, if ' + 'this is unintentional, please re-invoke the test runner with the ' + '"--use-vm" arg if using a VM, otherwise use the "--device=" arg ' + 'to specify a DUT.') + + # If we're not running on a VM, but haven't specified a hostname, assume + # we're on a lab bot and are trying to run a test on a lab DUT. See if the + # magic lab DUT hostname resolves to anything. (It will in the lab and will + # not on dev machines.) + try: + socket.getaddrinfo(LAB_DUT_HOSTNAME, None) + except socket.gaierror: + logging.error('The default lab DUT hostname of %s is unreachable.', + LAB_DUT_HOSTNAME) + return 1 + + if args.flash and args.public_image: + # The flashing tools depend on being unauthenticated with GS when flashing + # public images, so make sure the env var GS uses to locate its creds is + # unset in that case. + os.environ.pop('BOTO_CONFIG', None) + + if args.magic_vm_cache: + full_vm_cache_path = os.path.join(CHROMIUM_SRC_PATH, args.magic_vm_cache) + if os.path.exists(full_vm_cache_path): + with open(os.path.join(full_vm_cache_path, 'swarming.txt'), 'w') as f: + f.write('non-empty file to make swarming persist this cache') + + return args.func(args, unknown_args) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/chromeos/test_runner_test.py b/chromeos/test_runner_test.py new file mode 100755 index 000000000000..c61c7a443341 --- /dev/null +++ b/chromeos/test_runner_test.py @@ -0,0 +1,381 @@ +#!/usr/bin/env vpython3 +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import json +import os +import shutil +import sys +import tempfile +import unittest +import six + +# The following non-std imports are fetched via vpython. See the list at +# //.vpython +import mock # pylint: disable=import-error +from parameterized import parameterized # pylint: disable=import-error + +import test_runner + +_TAST_TEST_RESULTS_JSON = { + "name": "login.Chrome", + "errors": None, + "start": "2020-01-01T15:41:30.799228462-08:00", + "end": "2020-01-01T15:41:53.318914698-08:00", + "skipReason": "" +} + + +class TestRunnerTest(unittest.TestCase): + + def setUp(self): + self._tmp_dir = tempfile.mkdtemp() + self.mock_rdb = mock.patch.object( + test_runner.result_sink, 'TryInitClient', return_value=None) + self.mock_rdb.start() + + def tearDown(self): + shutil.rmtree(self._tmp_dir, ignore_errors=True) + self.mock_rdb.stop() + + def safeAssertItemsEqual(self, list1, list2): + """A Py3 safe version of assertItemsEqual. + + See https://bugs.python.org/issue17866. 
+    """
+    if six.PY3:
+      self.assertSetEqual(set(list1), set(list2))
+    else:
+      self.assertItemsEqual(list1, list2)
+
+
+class TastTests(TestRunnerTest):
+
+  def get_common_tast_args(self, use_vm):
+    return [
+        'script_name',
+        'tast',
+        '--suite-name=chrome_all_tast_tests',
+        '--board=eve',
+        '--flash',
+        '--path-to-outdir=out_eve/Release',
+        '--logs-dir=%s' % self._tmp_dir,
+        '--use-vm' if use_vm else '--device=localhost:2222',
+    ]
+
+  def get_common_tast_expectations(self, use_vm, is_lacros=False):
+    expectation = [
+        test_runner.CROS_RUN_TEST_PATH,
+        '--board',
+        'eve',
+        '--cache-dir',
+        test_runner.DEFAULT_CROS_CACHE,
+        '--results-dest-dir',
+        '%s/system_logs' % self._tmp_dir,
+        '--flash',
+        '--build-dir',
+        'out_eve/Release',
+        '--results-dir',
+        self._tmp_dir,
+        '--tast-total-shards=1',
+        '--tast-shard-index=0',
+    ]
+    expectation.extend(['--start', '--copy-on-write']
+                       if use_vm else ['--device', 'localhost:2222'])
+    for p in test_runner.SYSTEM_LOG_LOCATIONS:
+      expectation.extend(['--results-src', p])
+
+    if not is_lacros:
+      expectation += [
+          '--mount',
+          '--deploy',
+          '--nostrip',
+      ]
+    return expectation
+
+  def test_tast_gtest_filter(self):
+    """Tests running tast tests with a gtest-style filter."""
+    with open(os.path.join(self._tmp_dir, 'streamed_results.jsonl'), 'w') as f:
+      json.dump(_TAST_TEST_RESULTS_JSON, f)
+
+    args = self.get_common_tast_args(False) + [
+        '--attr-expr=( "group:mainline" && "dep:chrome" && !informational)',
+        '--gtest_filter=login.Chrome:ui.WindowControl',
+    ]
+    with mock.patch.object(sys, 'argv', args),\
+        mock.patch.object(test_runner.subprocess, 'Popen') as mock_popen:
+      mock_popen.return_value.returncode = 0
+
+      test_runner.main()
+      # The gtest filter should cause the Tast expr to be replaced with a list
+      # of the tests in the filter.
+      expected_cmd = self.get_common_tast_expectations(False) + [
+          '--tast=("name:login.Chrome" || "name:ui.WindowControl")'
+      ]
+
+      self.safeAssertItemsEqual(expected_cmd, mock_popen.call_args[0][0])
+
+  @parameterized.expand([
+      [True],
+      [False],
+  ])
+  def test_tast_attr_expr(self, use_vm):
+    """Tests running tast tests specified by an attribute expression."""
+    with open(os.path.join(self._tmp_dir, 'streamed_results.jsonl'), 'w') as f:
+      json.dump(_TAST_TEST_RESULTS_JSON, f)
+
+    args = self.get_common_tast_args(use_vm) + [
+        '--attr-expr=( "group:mainline" && "dep:chrome" && !informational)',
+    ]
+    with mock.patch.object(sys, 'argv', args),\
+        mock.patch.object(test_runner.subprocess, 'Popen') as mock_popen:
+      mock_popen.return_value.returncode = 0
+
+      test_runner.main()
+      expected_cmd = self.get_common_tast_expectations(use_vm) + [
+          '--tast=( "group:mainline" && "dep:chrome" && !informational)',
+      ]
+
+      self.safeAssertItemsEqual(expected_cmd, mock_popen.call_args[0][0])
+
+  @parameterized.expand([
+      [True],
+      [False],
+  ])
+  def test_tast_lacros(self, use_vm):
+    """Tests running tast tests for Lacros."""
+    with open(os.path.join(self._tmp_dir, 'streamed_results.jsonl'), 'w') as f:
+      json.dump(_TAST_TEST_RESULTS_JSON, f)
+
+    args = self.get_common_tast_args(use_vm) + [
+        '-t=lacros.Basic',
+        '--deploy-lacros',
+    ]
+
+    with mock.patch.object(sys, 'argv', args),\
+        mock.patch.object(test_runner.subprocess, 'Popen') as mock_popen:
+      mock_popen.return_value.returncode = 0
+
+      test_runner.main()
+      expected_cmd = self.get_common_tast_expectations(
+          use_vm, is_lacros=True) + [
+              '--tast',
+              'lacros.Basic',
+              '--deploy-lacros',
+              '--lacros-launcher-script',
+              test_runner.LACROS_LAUNCHER_SCRIPT_PATH,
+          ]
+
+      self.safeAssertItemsEqual(expected_cmd, mock_popen.call_args[0][0])
+
+  @parameterized.expand([
+      [True],
+      [False],
+  ])
+  def test_tast_with_vars(self, use_vm):
+    """Tests running tast tests with runtime variables."""
+    with open(os.path.join(self._tmp_dir, 'streamed_results.jsonl'), 'w') as f:
+      json.dump(_TAST_TEST_RESULTS_JSON, f)
+
+    args = self.get_common_tast_args(use_vm) + [
+        '-t=login.Chrome',
+        '--tast-var=key=value',
+    ]
+    with mock.patch.object(sys, 'argv', args),\
+        mock.patch.object(test_runner.subprocess, 'Popen') as mock_popen:
+      mock_popen.return_value.returncode = 0
+      test_runner.main()
+      expected_cmd = self.get_common_tast_expectations(use_vm) + [
+          '--tast', 'login.Chrome', '--tast-var', 'key=value'
+      ]
+
+      self.safeAssertItemsEqual(expected_cmd, mock_popen.call_args[0][0])
+
+  @parameterized.expand([
+      [True],
+      [False],
+  ])
+  def test_tast_retries(self, use_vm):
+    """Tests running tast tests with retries."""
+    with open(os.path.join(self._tmp_dir, 'streamed_results.jsonl'), 'w') as f:
+      json.dump(_TAST_TEST_RESULTS_JSON, f)
+
+    args = self.get_common_tast_args(use_vm) + [
+        '-t=login.Chrome',
+        '--tast-retries=1',
+    ]
+    with mock.patch.object(sys, 'argv', args),\
+        mock.patch.object(test_runner.subprocess, 'Popen') as mock_popen:
+      mock_popen.return_value.returncode = 0
+      test_runner.main()
+      expected_cmd = self.get_common_tast_expectations(use_vm) + [
+          '--tast', 'login.Chrome', '--tast-retries=1'
+      ]
+
+      self.safeAssertItemsEqual(expected_cmd, mock_popen.call_args[0][0])
+
+  @parameterized.expand([
+      [True],
+      [False],
+  ])
+  def test_tast(self, use_vm):
+    """Tests running tast tests."""
+    with open(os.path.join(self._tmp_dir, 'streamed_results.jsonl'), 'w') as f:
+      json.dump(_TAST_TEST_RESULTS_JSON, f)
+
+    args = self.get_common_tast_args(use_vm) + [
+
'-t=login.Chrome', + ] + with mock.patch.object(sys, 'argv', args),\ + mock.patch.object(test_runner.subprocess, 'Popen') as mock_popen: + mock_popen.return_value.returncode = 0 + + test_runner.main() + expected_cmd = self.get_common_tast_expectations(use_vm) + [ + '--tast', 'login.Chrome' + ] + + self.safeAssertItemsEqual(expected_cmd, mock_popen.call_args[0][0]) + + +class GTestTest(TestRunnerTest): + + @parameterized.expand([ + [True], + [False], + ]) + def test_gtest(self, use_vm): + """Tests running a gtest.""" + fd_mock = mock.mock_open() + + args = [ + 'script_name', + 'gtest', + '--test-exe=out_eve/Release/base_unittests', + '--board=eve', + '--path-to-outdir=out_eve/Release', + '--use-vm' if use_vm else '--device=localhost:2222', + ] + with mock.patch.object(sys, 'argv', args),\ + mock.patch.object(test_runner.subprocess, 'Popen') as mock_popen,\ + mock.patch.object(os, 'fdopen', fd_mock),\ + mock.patch.object(os, 'remove') as mock_remove,\ + mock.patch.object(tempfile, 'mkstemp', + return_value=(3, 'out_eve/Release/device_script.sh')),\ + mock.patch.object(os, 'fchmod'): + mock_popen.return_value.returncode = 0 + + test_runner.main() + self.assertEqual(1, mock_popen.call_count) + expected_cmd = [ + test_runner.CROS_RUN_TEST_PATH, '--board', 'eve', '--cache-dir', + test_runner.DEFAULT_CROS_CACHE, '--as-chronos', '--remote-cmd', + '--cwd', 'out_eve/Release', '--files', + 'out_eve/Release/device_script.sh' + ] + expected_cmd.extend(['--start', '--copy-on-write'] + if use_vm else ['--device', 'localhost:2222']) + expected_cmd.extend(['--', './device_script.sh']) + self.safeAssertItemsEqual(expected_cmd, mock_popen.call_args[0][0]) + + fd_mock().write.assert_called_once_with( + '#!/bin/sh\nexport HOME=/usr/local/tmp\n' + 'export TMPDIR=/usr/local/tmp\n' + 'LD_LIBRARY_PATH=./ ./out_eve/Release/base_unittests ' + '--test-launcher-shard-index=0 --test-launcher-total-shards=1\n') + mock_remove.assert_called_once_with('out_eve/Release/device_script.sh') + + def test_gtest_with_vpython(self): + """Tests building a gtest with --vpython-dir.""" + args = mock.MagicMock() + args.test_exe = 'base_unittests' + args.test_launcher_summary_output = None + args.trace_dir = None + args.runtime_deps_path = None + args.path_to_outdir = self._tmp_dir + args.vpython_dir = self._tmp_dir + args.logs_dir = self._tmp_dir + + # With vpython_dir initially empty, the test_runner should error out + # due to missing vpython binaries. + gtest = test_runner.GTestTest(args, None) + with self.assertRaises(test_runner.TestFormatError): + gtest.build_test_command() + + # Create the two expected tools, and the test should be ready to run. + with open(os.path.join(args.vpython_dir, 'vpython3'), 'w'): + pass # Just touch the file. 
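+    # build_test_command checks for both <vpython_dir>/vpython3 and
+    # <vpython_dir>/bin/python3, so the bin/python3 stub is needed as well.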
+ os.mkdir(os.path.join(args.vpython_dir, 'bin')) + with open(os.path.join(args.vpython_dir, 'bin', 'python3'), 'w'): + pass + gtest = test_runner.GTestTest(args, None) + gtest.build_test_command() + + +class HostCmdTests(TestRunnerTest): + + @parameterized.expand([ + [True, False, True], + [False, True, True], + [True, True, False], + [False, True, False], + ]) + def test_host_cmd(self, is_lacros, is_ash, strip_chrome): + args = [ + 'script_name', + 'host-cmd', + '--board=eve', + '--flash', + '--path-to-outdir=out/Release', + '--device=localhost:2222', + ] + if is_lacros: + args += ['--deploy-lacros'] + if is_ash: + args += ['--deploy-chrome'] + if strip_chrome: + args += ['--strip-chrome'] + args += [ + '--', + 'fake_cmd', + ] + with mock.patch.object(sys, 'argv', args),\ + mock.patch.object(test_runner.subprocess, 'Popen') as mock_popen: + mock_popen.return_value.returncode = 0 + + test_runner.main() + expected_cmd = [ + test_runner.CROS_RUN_TEST_PATH, + '--board', + 'eve', + '--cache-dir', + test_runner.DEFAULT_CROS_CACHE, + '--flash', + '--device', + 'localhost:2222', + '--build-dir', + os.path.join(test_runner.CHROMIUM_SRC_PATH, 'out/Release'), + '--host-cmd', + ] + if is_lacros: + expected_cmd += [ + '--deploy-lacros', + '--lacros-launcher-script', + test_runner.LACROS_LAUNCHER_SCRIPT_PATH, + ] + if is_ash: + expected_cmd += ['--mount', '--deploy'] + if not strip_chrome: + expected_cmd += ['--nostrip'] + + expected_cmd += [ + '--', + 'fake_cmd', + ] + + self.safeAssertItemsEqual(expected_cmd, mock_popen.call_args[0][0]) + + +if __name__ == '__main__': + unittest.main() diff --git a/ciopfs.sha1 b/ciopfs.sha1 new file mode 100644 index 000000000000..c1855a347e9e --- /dev/null +++ b/ciopfs.sha1 @@ -0,0 +1 @@ +5454b3c4f1c9992047e7ae9d6d14d5b49b1b12f3 \ No newline at end of file diff --git a/cipd/cipd.gni b/cipd/cipd.gni new file mode 100644 index 000000000000..852adeff8c4a --- /dev/null +++ b/cipd/cipd.gni @@ -0,0 +1,140 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Build targets for constructing CIPD packages. +# +# Prepares a CIPD archive and generates a manifest file. +# +# TODO(crbug.com/1042819): Add support for including directories. +# +# Parameters: +# package_definition_yaml: CIPD package definition filename. "cipd.yaml" +# if unspecified. +# package: The path where the package will be located inside the CIPD +# repository. +# description: Sets the "description" field in CIPD package definition. +# install_mode: String, should be either "symlink" or "copy". Defaults to +# "symlink". +# deps: A list of targets to build prior to copying files. +# sources: A list of files to copy into the staging root. +# source_directories: A list of directories to include in the package. Should +# only be used when listing out all the files (in a given +# directory) in |sources| is unfeasible. +# +# Example: +# cipd_package_definition("chromedriver") { +# package = "path/to/cipd/package" +# description = "Prebuilt test binary." 
+#     install_mode = "copy"
+#     deps = [ "//path/to:test_binary_target" ]
+#     sources = [ "//path/to:test_binary_file" ]
+#   }
+#
+template("cipd_package_definition") {
+  forward_variables_from(invoker,
+                         [
+                           "deps",
+                           "data",
+                           "source_directories",
+                           "data_deps",
+                           "sources",
+                           "testonly",
+                         ])
+
+  assert(defined(sources) || defined(source_directories),
+         "At least one sources input must be specified.")
+
+  _install_mode = "symlink"
+  if (defined(invoker.install_mode)) {
+    _install_mode = invoker.install_mode
+  }
+  assert(_install_mode == "copy" || _install_mode == "symlink",
+         "\"install_mode\" arg should be either \"copy\" or \"symlink\".")
+
+  _cipd_definition_yaml = "cipd.yaml"
+  if (defined(invoker.package_definition_yaml)) {
+    _cipd_definition_yaml = invoker.package_definition_yaml
+  }
+
+  _package_staging_dir = "${target_gen_dir}/${target_name}"
+
+  _yaml_contents = [
+    "package: ${invoker.package}",
+    "description: ${invoker.description}",
+    "root: " + rebase_path(_package_staging_dir),
+    "install_mode: ${_install_mode}",
+    "data:",
+  ]
+
+  if (defined(sources)) {
+    foreach(source, sources) {
+      _yaml_contents += [ " - file: " + get_path_info(source, "file") ]
+    }
+    copy(target_name) {
+      outputs = [ "${_package_staging_dir}/{{source_file_part}}" ]
+    }
+  }
+
+  if (defined(source_directories)) {
+    foreach(directory, source_directories) {
+      _yaml_contents += [ " - dir: " + directory ]
+    }
+  }
+
+  write_file("${_package_staging_dir}/${_cipd_definition_yaml}", _yaml_contents)
+}
+
+# Create a CIPD file based on inputs and a FILES.cfg config. Most of the
+# arguments are similar to |cipd_package_definition| above.
+#
+# Additional parameters:
+#
+#   package_definition_yaml: The output yaml file. Default is
+#                            ${target_name}_cipd.yaml.
+#   files_file: The file that defines which files and directories to include.
+#               Example: //tools/build/chromeos/FILES.cfg.
+#   buildtype: str, required. It can be "dev" or "official". A file is
+#              included only when its buildtype matches this value.
+#   arch: str, required. It can be "32bit", "64bit", "arm".
+#
+# Example:
+#   cipd_package_definition_by_file("chrome_cipd") {
+#     package = "path/to/cipd/package"
+#     description = "Prebuilt test binary."
+# install_mode = "copy" +# files_file = "//chrome/tools/build/chromeos/FILES.json" +# buildtype = "dev" +# arch = "64bit" +# deps = [ "//path/to:test_binary_target" ] +# } +template("cipd_package_definition_by_file") { + forward_variables_from(invoker, + [ + "deps", + "data", + "data_deps", + "sources", + "testonly", + ]) + _output_yaml_filename = "${target_name}_cipd.yaml" + if (defined(invoker.package_definition_yaml)) { + _output_yaml_filename = invoker.package_definition_yaml + } + action(target_name) { + script = "//build/cipd/cipd_from_file.py" + inputs = [ invoker.files_file ] + args = [ + "--description=" + invoker.description, + "--buildtype=" + invoker.buildtype, + "--arch=" + invoker.arch, + "--files_file=" + rebase_path(invoker.files_file, root_build_dir), + "--package=" + invoker.package, + "--install_mode=" + invoker.install_mode, + "--output_yaml_file=" + + rebase_path("${root_out_dir}/" + _output_yaml_filename, + root_build_dir), + ] + outputs = [ "${root_out_dir}/" + _output_yaml_filename ] + } +} diff --git a/cipd/cipd_from_file.py b/cipd/cipd_from_file.py new file mode 100755 index 000000000000..979b2b538895 --- /dev/null +++ b/cipd/cipd_from_file.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python3 +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Script to generate yaml file based on FILES.cfg.""" + +import argparse +import os + + +def _ParseFilesCfg(files_file): + """Return the dictionary of archive file info read from the given file.""" + if not os.path.exists(files_file): + raise IOError('Files list does not exist (%s).' % files_file) + exec_globals = {'__builtins__': None} + + exec(open(files_file).read(), exec_globals) + return exec_globals['FILES'] + + +def _Process(args): + yaml_content = ('package: ' + args.package + '\ndescription: ' + + args.description + '\ninstall_mode: ' + args.install_mode + + '\ndata:\n') + fileobj = _ParseFilesCfg(args.files_file) + for item in fileobj: + if 'buildtype' in item: + if args.buildtype not in item['buildtype']: + continue + if 'arch' in item: + if args.arch not in item['arch']: + continue + if 'type' in item and item['type'] == 'folder': + yaml_content += ' - dir: ' + item['filename'] + '\n' + else: + yaml_content += ' - file: ' + item['filename'] + '\n' + + with open(args.output_yaml_file, 'w') as f: + f.write(yaml_content) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--output_yaml_file', help='File to create.') + parser.add_argument( + '--package', + help='The path where the package will be located inside the CIPD\ + repository.') + parser.add_argument( + '--description', + help='Sets the "description" field in CIPD package definition.') + parser.add_argument('--install_mode', + help='String, should be either "symlink" or "copy".') + parser.add_argument('--files_file', + help='FILES.cfg describes what files to include.') + parser.add_argument('--buildtype', help='buildtype for FILES.cfg.') + parser.add_argument('--arch', help='arch for FILES.cfg') + + args = parser.parse_args() + + _Process(args) + + +if __name__ == '__main__': + main() diff --git a/clobber.py b/clobber.py new file mode 100755 index 000000000000..e886737dcc0c --- /dev/null +++ b/clobber.py @@ -0,0 +1,143 @@ +#!/usr/bin/env python3 +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""This script provides methods for clobbering build directories.""" + +import argparse +import os +import shutil +import subprocess +import sys + + +def extract_gn_build_commands(build_ninja_file): + """Extracts from a build.ninja the commands to run GN. + + The commands to run GN are the gn rule and build.ninja build step at the + top of the build.ninja file. We want to keep these when deleting GN builds + since we want to preserve the command-line flags to GN. + + On error, returns the empty string.""" + result = "" + with open(build_ninja_file, 'r') as f: + # Reads until the first empty line after the "build build.ninja:" target. + # We assume everything before it necessary as well (eg the + # "ninja_required_version" line). + found_build_dot_ninja_target = False + for line in f.readlines(): + result += line + if line.startswith('build build.ninja:'): + found_build_dot_ninja_target = True + if found_build_dot_ninja_target and line[0] == '\n': + return result + return '' # We got to EOF and didn't find what we were looking for. + + +def _rmtree(d): + # For unknown reasons (anti-virus?) rmtree of Chromium build directories + # often fails on Windows. + if sys.platform.startswith('win'): + subprocess.check_call(['rmdir', '/s', '/q', d], shell=True) + else: + shutil.rmtree(d) + + +def _clean_dir(build_dir): + # Remove files/sub directories individually instead of recreating the build + # dir because it fails when the build dir is symlinked or mounted. + for e in os.scandir(build_dir): + if e.is_dir(): + _rmtree(e.path) + else: + os.remove(e.path) + + +def delete_build_dir(build_dir): + # GN writes a build.ninja.d file. Note that not all GN builds have args.gn. + build_ninja_d_file = os.path.join(build_dir, 'build.ninja.d') + if not os.path.exists(build_ninja_d_file): + _clean_dir(build_dir) + return + + # GN builds aren't automatically regenerated when you sync. To avoid + # messing with the GN workflow, erase everything but the args file, and + # write a dummy build.ninja file that will automatically rerun GN the next + # time Ninja is run. + build_ninja_file = os.path.join(build_dir, 'build.ninja') + build_commands = extract_gn_build_commands(build_ninja_file) + + try: + gn_args_file = os.path.join(build_dir, 'args.gn') + with open(gn_args_file, 'r') as f: + args_contents = f.read() + except IOError: + args_contents = '' + + exception_during_rm = None + try: + # _clean_dir() may fail, such as when chrome.exe is running, + # and we still want to restore args.gn/build.ninja/build.ninja.d, so catch + # the exception and rethrow it later. + # We manually rm files inside the build dir rather than using "gn clean/gen" + # since we may not have run all necessary DEPS hooks yet at this point. + _clean_dir(build_dir) + except Exception as e: + exception_during_rm = e + + # Put back the args file (if any). + if args_contents != '': + with open(gn_args_file, 'w') as f: + f.write(args_contents) + + # Write the build.ninja file sufficiently to regenerate itself. + with open(os.path.join(build_dir, 'build.ninja'), 'w') as f: + if build_commands != '': + f.write(build_commands) + else: + # Couldn't parse the build.ninja file, write a default thing. + f.write('''ninja_required_version = 1.7.2 + +rule gn + command = gn -q gen //out/%s/ + description = Regenerating ninja files + +build build.ninja: gn + generator = 1 + depfile = build.ninja.d +''' % (os.path.split(build_dir)[1])) + + # Write a .d file for the build which references a nonexistant file. 
This + # will make Ninja always mark the build as dirty. + with open(build_ninja_d_file, 'w') as f: + f.write('build.ninja: nonexistant_file.gn\n') + + if exception_during_rm: + # Rethrow the exception we caught earlier. + raise exception_during_rm + + +def clobber(out_dir): + """Clobber contents of build directory. + + Don't delete the directory itself: some checkouts have the build directory + mounted.""" + for f in os.listdir(out_dir): + path = os.path.join(out_dir, f) + if os.path.isfile(path): + os.unlink(path) + elif os.path.isdir(path): + delete_build_dir(path) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('out_dir', help='The output directory to clobber') + args = parser.parse_args() + clobber(args.out_dir) + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/clobber_unittest.py b/clobber_unittest.py new file mode 100755 index 000000000000..d38c447b1443 --- /dev/null +++ b/clobber_unittest.py @@ -0,0 +1,148 @@ +#!/usr/bin/env python3 +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import os +import pathlib +import shutil +import sys +import tempfile +import textwrap +import unittest +from unittest import mock + +import clobber + + +class TestExtractBuildCommand(unittest.TestCase): + def setUp(self): + self.build_ninja_file, self.build_ninja_path = tempfile.mkstemp(text=True) + + def tearDown(self): + os.close(self.build_ninja_file) + os.remove(self.build_ninja_path) + + def test_normal_extraction(self): + build_ninja_file_contents = textwrap.dedent(""" + ninja_required_version = 1.7.2 + + rule gn + command = ../../buildtools/gn --root=../.. -q --regeneration gen . + pool = console + description = Regenerating ninja files + + build build.ninja.stamp: gn + generator = 1 + depfile = build.ninja.d + + build build.ninja: phony build.ninja.stamp + generator = 1 + + pool build_toolchain_action_pool + depth = 72 + + pool build_toolchain_link_pool + depth = 23 + + subninja toolchain.ninja + subninja clang_newlib_x64/toolchain.ninja + subninja glibc_x64/toolchain.ninja + subninja irt_x64/toolchain.ninja + subninja nacl_bootstrap_x64/toolchain.ninja + subninja newlib_pnacl/toolchain.ninja + + build blink_python_tests: phony obj/blink_python_tests.stamp + build blink_tests: phony obj/blink_tests.stamp + + default all + """) # Based off of a standard linux build dir. + with open(self.build_ninja_path, 'w') as f: + f.write(build_ninja_file_contents) + + expected_build_ninja_file_contents = textwrap.dedent(""" + ninja_required_version = 1.7.2 + + rule gn + command = ../../buildtools/gn --root=../.. -q --regeneration gen . + pool = console + description = Regenerating ninja files + + build build.ninja.stamp: gn + generator = 1 + depfile = build.ninja.d + + build build.ninja: phony build.ninja.stamp + generator = 1 + + """) + + self.assertEqual(clobber.extract_gn_build_commands(self.build_ninja_path), + expected_build_ninja_file_contents) + + def test_unexpected_format(self): + # No "build build.ninja:" line should make it return an empty string. + build_ninja_file_contents = textwrap.dedent(""" + ninja_required_version = 1.7.2 + + rule gn + command = ../../buildtools/gn --root=../.. -q --regeneration gen . 
+      pool = console
+      description = Regenerating ninja files
+
+      subninja toolchain.ninja
+
+      build blink_python_tests: phony obj/blink_python_tests.stamp
+      build blink_tests: phony obj/blink_tests.stamp
+
+    """)
+    with open(self.build_ninja_path, 'w') as f:
+      f.write(build_ninja_file_contents)
+
+    self.assertEqual(clobber.extract_gn_build_commands(self.build_ninja_path),
+                     '')
+
+
+class TestDelete(unittest.TestCase):
+  def setUp(self):
+    self.build_dir = tempfile.mkdtemp()
+
+    pathlib.Path(os.path.join(self.build_dir, 'build.ninja')).touch()
+    pathlib.Path(os.path.join(self.build_dir, 'build.ninja.d')).touch()
+
+  def tearDown(self):
+    shutil.rmtree(self.build_dir)
+
+  def test_delete_build_dir_full(self):
+    # Create a dummy file in the build dir and ensure it gets removed.
+    dummy_file = os.path.join(self.build_dir, 'dummy')
+    pathlib.Path(dummy_file).touch()
+
+    clobber.delete_build_dir(self.build_dir)
+
+    self.assertFalse(os.path.exists(dummy_file))
+
+  def test_delete_build_dir_fail(self):
+    # Make _clean_dir() throw to ensure it's handled gracefully.
+
+    with mock.patch('clobber._clean_dir', side_effect=OSError):
+      with self.assertRaises(OSError):
+        clobber.delete_build_dir(self.build_dir)
+
+  @unittest.skipIf(sys.platform == 'win32',
+                   'Symlinks are not allowed on Windows by default')
+  def test_delete_build_dir_link(self):
+    with tempfile.TemporaryDirectory() as tmpdir:
+      # Create a symlink.
+      build_dir = os.path.join(tmpdir, 'link')
+      os.symlink(self.build_dir, build_dir)
+
+      # Create a dummy file.
+      dummy_file = os.path.join(build_dir, 'dummy')
+      pathlib.Path(dummy_file).touch()
+      clobber.delete_build_dir(build_dir)
+
+      self.assertFalse(os.path.exists(dummy_file))
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/compiled_action.gni b/compiled_action.gni
new file mode 100644
index 000000000000..6a632bdfa6bf
--- /dev/null
+++ b/compiled_action.gni
@@ -0,0 +1,167 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file introduces two related templates that act like action and
+# action_foreach, but instead of running a Python script they compile a
+# given tool in the host toolchain and run it (either once or over the list
+# of inputs, depending on the variant).
+#
+# Parameters
+#
+#   tool (required)
+#       [label] Label of the tool to run. This should be an executable, and
+#       this label should not include a toolchain (anything in parens). The
+#       host compile of this tool will be used.
+#
+#   outputs (required)
+#       [list of files] Like the outputs of action (if using "compiled_action",
+#       this would be just the list of outputs), or action_foreach (if using
+#       "compiled_action_foreach", this would contain source expansions mapping
+#       input to output files).
+#
+#   args (required)
+#       [list of strings] Same meaning as action/action_foreach.
+#
+#   inputs (optional)
+#       Files the binary takes as input. The step will be re-run whenever any
+#       of these change. If inputs is empty, the step will run only when the
+#       binary itself changes.
+#
+#   depfile
+#   deps
+#   visibility  (all optional)
+#       Same meaning as action/action_foreach.
+#
+#
+# Example of usage:
+#
+#   compiled_action("run_my_tool") {
+#     tool = "//tools/something:mytool"
+#     outputs = [
+#       "$target_gen_dir/mysource.cc",
+#       "$target_gen_dir/mysource.h",
+#     ]
+#
+#     # The tool takes this input.
+#     inputs = [ "my_input_file.idl" ]
+#
+#     # In this case, the tool takes as arguments the input file and the output
+#     # build dir (both relative to the "cd" that the script will be run in)
+#     # and will produce the output files listed above.
+#     args = [
+#       rebase_path("my_input_file.idl", root_build_dir),
+#       "--output-dir", rebase_path(target_gen_dir, root_build_dir),
+#     ]
+#   }
+#
+# You would typically declare your tool like this:
+#   if (host_toolchain == current_toolchain) {
+#     executable("mytool") {
+#       ...
+#     }
+#   }
+# The if statement around the executable is optional. That says "I only care
+# about this target in the host toolchain". Usually this is what you want,
+# since it avoids unnecessarily compiling your tool for the target platform.
+# But if you need a target build of your tool as well, just leave off the if
+# statement.
+
+if (host_os == "win") {
+  _host_executable_suffix = ".exe"
+} else {
+  _host_executable_suffix = ""
+}
+
+template("compiled_action") {
+  assert(defined(invoker.tool), "tool must be defined for $target_name")
+  assert(defined(invoker.outputs), "outputs must be defined for $target_name")
+  assert(defined(invoker.args), "args must be defined for $target_name")
+
+  assert(!defined(invoker.sources),
+         "compiled_action doesn't take a sources arg. Use inputs instead.")
+
+  action(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data_deps",
+                             "deps",
+                             "depfile",
+                             "inputs",
+                             "outputs",
+                             "testonly",
+                             "visibility",
+                           ])
+    if (!defined(deps)) {
+      deps = []
+    }
+    if (!defined(inputs)) {
+      inputs = []
+    }
+
+    script = "//build/gn_run_binary.py"
+
+    # Construct the host toolchain version of the tool.
+    host_tool = invoker.tool + "($host_toolchain)"
+
+    # Get the path to the executable. Currently, this assumes that the tool
+    # does not specify output_name so that the target name is the name to use.
+    # If that's not the case, we'll need another argument to the script to
+    # specify this, since we can't know what the output name is (it might be in
+    # another file not processed yet).
+    host_executable =
+        get_label_info(host_tool, "root_out_dir") + "/" +
+        get_label_info(host_tool, "name") + _host_executable_suffix
+
+    deps += [ host_tool ]
+
+    # The script takes as arguments the binary to run, and then the arguments
+    # to pass it.
+    args = [ rebase_path(host_executable, root_build_dir) ] + invoker.args
+  }
+}
+
+template("compiled_action_foreach") {
+  assert(defined(invoker.sources), "sources must be defined for $target_name")
+  assert(defined(invoker.tool), "tool must be defined for $target_name")
+  assert(defined(invoker.outputs), "outputs must be defined for $target_name")
+  assert(defined(invoker.args), "args must be defined for $target_name")
+
+  action_foreach(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "depfile",
+                             "inputs",
+                             "outputs",
+                             "sources",
+                             "testonly",
+                             "visibility",
+                           ])
+    if (!defined(deps)) {
+      deps = []
+    }
+    if (!defined(inputs)) {
+      inputs = []
+    }
+
+    script = "//build/gn_run_binary.py"
+
+    # Construct the host toolchain version of the tool.
+    host_tool = invoker.tool + "($host_toolchain)"
+
+    # Get the path to the executable. Currently, this assumes that the tool
+    # does not specify output_name so that the target name is the name to use.
+    # If that's not the case, we'll need another argument to the script to
+    # specify this, since we can't know what the output name is (it might be in
+    # another file not processed yet).
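+    # For example, with tool = "//tools/something:mytool" this evaluates to
+    # something like "<root_out_dir>/mytool", with ".exe" appended on a
+    # Windows host.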
+    host_executable =
+        get_label_info(host_tool, "root_out_dir") + "/" +
+        get_label_info(host_tool, "name") + _host_executable_suffix
+
+    deps += [ host_tool ]
+
+    # The script takes as arguments the binary to run, and then the arguments
+    # to pass it.
+    args = [ rebase_path(host_executable, root_build_dir) ] + invoker.args
+  }
+}
diff --git a/compute_build_timestamp.py b/compute_build_timestamp.py
new file mode 100755
index 000000000000..befe8445608d
--- /dev/null
+++ b/compute_build_timestamp.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env python3
+# Copyright 2018 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Returns a timestamp that approximates the build date.
+
+build_type impacts the timestamp generated; both variants are relative to the
+date of the most recent commit:
+- default: the build date is set to the most recent first Sunday of a month at
+  5:00am. The reason is that it is a time when invalidating the build cache
+  shouldn't have major repercussions (due to lower load).
+- official: the build date is set to the time of the most recent commit.
+Either way, it is guaranteed to be in the past and always in UTC.
+"""
+
+# The requirements for the timestamp:
+# (1) for the purposes of continuous integration, longer duration
+#     between cache invalidations is better, and >=1mo is preferable.
+# (2) for security purposes, timebombs would ideally be as close to
+#     the actual time of the build as possible. It must be in the past.
+# (3) HSTS certificate pinning is valid for 70 days. To make CI builds enforce
+#     HSTS pinning, <=1mo is preferable.
+#
+# On Windows, the timestamp is also written in the PE/COFF file header of
+# executables and dlls. That timestamp and the executable's file size are
+# the only two pieces of information that identify a given executable on
+# the symbol server, so rarely changing timestamps can cause conflicts there
+# as well. We only upload symbols for official builds to the symbol server.
+
+
+import argparse
+import calendar
+import datetime
+import doctest
+import os
+import sys
+
+
+THIS_DIR = os.path.abspath(os.path.dirname(__file__))
+
+
+def GetFirstSundayOfMonth(year, month):
+  """Returns the first Sunday of the given month of the given year.
+
+  >>> GetFirstSundayOfMonth(2016, 2)
+  7
+  >>> GetFirstSundayOfMonth(2016, 3)
+  6
+  >>> GetFirstSundayOfMonth(2000, 1)
+  2
+  """
+  weeks = calendar.Calendar().monthdays2calendar(year, month)
+  # Return the first day in the first week that is a Sunday.
+  return [date_day[0] for date_day in weeks[0] if date_day[1] == 6][0]
+
+
+def GetUnofficialBuildDate(build_date):
+  """Gets the approximate build date given the specific build type.
+
+  >>> GetUnofficialBuildDate(datetime.datetime(2016, 2, 6, 1, 2, 3))
+  datetime.datetime(2016, 1, 3, 5, 0)
+  >>> GetUnofficialBuildDate(datetime.datetime(2016, 2, 7, 5))
+  datetime.datetime(2016, 2, 7, 5, 0)
+  >>> GetUnofficialBuildDate(datetime.datetime(2016, 2, 8, 5))
+  datetime.datetime(2016, 2, 8, 5, 0)
+  """
+
+  if build_date.hour < 5:
+    # The time is locked at 5:00 am in UTC to cause the build cache
+    # invalidation to not happen exactly at midnight. Use the same calculation
+    # as the day before.
+    # See //base/build_time.cc.
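+    # Worked example: 2016-02-06 01:02:03 becomes 2016-02-05 05:00:00 here,
+    # and is then snapped below to the first Sunday of the previous month,
+    # 2016-01-03 05:00:00, as in the first doctest above.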
+    build_date = build_date - datetime.timedelta(days=1)
+  build_date = datetime.datetime(build_date.year, build_date.month,
+                                 build_date.day, 5, 0, 0)
+
+  day = build_date.day
+  month = build_date.month
+  year = build_date.year
+  first_sunday = GetFirstSundayOfMonth(year, month)
+  # If our build is after the first Sunday, we've already refreshed our build
+  # cache on a quiet day, so just use that day.
+  # Otherwise, take the first Sunday of the previous month.
+  if day >= first_sunday:
+    day = first_sunday
+  else:
+    month -= 1
+    if month == 0:
+      month = 12
+      year -= 1
+    day = GetFirstSundayOfMonth(year, month)
+  return datetime.datetime(
+      year, month, day, build_date.hour, build_date.minute, build_date.second)
+
+
+def main():
+  if doctest.testmod()[0]:
+    return 1
+  argument_parser = argparse.ArgumentParser()
+  argument_parser.add_argument(
+      'build_type', help='The type of build', choices=('official', 'default'))
+  args = argument_parser.parse_args()
+
+  # The mtime of the revision in build/util/LASTCHANGE is stored in a file
+  # next to it. Read it, to get a deterministic time close to "now".
+  # That date is then modified as described at the top of the file so that
+  # it changes less frequently than with every commit.
+  # This intentionally always uses build/util/LASTCHANGE's commit time even if
+  # use_dummy_lastchange is set.
+  lastchange_file = os.path.join(THIS_DIR, 'util', 'LASTCHANGE.committime')
+  last_commit_timestamp = int(open(lastchange_file).read())
+  build_date = datetime.datetime.utcfromtimestamp(last_commit_timestamp)
+
+  # For official builds we want full-fidelity timestamps because official
+  # builds are typically added to symbol servers and Windows symbol servers
+  # use the link timestamp as the prime differentiator, but for unofficial
+  # builds we do lots of quantization to avoid churn.
+  offset = 0
+  if args.build_type == 'official':
+    if os.name == 'nt':
+      version_path = os.path.join(THIS_DIR, os.pardir, 'chrome', 'VERSION')
+      with open(version_path) as f:
+        patch_line = f.readlines()[3].strip()
+      # Use the patch number as an offset to the build date so that multiple
+      # versions with different patch numbers built from the same source code
+      # will get different build_date values. This is critical for Windows
+      # symbol servers, to avoid collisions.
+      assert patch_line.startswith('PATCH=')
+      offset = int(patch_line[6:])
+  else:
+    build_date = GetUnofficialBuildDate(build_date)
+  print(offset + int(calendar.timegm(build_date.utctimetuple())))
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/config/BUILD.gn b/config/BUILD.gn
new file mode 100644
index 000000000000..749b0855ff2a
--- /dev/null
+++ b/config/BUILD.gn
@@ -0,0 +1,421 @@
+# Copyright 2013 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/c++/c++.gni")
+import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
+import("//build/config/dcheck_always_on.gni")
+import("//build/config/features.gni")
+
+# Subprojects need to override arguments in {mac,ios}_sdk_overrides.gni in their
+# .gn config, but those arguments are only used on macOS. Including
+# mac_sdk_overrides.gni ensures that this doesn't trigger an unused argument
+# warning.
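+#
+# A sketch of what such an override might look like in a subproject's .gn
+# file (the argument name and value here are illustrative; see the
+# *_sdk_overrides.gni files for the actual arguments):
+#
+#   default_args = {
+#     mac_sdk_min = "10.15"
+#   }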
+import("//build/config/ios/ios_sdk_overrides.gni") +import("//build/config/mac/mac_sdk_overrides.gni") + +import("//build/config/pch.gni") +import("//build/config/sanitizers/sanitizers.gni") +import("//build/config/ui.gni") +import("//build/toolchain/goma.gni") +if (is_android) { + import("//build/config/android/abi.gni") +} + +# ============================================== +# PLEASE DO NOT ADD MORE THINGS TO THIS LIST +# ============================================== +# +# Legacy feature defines applied to all targets. +# +# These are applied to every single compile in the build and most of them are +# only relevant to a few files. This bloats command lines and causes +# unnecessary recompiles when flags are flipped. +# +# To pass defines to source code from the build, use the buildflag system which +# will write headers containing the defines you need. This isolates the define +# and means its definition can participate in the build graph, only recompiling +# things when it actually changes. +# +# See //build/buildflag_header.gni for instructions on generating headers. +# +# This will also allow you to scope your build flag to a BUILD.gn file (or a +# .gni file if you need it from more than one place) rather than making global +# flags. See //build/config/BUILDCONFIG.gn for advice on where to define +# build flags. +config("feature_flags") { + defines = [] + if (dcheck_always_on) { + defines += [ "DCHECK_ALWAYS_ON=1" ] + } + if (use_udev) { + # TODO(brettw) should probably be "=1". + defines += [ "USE_UDEV" ] + } + if (use_aura) { + defines += [ "USE_AURA=1" ] + } + if (use_glib) { + defines += [ "USE_GLIB=1" ] + } + if (use_ozone && !is_android) { + # Chrome code should check BUILDFLAG(IS_OZONE) instead of + # defined(USE_OZONE). + # + # Note that some Chrome OS builds unconditionally set |use_ozone| to true, + # but they also build some targets with the Android toolchain. This ensures + # that Android targets still build with USE_OZONE=0 in such cases. + # + # TODO(crbug.com/837032): Maybe this can be cleaned up if we can avoid + # setting use_ozone globally. + defines += [ "USE_OZONE=1" ] + } + if (is_asan || is_hwasan || is_lsan || is_tsan || is_msan) { + defines += [ "MEMORY_TOOL_REPLACES_ALLOCATOR" ] + } + if (is_asan) { + defines += [ "ADDRESS_SANITIZER" ] + } + if (is_lsan) { + defines += [ "LEAK_SANITIZER" ] + } + if (is_tsan) { + defines += [ + "THREAD_SANITIZER", + "DYNAMIC_ANNOTATIONS_EXTERNAL_IMPL=1", + ] + } + if (is_msan) { + defines += [ "MEMORY_SANITIZER" ] + } + if (is_ubsan || is_ubsan_vptr || is_ubsan_security) { + defines += [ "UNDEFINED_SANITIZER" ] + } + if (is_official_build) { + defines += [ "OFFICIAL_BUILD" ] + } + + # ============================================== + # PLEASE DO NOT ADD MORE THINGS TO THIS LIST + # ============================================== + # + # See the comment at the top. +} + +# Debug/release ---------------------------------------------------------------- + +config("debug") { + defines = [ + "_DEBUG", + "DYNAMIC_ANNOTATIONS_ENABLED=1", + ] + + if (is_nacl) { + defines += [ "DYNAMIC_ANNOTATIONS_PREFIX=NACL_" ] + } + + if (is_win) { + if (!enable_iterator_debugging && !use_custom_libcxx) { + # Iterator debugging is enabled by default by the compiler on debug + # builds, and we have to tell it to turn it off. 
+ defines += [ "_HAS_ITERATOR_DEBUGGING=0" ] + } + } else if ((is_linux || is_chromeos) && current_cpu == "x64" && + enable_iterator_debugging) { + # Enable libstdc++ debugging facilities to help catch problems early, see + # http://crbug.com/65151 . + # TODO(phajdan.jr): Should we enable this for all of POSIX? + defines += [ "_GLIBCXX_DEBUG=1" ] + } +} + +config("release") { + defines = [ "NDEBUG" ] + + # Sanitizers. + if (is_tsan) { + defines += [ "DYNAMIC_ANNOTATIONS_ENABLED=1" ] + } else { + defines += [ "NVALGRIND" ] + if (!is_nacl) { + # NaCl always enables dynamic annotations. Currently this value is set to + # 1 for all .nexes. + defines += [ "DYNAMIC_ANNOTATIONS_ENABLED=0" ] + } + } + + if (is_ios) { + # Disable NSAssert and GTMDevAssert (from Google Toolbox for Mac). This + # follows XCode's default behavior for Release builds. + defines += [ "NS_BLOCK_ASSERTIONS=1" ] + } +} + +# Default libraries ------------------------------------------------------------ + +# This config defines the default libraries applied to all targets. +config("default_libs") { + if (is_win) { + # TODO(brettw) this list of defaults should probably be smaller, and + # instead the targets that use the less common ones (e.g. wininet or + # winspool) should include those explicitly. + libs = [ + "advapi32.lib", + "comdlg32.lib", + "dbghelp.lib", + "dnsapi.lib", + "gdi32.lib", + "msimg32.lib", + "odbc32.lib", + "odbccp32.lib", + "oleaut32.lib", + "shell32.lib", + "shlwapi.lib", + "user32.lib", + "usp10.lib", + "uuid.lib", + "version.lib", + "wininet.lib", + "winmm.lib", + "winspool.lib", + "ws2_32.lib", + + # Please don't add more stuff here. We should actually be making this + # list smaller, since all common things should be covered. If you need + # some extra libraries, please just add a libs = [ "foo.lib" ] to your + # target that needs it. + ] + if (current_os == "winuwp") { + # These libraries are needed for Windows UWP (i.e. store apps). + libs += [ + "dloadhelper.lib", + "WindowsApp.lib", + ] + } else { + # These libraries are not compatible with Windows UWP (i.e. store apps.) + libs += [ + "delayimp.lib", + "kernel32.lib", + "ole32.lib", + ] + } + } else if (is_android) { + libs = [ + "dl", + "m", + ] + } else if (is_mac) { + # Targets should choose to explicitly link frameworks they require. Since + # linking can have run-time side effects, nothing should be listed here. + libs = [] + } else if (is_ios) { + # The libraries listed here will be specified for both the target and the + # host. Only the common ones should be listed here. + frameworks = [ + "CoreFoundation.framework", + "CoreGraphics.framework", + "CoreText.framework", + "Foundation.framework", + ] + } else if (is_linux || is_chromeos) { + libs = [ + "dl", + "pthread", + "rt", + ] + } +} + +group("common_deps") { + visibility = [ + ":executable_deps", + ":loadable_module_deps", + ":rust_bin_deps", + ":rust_cdylib_deps", + ":rust_dylib_deps", + ":shared_library_deps", + ] + + # WARNING: This group is a dependency of **every executable and shared + # library**. Please be careful adding new dependencies here. 
+ public_deps = [] + + if (using_sanitizer) { + public_deps += [ "//build/config/sanitizers:deps" ] + } + + if (use_custom_libcxx) { + public_deps += [ "//buildtools/third_party/libc++" ] + } + + if (use_afl) { + public_deps += [ "//third_party/afl" ] + } + + if (is_android && use_order_profiling) { + public_deps += [ "//base/android/orderfile:orderfile_instrumentation" ] + } + + if (is_fuchsia) { + public_deps += + [ "//third_party/fuchsia-sdk/sdk/build/config:runtime_library_group" ] + if (is_asan) { + public_deps += [ "//build/config/fuchsia:asan_runtime_library" ] + } + } +} + +# Only the executable template in BUILDCONFIG.gn should reference this. +group("executable_deps") { + public_deps = [ ":common_deps" ] + if (export_libcxxabi_from_executables) { + public_deps += [ "//buildtools/third_party/libc++abi" ] + } + public_configs = [ "//build/config/sanitizers:link_executable" ] +} + +# Only the rust_bin template in BUILDCONFIG.gn should reference this. +group("rust_bin_deps") { + public_deps = [ ":common_deps" ] + if (export_libcxxabi_from_executables) { + public_deps += [ "//buildtools/third_party/libc++abi" ] + } + public_configs = [ "//build/config/sanitizers:link_executable" ] +} + +# Only the loadable_module template in BUILDCONFIG.gn should reference this. +group("loadable_module_deps") { + public_deps = [ ":common_deps" ] + + public_configs = [ "//build/config/sanitizers:link_shared_library" ] +} + +# Only the shared_library template in BUILDCONFIG.gn should reference this. +group("shared_library_deps") { + public_deps = [ ":common_deps" ] + + public_configs = [ "//build/config/sanitizers:link_shared_library" ] +} + +# Only the rust_dylib template in BUILDCONFIG.gn should reference this. +group("rust_dylib_deps") { + public_deps = [ ":common_deps" ] + + public_configs = [ "//build/config/sanitizers:link_shared_library" ] +} + +# Only the rust_cdylib template in BUILDCONFIG.gn should reference this. +group("rust_cdylib_deps") { + public_deps = [ ":common_deps" ] + + public_configs = [ "//build/config/sanitizers:link_shared_library" ] +} + +# Executable configs ----------------------------------------------------------- + +# Windows linker setup for EXEs and DLLs. +if (is_win) { + _windows_linker_configs = [ + "//build/config/win:sdk_link", + "//build/config/win:common_linker_setup", + ] +} + +# This config defines the configs applied to all executables. +config("executable_config") { + configs = [] + + if (is_win) { + configs += _windows_linker_configs + configs += [ "//build/config/win:exe_flags" ] + } else if (is_mac) { + configs += [ "//build/config/mac:mac_dynamic_flags" ] + } else if (is_ios) { + configs += [ + "//build/config/ios:ios_dynamic_flags", + "//build/config/ios:ios_executable_flags", + ] + } else if (is_linux || is_chromeos || is_android || current_os == "aix") { + configs += [ "//build/config/gcc:executable_config" ] + if (is_castos || is_cast_android) { + configs += [ "//build/config/chromecast:executable_config" ] + } + } + + # If we're using the prebuilt instrumented libraries with the sanitizers, we + # need to add ldflags to every binary to make sure they are picked up. 
+ if (prebuilt_instrumented_libraries_available) { + configs += [ "//third_party/instrumented_libraries:prebuilt_ldflags" ] + } + if (use_locally_built_instrumented_libraries) { + configs += [ "//third_party/instrumented_libraries:locally_built_ldflags" ] + } +} + +# Shared library configs ------------------------------------------------------- + +# This config defines the configs applied to all shared libraries. +config("shared_library_config") { + configs = [] + + if (is_win) { + configs += _windows_linker_configs + } else if (is_mac) { + configs += [ "//build/config/mac:mac_dynamic_flags" ] + } else if (is_ios) { + configs += [ + "//build/config/ios:ios_dynamic_flags", + "//build/config/ios:ios_shared_library_flags", + ] + } else if (is_castos || is_cast_android) { + configs += [ "//build/config/chromecast:shared_library_config" ] + } else if (is_linux || is_chromeos || current_os == "aix") { + configs += [ "//build/config/gcc:shared_library_config" ] + } + + # If we're using the prebuilt instrumented libraries with the sanitizers, we + # need to add ldflags to every binary to make sure they are picked up. + if (prebuilt_instrumented_libraries_available) { + configs += [ "//third_party/instrumented_libraries:prebuilt_ldflags" ] + } + if (use_locally_built_instrumented_libraries) { + configs += [ "//third_party/instrumented_libraries:locally_built_ldflags" ] + } +} + +# Add this config to your target to enable precompiled headers. +# +# Precompiled headers are done on a per-target basis. If you have just a couple +# of files, the time it takes to precompile (~2 seconds) can actually be longer +# than the time saved. On a Z620, a 100 file target compiles about 2 seconds +# faster with precompiled headers, with greater savings for larger targets. +# +# Recommend precompiled headers for targets with more than 50 .cc files. +config("precompiled_headers") { + if (enable_precompiled_headers) { + if (is_win) { + # This is a string rather than a file GN knows about. It has to match + # exactly what's in the /FI flag below, and what might appear in the + # source code in quotes for an #include directive. + precompiled_header = "build/precompile.h" + + # This is a file that GN will compile with the above header. It will be + # implicitly added to the sources (potentially multiple times, with one + # variant for each language used in the target). + precompiled_source = "//build/precompile.cc" + + # Force include the header. + cflags = [ "/FI$precompiled_header" ] + } else if (is_mac || is_linux) { + precompiled_source = "//build/precompile.h" + } + } +} + +# Add this config to link steps in order to compress debug sections. This is +# especially useful on 32-bit architectures in order to keep file sizes under +# 4gb. +config("compress_debug_sections") { + ldflags = [ "-gz" ] +} diff --git a/config/BUILDCONFIG.gn b/config/BUILDCONFIG.gn new file mode 100644 index 000000000000..33651426b6d5 --- /dev/null +++ b/config/BUILDCONFIG.gn @@ -0,0 +1,753 @@ +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# ============================================================================= +# WHAT IS THIS FILE? +# ============================================================================= +# +# This is the main GN build configuration. This file is loaded after the +# build args (args.gn) for the build directory and after the toplevel ".gn" +# file (which points to this file as the build configuration). 
+# +# This file will be executed and the resulting context will be used to execute +# every other file in the build. So variables declared here (that don't start +# with an underscore) will be implicitly global. + +# ============================================================================= +# PLATFORM SELECTION +# ============================================================================= +# +# There are two main things to set: "os" and "cpu". The "toolchain" is the name +# of the GN thing that encodes combinations of these things. +# +# Users typically only set the variables "target_os" and "target_cpu" in "gn +# args", the rest are set up by our build and internal to GN. +# +# There are three different types of each of these things: The "host" +# represents the computer doing the compile and never changes. The "target" +# represents the main thing we're trying to build. The "current" represents +# which configuration is currently being defined, which can be either the +# host, the target, or something completely different (like nacl). GN will +# run the same build file multiple times for the different required +# configuration in the same build. +# +# This gives the following variables: +# - host_os, host_cpu, host_toolchain +# - target_os, target_cpu, default_toolchain +# - current_os, current_cpu, current_toolchain. +# +# Note the default_toolchain isn't symmetrical (you would expect +# target_toolchain). This is because the "default" toolchain is a GN built-in +# concept, and "target" is something our build sets up that's symmetrical with +# its GYP counterpart. Potentially the built-in default_toolchain variable +# could be renamed in the future. +# +# When writing build files, to do something only for the host: +# if (current_toolchain == host_toolchain) { ... + +if (target_os == "") { + target_os = host_os +} + +if (target_cpu == "") { + if (target_os == "android") { + # If we're building for Android, we should assume that we want to + # build for ARM by default, not the host_cpu (which is likely x64). + # This allows us to not have to specify both target_os and target_cpu + # on the command line. + target_cpu = "arm" + } else { + target_cpu = host_cpu + } +} + +if (current_cpu == "") { + current_cpu = target_cpu +} +if (current_os == "") { + current_os = target_os +} + +# ============================================================================= +# BUILD FLAGS +# ============================================================================= +# +# This block lists input arguments to the build, along with their default +# values. +# +# If a value is specified on the command line, it will overwrite the defaults +# given in a declare_args block, otherwise the default will be used. +# +# YOU SHOULD ALMOST NEVER NEED TO ADD FLAGS TO THIS FILE. GN allows any file in +# the build to declare build flags. If you need a flag for a single component, +# you can just declare it in the corresponding BUILD.gn file. +# +# - If your feature is a single target, say //components/foo, you can put +# a declare_args() block in //components/foo/BUILD.gn and use it there. +# Nobody else in the build needs to see the flag. +# +# - Defines based on build variables should be implemented via the generated +# build flag header system. See //build/buildflag_header.gni. You can put +# the buildflag_header target in the same file as the build flag itself. You +# should almost never set "defines" directly. 
+# +# - If your flag toggles a target on and off or toggles between different +# versions of similar things, write a "group" target that forwards to the +# right target (or no target) depending on the value of the build flag. This +# group can be in the same BUILD.gn file as the build flag, and targets can +# depend unconditionally on the group rather than duplicating flag checks +# across many targets. +# +# - If a semi-random set of build files REALLY needs to know about a define and +# the above pattern for isolating the build logic in a forwarding group +# doesn't work, you can put the argument in a .gni file. This should be put +# in the lowest level of the build that knows about this feature (which should +# almost always be outside of the //build directory!). +# +# Other flag advice: +# +# - Use boolean values when possible. If you need a default value that expands +# to some complex thing in the default case (like the location of the +# compiler which would be computed by a script), use a default value of -1 or +# the empty string. Outside of the declare_args block, conditionally expand +# the default value as necessary. +# +# - Use a name like "use_foo" or "is_foo" (whatever is more appropriate for +# your feature) rather than just "foo". +# +# - Write good comments directly above the declaration with no blank line. +# These comments will appear as documentation in "gn args --list". +# +# - Don't call exec_script inside declare_args. This will execute the script +# even if the value is overridden, which is wasteful. See first bullet. + +declare_args() { + # Set to enable the official build level of optimization. This has nothing + # to do with branding, but enables an additional level of optimization above + # release (!is_debug). This might be better expressed as a tri-state + # (debug, release, official) but for historical reasons there are two + # separate flags. + # + # IMPORTANT NOTE: (!is_debug) is *not* sufficient to get satisfying + # performance. In particular, DCHECK()s are still enabled for release builds, + # which can halve overall performance, and do increase memory usage. Always + # set "is_official_build" to true for any build intended to ship to end-users. + is_official_build = false + + # Set to true when compiling with the Clang compiler. + is_clang = current_os != "linux" || + (current_cpu != "s390x" && current_cpu != "s390" && + current_cpu != "ppc64" && current_cpu != "ppc" && + current_cpu != "mips" && current_cpu != "mips64" && + current_cpu != "riscv64") + + # Allows the path to a custom target toolchain to be injected as a single + # argument, and set as the default toolchain. + custom_toolchain = "" + + # This should not normally be set as a build argument. It's here so that + # every toolchain can pass through the "global" value via toolchain_args(). + host_toolchain = "" + + # Do not set this directly. + # It should be set only by //build/toolchains/android:robolectric_x64. + # True when compiling native code for use with robolectric_binary(). + is_robolectric = false + + # DON'T ADD MORE FLAGS HERE. Read the comment above. +} + +declare_args() { + # Debug build. Enabling official builds automatically sets is_debug to false. + is_debug = !is_official_build +} + +declare_args() { + # Component build. Setting to true compiles targets declared as "components" + # as shared libraries loaded dynamically. This speeds up development time. + # When false, components will be linked statically. 
+ # + # For more information see + # https://chromium.googlesource.com/chromium/src/+/main/docs/component_build.md + is_component_build = is_debug && current_os != "ios" +} + +assert(!(is_debug && is_official_build), "Can't do official debug builds") +assert(!(current_os == "ios" && is_component_build), + "Can't use component build on iOS") + +# ============================================================================== +# TOOLCHAIN SETUP +# ============================================================================== +# +# Here we set the default toolchain, as well as the variable host_toolchain +# which will identify the toolchain corresponding to the local system when +# doing cross-compiles. When not cross-compiling, this will be the same as the +# default toolchain. +# +# We do this before anything else to make sure we complain about any +# unsupported os/cpu combinations as early as possible. + +if (host_toolchain == "") { + # This should only happen in the top-level context. + # In a specific toolchain context, the toolchain_args() + # block should have propagated a value down. + # TODO(dpranke): Add some sort of assert here that verifies that + # no toolchain omitted host_toolchain from its toolchain_args(). + + if (host_os == "linux") { + if (target_os != "linux") { + host_toolchain = "//build/toolchain/linux:clang_$host_cpu" + } else if (is_clang) { + host_toolchain = "//build/toolchain/linux:clang_$host_cpu" + } else { + host_toolchain = "//build/toolchain/linux:$host_cpu" + } + } else if (host_os == "mac") { + host_toolchain = "//build/toolchain/mac:clang_$host_cpu" + } else if (host_os == "win") { + # On Windows always use the target CPU for host builds for x86/x64. On the + # configurations we support this will always work and it saves build steps. + # Windows ARM64 targets require an x64 host for cross build. + if (target_cpu == "x86" || target_cpu == "x64") { + if (is_clang) { + host_toolchain = "//build/toolchain/win:win_clang_$target_cpu" + } else { + host_toolchain = "//build/toolchain/win:$target_cpu" + } + } else if (is_clang) { + host_toolchain = "//build/toolchain/win:win_clang_$host_cpu" + } else { + host_toolchain = "//build/toolchain/win:$host_cpu" + } + } else if (host_os == "aix") { + host_toolchain = "//build/toolchain/aix:$host_cpu" + } else if (host_os == "zos") { + host_toolchain = "//build/toolchain/zos:$host_cpu" + } else { + assert(false, "Unsupported host_os: $host_os") + } +} + +_default_toolchain = "" + +if (target_os == "android") { + assert(host_os == "linux", "Android builds are only supported on Linux.") + _default_toolchain = "//build/toolchain/android:android_clang_$target_cpu" +} else if (target_os == "chromeos" || target_os == "linux") { + # See comments in build/toolchain/cros/BUILD.gn about board compiles. + if (is_clang) { + _default_toolchain = "//build/toolchain/linux:clang_$target_cpu" + } else { + _default_toolchain = "//build/toolchain/linux:$target_cpu" + } +} else if (target_os == "fuchsia") { + _default_toolchain = "//build/toolchain/fuchsia:$target_cpu" +} else if (target_os == "ios") { + _default_toolchain = "//build/toolchain/ios:ios_clang_$target_cpu" +} else if (target_os == "mac") { + assert(host_os == "mac" || host_os == "linux", + "Mac cross-compiles are unsupported.") + _default_toolchain = "//build/toolchain/mac:clang_$target_cpu" +} else if (target_os == "win") { + # On Windows, we use the same toolchain for host and target by default. 
+  # Beware, win cross builds have some caveats, see docs/win_cross.md
+  if (is_clang) {
+    _default_toolchain = "//build/toolchain/win:win_clang_$target_cpu"
+  } else {
+    _default_toolchain = "//build/toolchain/win:$target_cpu"
+  }
+} else if (target_os == "winuwp") {
+  # Only target WinUWP for a Windows Store application; only x86, x64, arm,
+  # and arm64 are supported target CPUs.
+  assert(target_cpu == "x86" || target_cpu == "x64" || target_cpu == "arm" ||
+         target_cpu == "arm64")
+  _default_toolchain = "//build/toolchain/win:uwp_$target_cpu"
+} else if (target_os == "aix") {
+  _default_toolchain = "//build/toolchain/aix:$target_cpu"
+} else if (target_os == "zos") {
+  _default_toolchain = "//build/toolchain/zos:$target_cpu"
+} else {
+  assert(false, "Unsupported target_os: $target_os")
+}
+
+# If a custom toolchain has been set in the args, set it as default. Otherwise,
+# set the default toolchain for the platform (if any).
+if (custom_toolchain != "") {
+  set_default_toolchain(custom_toolchain)
+} else if (_default_toolchain != "") {
+  set_default_toolchain(_default_toolchain)
+}
+
+# =============================================================================
+# OS DEFINITIONS
+# =============================================================================
+#
+# We set these various is_FOO booleans for convenience in writing OS-based
+# conditions.
+#
+# - is_android, is_chromeos, is_ios, and is_win should be obvious.
+# - is_mac is set only for desktop Mac. It is not set on iOS.
+# - is_posix is true for mac and any Unix-like system (basically everything
+#   except Fuchsia and Windows).
+# - is_linux is true for desktop Linux, but not for ChromeOS or Android (which
+#   is generally too different despite being based on the Linux kernel).
+#
+# Do not add more is_* variants here for random lesser-used Unix systems like
+# aix or one of the BSDs. If you need to check these, just check the
+# current_os value directly.
+
+is_android = current_os == "android"
+is_chromeos = current_os == "chromeos"
+is_fuchsia = current_os == "fuchsia"
+is_ios = current_os == "ios"
+is_linux = current_os == "linux"
+is_mac = current_os == "mac"
+is_nacl = current_os == "nacl"
+is_win = current_os == "win" || current_os == "winuwp"
+
+is_apple = is_ios || is_mac
+is_posix = !is_win && !is_fuchsia
+
+# =============================================================================
+# TARGET DEFAULTS
+# =============================================================================
+#
+# Set up the default configuration for every build target of the given type.
+# The values configured here will be automatically set on the scope of the
+# corresponding target. Target definitions can add to or remove from the
+# settings here as needed.
+#
+# WHAT GOES HERE?
+#
+# Other than the main compiler and linker configs, the only reason for a config
+# to be in this list is if some targets need to explicitly override that config
+# by removing it. This is how targets opt-out of flags. If you don't have that
+# requirement and just need to add a config everywhere, reference it as a
+# sub-config of an existing one, most commonly the main "compiler" one.

+# Holds all configs used for running the compiler.
+default_compiler_configs = [ + "//build/config:feature_flags", + "//build/config/compiler:afdo", + "//build/config/compiler:afdo_optimize_size", + "//build/config/compiler:cet_shadow_stack", + "//build/config/compiler:chromium_code", + "//build/config/compiler:compiler", + "//build/config/compiler:compiler_arm_fpu", + "//build/config/compiler:compiler_arm_thumb", + "//build/config/compiler:default_include_dirs", + "//build/config/compiler:default_init_stack_vars", + "//build/config/compiler:default_optimization", + "//build/config/compiler:default_stack_frames", + "//build/config/compiler:default_symbols", + "//build/config/compiler:export_dynamic", + "//build/config/compiler:no_exceptions", + "//build/config/compiler:no_rtti", + "//build/config/compiler:no_unresolved_symbols", + "//build/config/compiler:runtime_library", + "//build/config/compiler:thin_archive", + "//build/config/compiler:thinlto_optimize_default", + "//build/config/compiler/pgo:default_pgo_flags", + "//build/config/coverage:default_coverage", + "//build/config/sanitizers:default_sanitizer_flags", +] + +if (is_win) { + default_compiler_configs += [ + "//build/config/win:default_cfg_compiler", + "//build/config/win:default_crt", + "//build/config/win:lean_and_mean", + "//build/config/win:nominmax", + "//build/config/win:unicode", + "//build/config/win:winver", + ] +} + +if (is_posix) { + if (current_os != "aix") { + default_compiler_configs += + [ "//build/config/gcc:symbol_visibility_hidden" ] + } +} + +if (is_fuchsia) { + default_compiler_configs += [ "//build/config/gcc:symbol_visibility_hidden" ] +} + +if (is_android) { + default_compiler_configs += + [ "//build/config/android:default_orderfile_instrumentation" ] +} + +if (is_clang && !is_nacl) { + default_compiler_configs += [ + "//build/config/clang:find_bad_constructs", + "//build/config/clang:extra_warnings", + ] +} + +# Debug/release-related defines. +if (is_debug) { + default_compiler_configs += [ "//build/config:debug" ] +} else { + default_compiler_configs += [ "//build/config:release" ] +} + +# Static libraries and source sets use only the compiler ones. +set_defaults("static_library") { + configs = default_compiler_configs + + # For Rust, a static library involves linking in all dependencies, and it + # performs LTO. But since we will perform LTO in the C++ linker which + # consumes the library, we defer LTO from Rust into the linker. + configs += [ "//build/config/compiler:rust_defer_lto_to_linker" ] +} +set_defaults("source_set") { + configs = default_compiler_configs +} +set_defaults("rust_library") { + configs = default_compiler_configs +} + +# Compute the set of configs common to all linked targets (shared libraries, +# loadable modules, executables) to avoid duplication below. +if (is_win) { + # Many targets remove these configs, so they are not contained within + # //build/config:executable_config for easy removal. + _linker_configs = [ + "//build/config/win:default_incremental_linking", + + # Default to console-mode apps. Most of our targets are tests and such + # that shouldn't use the windows subsystem. + "//build/config/win:console", + ] +} else if (is_apple) { + _linker_configs = [ "//build/config/apple:strip_all" ] +} else { + _linker_configs = [] +} + +# Executable defaults. +default_executable_configs = default_compiler_configs + [ + "//build/config:default_libs", + "//build/config:executable_config", + ] + _linker_configs + +if (is_win) { + # Turn on linker CFI for executables, and position it so it can be removed + # if needed. 
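+  # (Here "linker CFI" presumably refers to Control Flow Guard, MSVC's
+  # /guard:cf switch.)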
+ default_executable_configs += [ "//build/config/win:cfi_linker" ] +} + +set_defaults("executable") { + configs = default_executable_configs +} + +# Shared library and loadable module defaults (also for components in component +# mode). +default_shared_library_configs = default_compiler_configs + [ + "//build/config:default_libs", + "//build/config:shared_library_config", + ] + _linker_configs +if (is_win) { + # Turn on linker CFI for DLLs, and position it so it can be removed if needed. + default_shared_library_configs += [ "//build/config/win:cfi_linker" ] +} + +if (is_android) { + # Strip native JNI exports from shared libraries by default. Binaries that + # want this can remove this config. + default_shared_library_configs += + [ "//build/config/android:hide_all_but_jni_onload" ] +} +set_defaults("shared_library") { + configs = default_shared_library_configs +} +set_defaults("loadable_module") { + configs = default_shared_library_configs + + # loadable_modules are generally used by other libs, not just via JNI. + if (is_android) { + configs -= [ "//build/config/android:hide_all_but_jni_onload" ] + } +} + +default_rust_proc_macro_configs = + default_shared_library_configs + [ "//build/rust:proc_macro_extern" ] + + # Rust proc macros don't support (Thin)LTO, so always remove it. + [ + "//build/config/compiler:thinlto_optimize_default", + "//build/config/compiler:thinlto_optimize_max", + ] - + [ + "//build/config/compiler:thinlto_optimize_default", + "//build/config/compiler:thinlto_optimize_max", + ] + +set_defaults("rust_proc_macro") { + configs = default_rust_proc_macro_configs +} + +# A helper for forwarding testonly and visibility. +# Forwarding "*" does not include variables from outer scopes (to avoid copying +# all globals into each template invocation), so it will not pick up +# file-scoped or outer-template-scoped variables. Normally this behavior is +# desired, but "visibility" and "testonly" are commonly defined in outer scopes. +# Explicitly forwarding them in forward_variables_from() works around this +# nuance. See //build/docs/writing_gn_templates.md#using-forward_variables_from +TESTONLY_AND_VISIBILITY = [ + "testonly", + "visibility", +] + +# Sets default dependencies for executable and shared_library targets. +# +# Variables +# no_default_deps: If true, no standard dependencies will be added. +# Targets that set this usually also want to remove +# "//build/config/compiler:runtime_library" from configs (to remove +# its subconfig "//build/config/c++:runtime_library"). +foreach(_target_type, + [ + "executable", + "loadable_module", + "shared_library", + "rust_bin", + "rust_dylib", + "rust_cdylib", + ]) { + template(_target_type) { + # Alias "target_name" because it is clobbered by forward_variables_from(). + _target_name = target_name + target(_target_type, _target_name) { + forward_variables_from(invoker, + "*", + TESTONLY_AND_VISIBILITY + [ "no_default_deps" ]) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + if (!defined(deps)) { + deps = [] + } + if (!defined(invoker.no_default_deps) || !invoker.no_default_deps) { + # This pulls in one of: + # //build/config:executable_deps + # //build/config:loadable_module_deps + # //build/config:shared_library_deps + # (This explicit list is so that grepping for these configs finds where + # they are used.) + deps += [ "//build/config:${_target_type}_deps" ] + } + + # On Android, write shared library output file to metadata. 
We will use
+      # this information to, for instance, collect all shared libraries that
+      # should be packaged into an APK.
+      if (!defined(invoker.metadata) && (is_android || is_robolectric) &&
+          (_target_type == "shared_library" ||
+           _target_type == "loadable_module")) {
+        _output_name = _target_name
+        if (defined(invoker.output_name)) {
+          _output_name = invoker.output_name
+        }
+
+        # Remove 'lib' prefix from output name if it exists.
+        _magic_prefix = "$0x01$0x01"
+        _output_name = string_replace("${_magic_prefix}${_output_name}",
+                                      "${_magic_prefix}lib",
+                                      _magic_prefix,
+                                      1)
+        _output_name = string_replace(_output_name, _magic_prefix, "", 1)
+
+        if (defined(output_extension)) {
+          _shlib_extension = ".$output_extension"
+        } else if (is_component_build && _target_type != "loadable_module") {
+          _shlib_extension = ".cr.so"
+        } else {
+          _shlib_extension = ".so"
+        }
+
+        metadata = {
+          shared_libraries =
+              [ "$root_out_dir/lib${_output_name}${_shlib_extension}" ]
+        }
+      }
+    }
+  }
+}
+
+# ==============================================================================
+# COMPONENT SETUP
+# ==============================================================================
+
+# Defines a component, which equates to a shared_library when
+# is_component_build == true and a static_library otherwise.
+#
+# Use static libraries for the static build rather than source sets because
+# many of our test binaries link many large dependencies but often don't
+# use large portions of them. The static libraries are much more efficient to
+# link in this situation since only the necessary object files are linked.
+#
+# The invoker can override the type of the target in the non-component-build
+# case by setting static_component_type to either "source_set" or
+# "static_library". If unset, the default will be used.
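+#
+# Example (editor's sketch; target and file names are hypothetical):
+#
+#   component("foo") {
+#     sources = [ "foo.cc" ]
+#   }
+#
+# builds as a shared_library when is_component_build is true and as a
+# static_library otherwise, while
+#
+#   component("foo_set") {
+#     sources = [ "foo.cc" ]
+#     static_component_type = "source_set"
+#   }
+#
+# always builds as a source_set in the non-component case.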
+ _component_mode = "source_set" + } else { + _component_mode = "static_library" + } + target(_component_mode, target_name) { + if (defined(_output_name)) { + output_name = _output_name + } + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY) + } +} + +# Component defaults +# Set a variable since we also want to make this available +# to mixed_component.gni +if (is_component_build) { + default_component_configs = default_shared_library_configs + if (is_android) { + default_component_configs -= + [ "//build/config/android:hide_all_but_jni_onload" ] + } +} else { + default_component_configs = default_compiler_configs +} + +set_defaults("component") { + configs = default_component_configs +} + +# ============================================================================= +# ACTION OVERRIDE +# ============================================================================= +# +# We override gn action() to support remote execution using rewrapper. The +# invoker should set allow_remote to true if remote execution is desired. +# +# As remote execution requires inputs to be made more explicit than is normally +# expected with gn, you may find that setting allow_remote to true will result +# in many missing file errors. In most cases, this should be resolved by +# explicitly declaring these inputs/sources. +# +# However, it may be impractical to determine these inputs in gn. For such +# cases, the invoker can specify a custom input processor, which are currently +# defined and implemented in //build/util/action_remote.py. The appropriate +# value should be set using the custom_processor arg. + +# Variables needed by rbe.gni aren't available at the top of this file. +import("//build/toolchain/rbe.gni") + +# TODO(b/253987456): Add action_foreach support. +foreach(_target_type, [ "action" ]) { + template(_target_type) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + forward_variables_from(invoker, [ "allow_remote" ]) + action("${target_name}") { + forward_variables_from(invoker, + [ + "args", + "assert_no_deps", + "check_includes", + "configs", + "data_deps", + "data", + "depfile", + "deps", + "metadata", + "outputs", + "pool", + "script", + "public_configs", + "public_deps", + "response_file_contents", + "sources", + "write_runtime_deps", + ]) + allow_remote = false + if (defined(invoker.allow_remote)) { + allow_remote = invoker.allow_remote + } + + # If remote execution is desired, only run remotely when use_remoteexec + # is enabled, and the environment is not nacl. + # TODO(b/259381924): Investigate enabling in nacl config. + if (allow_remote && use_remoteexec && !is_nacl) { + pool = "//build/toolchain:remote_action_pool($default_toolchain)" + script = "//build/util/action_remote.py" + inputs = [ invoker.script ] + + re_inputs = [ rebase_path(invoker.script, rbe_exec_root) ] + if (defined(invoker.inputs)) { + foreach(input, invoker.inputs) { + re_inputs += [ rebase_path(input, rbe_exec_root) ] + inputs += [ input ] + } + } + if (defined(invoker.sources)) { + foreach(source, invoker.sources) { + re_inputs += [ rebase_path(source, rbe_exec_root) ] + } + } + + re_outputs = [] + if (defined(invoker.outputs)) { + foreach(output, invoker.outputs) { + re_outputs += [ rebase_path(output, rbe_exec_root) ] + } + } + + # Write input/output lists to files as these can grow extremely large. 
+ re_inputs_file = "$target_gen_dir/${target_name}__remote_inputs.rsp" + write_file(re_inputs_file, re_inputs) + inputs += [ re_inputs_file ] + re_outputs_file = "$target_gen_dir/${target_name}__remote_outputs.rsp" + write_file(re_outputs_file, re_outputs) + + args = [] + args += [ "$rbe_bin_dir/rewrapper" ] + if (defined(invoker.custom_processor)) { + args += [ "--custom_processor=" + invoker.custom_processor ] + } + + args += [ + "--cfg=$rbe_py_cfg_file", + "--exec_root=$rbe_exec_root", + "--input_list_paths=" + rebase_path(re_inputs_file, root_build_dir), + "--output_list_paths=" + rebase_path(re_outputs_file, root_build_dir), + "python3", + rebase_path(invoker.script, root_build_dir), + ] + + if (defined(invoker.args)) { + args += invoker.args + } + } else { + forward_variables_from(invoker, [ "inputs" ]) + not_needed(invoker, [ "custom_processor" ]) + } + } + } +} diff --git a/config/OWNERS b/config/OWNERS new file mode 100644 index 000000000000..580fa2ef22ad --- /dev/null +++ b/config/OWNERS @@ -0,0 +1,4 @@ +per-file ozone.gni=file://ui/ozone/OWNERS +per-file ozone_extra.gni=file://ui/ozone/OWNERS +per-file rust.gni=file://build/rust/OWNERS +per-file chromecast_build.gni=file://build/config/chromecast/OWNERS diff --git a/config/aix/BUILD.gn b/config/aix/BUILD.gn new file mode 100644 index 000000000000..6e55c83938fc --- /dev/null +++ b/config/aix/BUILD.gn @@ -0,0 +1,61 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/sanitizers/sanitizers.gni") +import("//build/toolchain/toolchain.gni") + +# This is included by reference in the //build/config/compiler config that +# is applied to all targets. It is here to separate out the logic. + +config("compiler") { + # These flags are shared between the C compiler and linker. + defines = [ + "_LINUX_SOURCE_COMPAT=1", + "__STDC_FORMAT_MACROS", + "_ALL_SOURCE=1", + ] + + cflags = [ + "-Wall", + "-Wno-unused-parameter", + "-pthread", + "-Wmissing-field-initializers", + "-Wno-uninitialized", + "-mcpu=power5+", + "-mfprnd", + "-mno-popcntb", + "-maix64", + "-fdata-sections", + "-ffunction-sections", + "-fno-extern-tls-init", + "-O3", + + # "-Werror" + # We need to find a way to fix the TOC warnings if we want to enable this. + ] + + cflags_cc = [ + "-fno-rtti", + "-fno-exceptions", + "-Wno-narrowing", + "-Wno-non-virtual-dtor", + ] + + ldflags = [ + "-pthread", + "-maix64", + "-Wl,-bbigtoc", + ] + + if (is_component_build) { + cflags += [ "-fpic" ] + ldflags += [ + "-Wl,-brtl", + + # -bnoipath so that only names of .so objects are stored in loader + # section, excluding leading "./" + "-Wl,-bnoipath", + ] + } +} diff --git a/config/android/BUILD.gn b/config/android/BUILD.gn new file mode 100644 index 000000000000..63b37e0c17c6 --- /dev/null +++ b/config/android/BUILD.gn @@ -0,0 +1,173 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/rules.gni") +import("//build/config/c++/c++.gni") +import("//build/config/compiler/compiler.gni") +import("//build/config/sanitizers/sanitizers.gni") + +if (current_toolchain == default_toolchain) { + import("//build/toolchain/concurrent_links.gni") +} + +assert(is_android) + +# This is included by reference in the //build/config/compiler config that +# is applied to all targets. It is here to separate out the logic that is +# Android-only. 
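+#
+# Editor's aside: "included by reference" means, roughly, that the global
+# //build/config/compiler:compiler config lists this one as a sub-config,
+# along the lines of:
+#   config("compiler") {
+#     configs = [ "//build/config/android:compiler" ]
+#   }
+# so the flags below reach every target. This is a sketch of the mechanism,
+# not the exact upstream wiring.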
+config("compiler") { + cflags = [ + "-ffunction-sections", + "-fno-short-enums", + ] + defines = [ + "ANDROID", + + # The NDK has these things, but doesn't define the constants to say that it + # does. Define them here instead. + "HAVE_SYS_UIO_H", + + # Forces full rebuilds on NDK rolls. To rebuild everything when NDK version + # stays the same, increment the suffix number. + "ANDROID_NDK_VERSION_ROLL=${android_ndk_version}_1", + ] + + ldflags = [ + # Don't allow visible symbols from libraries that contain + # assembly code with symbols that aren't hidden properly. + # http://crbug.com/448386 + "-Wl,--exclude-libs=libvpx_assembly_arm.a", + ] + + if (current_cpu == "arm64") { + # Reduce the page size from 65536 in order to reduce binary size slightly + # by shrinking the alignment gap between segments. This also causes all + # segments to be mapped adjacently, which breakpad relies on. + ldflags += [ "-Wl,-z,max-page-size=4096" ] + } + + if (current_cpu == "arm64") { + if (arm_control_flow_integrity == "standard") { + cflags += [ "-mbranch-protection=standard" ] + rustflags = [ "-Zbranch-protection=bti" ] + } else if (arm_control_flow_integrity == "pac") { + cflags += [ "-mbranch-protection=pac-ret" ] + rustflags = [ "-Zbranch-protection=pac-ret" ] + } + } + + # Instead of using an unwind lib from the toolchain, + # buildtools/third_party/libunwind will be built and used directly. + ldflags += [ "--unwindlib=none" ] + + # $compile_api_level corresponds to the API level used for the sysroot path + # calculation in //build/config/android/config.gni + if (android_64bit_target_cpu) { + compile_api_level = android64_ndk_api_level + } else { + compile_api_level = android32_ndk_api_level + } + + cflags += [ "--target=$android_abi_target$compile_api_level" ] + ldflags += [ "--target=$android_abi_target$compile_api_level" ] + + # Assign any flags set for the C compiler to asmflags so that they are sent + # to the assembler. + asmflags = cflags +} + +# This is included by reference in the //build/config/compiler:runtime_library +# config that is applied to all targets. It is here to separate out the logic +# that is Android-only. Please see that target for advice on what should go in +# :runtime_library vs. :compiler. +config("runtime_library") { + libs = [] + ldflags = [] + + # On 64-bit platforms, the only symbols provided by libandroid_support.a are + # strto{d,f,l,ul}_l. These symbols are not used by our libc++, and newer NDKs + # don't provide a libandroid_support.a on 64-bit platforms, so we only depend + # on this library on 32-bit platforms. + if (current_cpu == "arm" || current_cpu == "x86") { + libs += [ "android_support" ] + } + + if (current_cpu == "arm" && arm_version == 6) { + libs += [ "atomic" ] + } +} + +config("hide_all_but_jni_onload") { + ldflags = [ "-Wl,--version-script=" + rebase_path( + "//build/android/android_only_explicit_jni_exports.lst", + root_build_dir) ] +} + +config("hide_all_but_jni") { + ldflags = [ "-Wl,--version-script=" + + rebase_path("//build/android/android_only_jni_exports.lst", + root_build_dir) ] +} + +config("lld_pack_relocations") { + ldflags = [ "-Wl,--pack-dyn-relocs=android" ] +} + +config("lld_relr_relocations") { + # RELR supported API 30+, but supported 28+ with --use-android-relr-tags. 
+  # https://android.googlesource.com/platform/bionic/+/master/android-changes-for-ndk-developers.md#relative-relocations-relr
+  ldflags = [ "-Wl,--pack-dyn-relocs=relr,--use-android-relr-tags" ]
+}
+
+config("lld_branch_target_hardening") {
+  # Config opts a shared library into BTI linker hardening. This
+  # is an opt-in config (rather than default-enabled) to avoid
+  # interfering with the V8 CFI bots (crbug.com/1334614).
+  if (current_cpu == "arm64") {
+    if (arm_control_flow_integrity == "standard") {
+      # Linking objects without GNU_PROPERTY_AARCH64_FEATURE_1_BTI
+      # in their .gnu.note section implicitly results in the final
+      # binary losing Branch Target Identification (BTI) support.
+      # Issue a warning if this happens.
+      ldflags = [ "-Wl,-z,force-bti" ]
+    }
+  }
+}
+
+# Used for instrumented build to generate the orderfile.
+config("default_orderfile_instrumentation") {
+  if (use_order_profiling) {
+    cflags = [ "-finstrument-function-entry-bare" ]
+    if (use_thin_lto) {
+      # TODO(pcc): This should not be necessary. Remove once
+      # https://reviews.llvm.org/D50016 lands and gets rolled in.
+      ldflags = [ "-Wl,-u,__cyg_profile_func_enter_bare" ]
+    }
+  }
+}
+
+config("jni_include_dir") {
+  include_dirs = [ jni_headers_dir ]
+}
+
+if (current_toolchain == default_toolchain) {
+  pool("goma_javac_pool") {
+    # Override action_pool when goma is enabled for javac.
+    depth = 10000
+  }
+
+  # nocompile tests share output directory to avoid them all needing to rebuild
+  # things. But this also means they can't run in parallel.
+  pool("nocompile_pool") {
+    depth = 1
+  }
+
+  # When defined, this pool should be used instead of link_pool for commands
+  # that need 1-2 GB of RAM. https://crbug.com/1078460
+  if (defined(java_cmd_pool_size)) {
+    pool("java_cmd_pool") {
+      depth = java_cmd_pool_size
+    }
+  }
+}
diff --git a/config/android/DIR_METADATA b/config/android/DIR_METADATA
new file mode 100644
index 000000000000..cdc2d6fb6eb6
--- /dev/null
+++ b/config/android/DIR_METADATA
@@ -0,0 +1 @@
+mixins: "//build/android/COMMON_METADATA"
diff --git a/config/android/OWNERS b/config/android/OWNERS
new file mode 100644
index 000000000000..a74cfbe228b5
--- /dev/null
+++ b/config/android/OWNERS
@@ -0,0 +1 @@
+file://build/android/OWNERS
diff --git a/config/android/abi.gni b/config/android/abi.gni
new file mode 100644
index 000000000000..e044ac6745b5
--- /dev/null
+++ b/config/android/abi.gni
@@ -0,0 +1,107 @@
+# Copyright 2017 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Logic separated out from config.gni so that it can be used by compiler.gni
+# without introducing a circular dependency.
+
+# NOTE: Because Chrome OS builds may depend on targets built with the Android
+# toolchain, this GNI file may be read and processed from within Chrome OS
+# toolchains. Checking |is_android| here would therefore be too restrictive.
+assert(is_android || is_chromeos)
+
+declare_args() {
+  # Adds instrumentation to each function. Writes a file with the order that
+  # functions are called at startup.
+  use_order_profiling = false
+
+  # Only effective if use_order_profiling = true. When this is true,
+  # instrumentation switches from startup profiling after a delay, and
+  # then waits for a devtools memory dump request to dump all
+  # profiling information. When false, the same delay is used to switch from
+  # startup, and then after a second delay all profiling information is dumped.
+  # See base::android::orderfile::StartDelayedDump for more information.
+  devtools_instrumentation_dumping = false
+
+  # Only effective if use_order_profiling = true. When this is true, the
+  # call-graph-based instrumentation is used.
+  use_call_graph = false
+
+  # Build additional browser splits with HWASAN instrumentation enabled.
+  build_hwasan_splits = false
+
+  # *For CQ purposes only* Leads to non-working APKs.
+  # Forces all APKs/bundles to be 64-bit only to improve build speed in the CQ
+  # (no need to also build 32-bit library).
+  skip_secondary_abi_for_cq = false
+}
+
+assert(!devtools_instrumentation_dumping || use_order_profiling,
+       "devtools_instrumentation_dumping requires use_order_profiling")
+assert(!use_call_graph || use_order_profiling,
+       "use_call_graph requires use_order_profiling")
+
+if (current_cpu == "x86") {
+  android_app_abi = "x86"
+  android_abi_target = "i686-linux-android"
+} else if (current_cpu == "arm") {
+  import("//build/config/arm.gni")
+  if (arm_version < 7) {
+    android_app_abi = "armeabi"
+  } else {
+    android_app_abi = "armeabi-v7a"
+  }
+  android_abi_target = "arm-linux-androideabi"
+} else if (current_cpu == "mipsel") {
+  android_app_abi = "mips"
+  android_abi_target = "mipsel-linux-android"
+} else if (current_cpu == "x64") {
+  android_app_abi = "x86_64"
+
+  # Placeholder for x64 support, not tested.
+  # TODO: Enable clang support for Android x64. http://crbug.com/539781
+  android_abi_target = "x86_64-linux-android"
+} else if (current_cpu == "arm64") {
+  android_app_abi = "arm64-v8a"
+  android_abi_target = "aarch64-linux-android"
+} else if (current_cpu == "mips64el") {
+  android_app_abi = "mips64"
+
+  # Placeholder for mips64 support, not tested.
+  android_abi_target = "mips64el-linux-android"
+} else if (current_cpu == "riscv64") {
+  android_app_abi = "riscv64"
+
+  # Placeholder for riscv64 support, not tested.
+  android_abi_target = "riscv64-linux-android"
+} else {
+  assert(false, "Unknown Android ABI: " + current_cpu)
+}
+
+if (target_cpu == "arm64" || target_cpu == "x64" || target_cpu == "mips64el" ||
+    target_cpu == "riscv64") {
+  android_64bit_target_cpu = true
+} else if (target_cpu == "arm" || target_cpu == "x86" ||
+           target_cpu == "mipsel") {
+  android_64bit_target_cpu = false
+} else {
+  assert(false, "Unknown target CPU: $target_cpu")
+}
+
+# Intentionally do not define android_secondary_abi_cpu and
+# android_app_secondary_abi for 32-bit target_cpu, since they are not used.
+if (target_cpu == "arm64") {
+  android_secondary_abi_cpu = "arm"
+  android_app_secondary_abi = "armeabi-v7a"
+} else if (target_cpu == "x64") {
+  android_secondary_abi_cpu = "x86"
+  android_app_secondary_abi = "x86"
+} else if (target_cpu == "mips64el") {
+  android_secondary_abi_cpu = "mipsel"
+  android_app_secondary_abi = "mips"
+}
+
+if (defined(android_secondary_abi_cpu)) {
+  android_secondary_abi_toolchain =
+      "//build/toolchain/android:android_clang_${android_secondary_abi_cpu}"
+}
diff --git a/config/android/android_nocompile.gni b/config/android/android_nocompile.gni
new file mode 100644
index 000000000000..0b3f517bd976
--- /dev/null
+++ b/config/android/android_nocompile.gni
@@ -0,0 +1,114 @@
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+declare_args() {
+  # Used by tests to enable generating build files for GN targets which should
+  # not compile.
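+  # Editor's aside: i.e., enabled from args.gn when generating the
+  # no-compile test configuration, e.g.:
+  #   enable_android_nocompile_tests = true
+  # (sketch only).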
+  enable_android_nocompile_tests = false
+}
+
+# Defines a test suite which checks that the 'test targets' fail to compile.
+# The test suite runs 'gn gen' with a custom output directory and attempts to
+# compile each test target.
+#
+# All of the tests should be defined in the same dedicated BUILD.gn file in
+# order to minimize the number of targets that are processed by 'gn gen'.
+#
+# Variables
+#   tests: List of test configurations. A test configuration has the following
+#     keys:
+#     'target': The GN target which should not compile when
+#       enable_android_nocompile_tests=true. The target should compile when
+#       enable_android_nocompile_tests=false.
+#     'expected_compile_output_regex': Error message regex to search for when
+#       the compile fails.
+#     'nocompile_sources': Source files which do not compile. This ensures that
+#       the test suite is re-run when one of these files changes (as the test
+#       targets might not depend on the files when
+#       enable_android_nocompile_tests=false).
+template("android_nocompile_test_suite") {
+  assert(!enable_android_nocompile_tests)
+
+  action(target_name) {
+    testonly = true
+    script = "//build/android/gyp/nocompile_test.py"
+    pool = "//build/config/android:nocompile_pool"
+
+    _tests = invoker.tests
+    _test0 = _tests[0]
+    _test0_dir = get_label_info(_test0["target"], "dir")
+    _test0_target_out_dir = get_label_info(_test0["target"], "target_out_dir")
+    foreach(_test_config, _tests) {
+      assert(
+          _test0_dir == get_label_info(_test_config["target"], "dir"),
+          "To avoid running 'gn gen' for each test, all tests in an android_nocompile_test_suite() should be declared in the same BUILD.gn file")
+    }
+
+    deps = []
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+
+    sources = []
+    if (defined(invoker.sources)) {
+      sources += invoker.sources
+    }
+
+    # Depend on compile_java Python scripts so that the action is re-run
+    # whenever the script is modified.
+    _pydeps = [ "//build/android/gyp/compile_java.pydeps" ]
+    if (defined(invoker.pydeps)) {
+      _pydeps += invoker.pydeps
+    }
+
+    inputs = []
+    foreach(_pydeps_file, _pydeps) {
+      _pydeps_file_lines = []
+      _pydeps_file_lines = read_file(_pydeps_file, "list lines")
+      _pydeps_entries = []
+      _pydeps_entries = filter_exclude(_pydeps_file_lines, [ "#*" ])
+      _pydeps_file_dir = get_path_info(_pydeps_file, "dir")
+      inputs += rebase_path(_pydeps_entries, ".", _pydeps_file_dir)
+    }
+
+    _json_test_configs = []
+    foreach(_test_config, _tests) {
+      _test = _test_config["target"]
+      deps += [ _test ]
+      sources += _test_config["nocompile_sources"]
+      _dep_dir = get_label_info(_test, "dir")
+      _dep_name = get_label_info(_test, "name")
+      _json_test_configs += [
+        {
+          target = "${_dep_dir}:${_dep_name}"
+          expect_regex = _test_config["expected_compile_output_regex"]
+        },
+      ]
+    }
+
+    _config_path = "$target_gen_dir/${target_name}.nocompile_config"
+    write_file(_config_path, _json_test_configs, "json")
+
+    # Compute output directory for no-compile tests based on the directory
+    # containing test targets instead of based on the test suite target name.
+    # This avoids calling 'gn gen' for each android_nocompile_test_suite() for
+    # test suites whose tests are declared in the same BUILD.gn file.
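+    # Editor's aside: a sketch of how this template is typically invoked
+    # (names are hypothetical):
+    #   android_nocompile_test_suite("foo_nocompile_tests") {
+    #     tests = [
+    #       {
+    #         target = ":foo_nocompile_java"
+    #         expected_compile_output_regex = "Foo should not compile"
+    #         nocompile_sources = [ "Foo.java" ]
+    #       },
+    #     ]
+    #   }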
+ _out_dir = "${_test0_target_out_dir}/nocompile_out" + + _stamp_path = "${target_gen_dir}/${target_name}.stamp" + args = [ + "--gn-args-path", + "args.gn", + "--out-dir", + rebase_path(_out_dir, root_build_dir), + "--test-configs-path", + rebase_path(_config_path, root_build_dir), + "--stamp", + rebase_path(_stamp_path, root_build_dir), + ] + inputs += [ _config_path ] + outputs = [ _stamp_path ] + } +} diff --git a/config/android/build_vars.gni b/config/android/build_vars.gni new file mode 100644 index 000000000000..27866a7c1232 --- /dev/null +++ b/config/android/build_vars.gni @@ -0,0 +1,31 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/config.gni") + +# Contains useful GN variables that may be used by scripts that take +# --output-directory as an arg. +build_vars_file = "$root_build_dir/build_vars.json" + +android_build_vars_json = { + if (enable_java_templates) { + android_ndk_root = rebase_path(android_ndk_root, root_build_dir) + android_sdk_build_tools = + rebase_path(android_sdk_build_tools, root_build_dir) + android_sdk_build_tools_version = android_sdk_build_tools_version + android_sdk_root = rebase_path(android_sdk_root, root_build_dir) + android_sdk_version = android_sdk_version + android_tool_prefix = rebase_path(android_tool_prefix, root_build_dir) + default_min_sdk_version = default_min_sdk_version + final_android_sdk = final_android_sdk + public_android_sdk_version = public_android_sdk_version + + if (defined(android_secondary_abi_cpu)) { + android_secondary_abi_toolchain = + rebase_path(get_label_info(":foo($android_secondary_abi_toolchain)", + "root_out_dir"), + root_build_dir) + } + } +} diff --git a/config/android/channel.gni b/config/android/channel.gni new file mode 100644 index 000000000000..0f8d45337d35 --- /dev/null +++ b/config/android/channel.gni @@ -0,0 +1,14 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +declare_args() { + # The channel to build on Android: stable, beta, dev, canary, work, or + # default. "default" should be used on non-official builds. + android_channel = "default" +} + +assert(android_channel == "default" || android_channel == "canary" || + android_channel == "dev" || android_channel == "beta" || + android_channel == "stable", + "Invalid channel: " + android_channel) diff --git a/config/android/config.gni b/config/android/config.gni new file mode 100644 index 000000000000..5f4836726a3e --- /dev/null +++ b/config/android/config.gni @@ -0,0 +1,366 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file contains common system config stuff for the Android build. + +# NOTE: Because Chrome OS builds may depend on targets built with the Android +# toolchain, this GNI file may be read and processed from within Chrome OS +# toolchains. Checking |is_android| here would therefore be too restrictive. +if (is_android || is_chromeos) { + import("//build/config/android/channel.gni") + import("//build/config/chromecast_build.gni") + import("//build/config/dcheck_always_on.gni") + import("//build_overrides/build.gni") + import("abi.gni") + + if (build_with_chromium) { + # Some non-chromium projects (e.g. WebRTC) use our build configs + # heavily but don't write gclient args files. 
+ + import("//build/config/gclient_args.gni") + if (defined(checkout_android_native_support)) { + n = "$0x0A" # Newline + assert(checkout_android_native_support, + "Missing native Android toolchain support. |target_os| in your " + + ".gclient configuration file (in the parent directory of " + + "src) must include \"android\" and/or \"chromeos\". For " + + "example:${n}${n}solutions = [${n}...${n}]${n}" + + "target_os=[\"chromeos\"]${n}${n}" + + "After adding |target_os| please re-run \"gclient sync\".${n}") + } + } + + # We are using a separate declare_args block for only this argument so that + # we can decide if we have to pull in definitions from the internal config + # early. + declare_args() { + # Enables using the internal Chrome for Android repository. The default + # is set from gclient vars, with target_os needed for chromeos. + # Can be set to false to disable all internal android things. + enable_chrome_android_internal = + build_with_chromium && checkout_src_internal && target_os == "android" + + # The default to use for android:minSdkVersion for targets that do + # not explicitly set it. + default_min_sdk_version = 24 + + # Static analysis can be either "on" or "off" or "build_server". This + # controls how android lint, error-prone, bytecode checks are run. This + # needs to be in a separate declare_args as it determines some of the args + # in the main declare_args block below. + # "on" is the default. + # "off" avoids running any static analysis. This is the default for + # official builds to reduce build times. Failures in static analysis + # would have been caught by other bots. + # "build_server" ensures that fast_local_dev_server.py is running and + # offloads analysis tasks to it to be run after the build completes. + # This is the recommended setting for local development. + if (is_official_build) { + android_static_analysis = "off" + } else { + android_static_analysis = "on" + } + + # Build incremental targets whenever possible. + # See //build/android/incremental_install/README.md for more details. + incremental_install = false + } + + # Avoid typos when setting android_static_analysis in args.gn. + assert(android_static_analysis == "on" || android_static_analysis == "off" || + android_static_analysis == "build_server") + + # This configuration has not bot coverage and has broken multiple times. + # Warn against it. + assert(!(enable_chrome_android_internal && skip_secondary_abi_for_cq)) + + if (enable_chrome_android_internal) { + import("//clank/config.gni") + } else { + import("//build/config/android/sdk.gni") + declare_args() { + # Which Android SDK to use. 
+      android_sdk_release = default_android_sdk_release
+    }
+  }
+
+  if (!defined(default_android_ndk_root)) {
+    default_android_ndk_root = "//third_party/android_ndk"
+    default_android_ndk_version = "r23"
+    default_android_ndk_major_version = 23
+  } else {
+    assert(defined(default_android_ndk_version))
+    assert(defined(default_android_ndk_major_version))
+  }
+
+  public_android_sdk_root = "//third_party/android_sdk/public"
+  public_android_sdk_build_tools =
+      "${public_android_sdk_root}/build-tools/33.0.0"
+  public_android_sdk_version = "33"
+  if (android_sdk_release == "t") {
+    default_android_sdk_root = public_android_sdk_root
+    default_android_sdk_version = public_android_sdk_version
+    default_android_sdk_build_tools_version = "33.0.0"
+    public_android_sdk = true
+  }
+
+  if (android_sdk_release == "tprivacysandbox") {
+    default_android_sdk_root = public_android_sdk_root
+    default_android_sdk_version = "TiramisuPrivacySandbox"
+    default_android_sdk_build_tools_version = "33.0.0"
+    public_android_sdk = true
+  }
+
+  # For use downstream when we are building with a preview Android SDK.
+  if (!defined(final_android_sdk)) {
+    final_android_sdk = public_android_sdk
+  }
+
+  if (!defined(default_lint_android_sdk_root)) {
+    # Purposefully repeated so that downstream can change
+    # default_android_sdk_root without changing lint version.
+    default_lint_android_sdk_root = public_android_sdk_root
+    default_lint_android_sdk_version = 33
+  }
+
+  if (!defined(default_extras_android_sdk_root)) {
+    # Purposefully repeated so that downstream can change
+    # default_android_sdk_root without changing where we load the SDK extras
+    # from. (Google Play services, etc.)
+    default_extras_android_sdk_root = public_android_sdk_root
+  }
+
+  if (!defined(default_android_keystore_path)) {
+    default_android_keystore_path = "//build/android/chromium-debug.keystore"
+    default_android_keystore_name = "chromiumdebugkey"
+    default_android_keystore_password = "chromium"
+  }
+
+  # google_play_services_package contains the path where individual client
+  # targets (e.g. google_play_services_base_java) are located.
+  if (!defined(google_play_services_package)) {
+    google_play_services_package = "//third_party/android_deps"
+  }
+
+  if (!defined(android_protoc_bin)) {
+    android_protoc_bin = "//third_party/android_protoc/protoc"
+    android_proto_runtime =
+        "//third_party/android_deps:com_google_protobuf_protobuf_javalite_java"
+  }
+
+  webview_public_framework_dep =
+      "//third_party/android_sdk:public_framework_system_java"
+  if (!defined(webview_framework_dep)) {
+    webview_framework_dep = webview_public_framework_dep
+  }
+
+  assert(defined(default_android_sdk_root),
+         "SDK release " + android_sdk_release + " not recognized.")
+
+  declare_args() {
+    android_ndk_root = default_android_ndk_root
+    android_ndk_version = default_android_ndk_version
+    android_ndk_major_version = default_android_ndk_major_version
+
+    # Android API level for 32-bit platforms.
+    android32_ndk_api_level = default_min_sdk_version
+
+    # Android API level for 64-bit platforms.
+    android64_ndk_api_level = default_min_sdk_version
+
+    if (default_min_sdk_version < 21) {
+      # Android did not support 64-bit before API 21.
+      android64_ndk_api_level = 21
+    }
+
+    android_sdk_root = default_android_sdk_root
+    android_sdk_version = default_android_sdk_version
+    android_sdk_build_tools_version = default_android_sdk_build_tools_version
+
+    lint_android_sdk_root = default_lint_android_sdk_root
+    lint_android_sdk_version = default_lint_android_sdk_version
+
+    # Libc++ library directory. Override to use a custom libc++ binary.
+    android_libcpp_lib_dir = ""
+
+    # Android versionCode for android_apk()s that don't explicitly set one.
+    android_default_version_code = "1"
+
+    # Android versionName for android_apk()s that don't explicitly set one.
+    android_default_version_name = "Developer Build"
+
+    # Forced Android versionCode.
+    android_override_version_code = ""
+
+    # Forced Android versionName.
+    android_override_version_name = ""
+
+    # The path to the keystore to use for signing builds.
+    android_keystore_path = default_android_keystore_path
+
+    # The name of the keystore to use for signing builds.
+    android_keystore_name = default_android_keystore_name
+
+    # The password for the keystore to use for signing builds.
+    android_keystore_password = default_android_keystore_password
+
+    # Java debug on Android. Having this on enables multidexing, and turning
+    # it off will enable proguard.
+    is_java_debug = is_debug || incremental_install
+
+    # Mark APKs as android:debuggable="true".
+    debuggable_apks = !is_official_build
+
+    # Set to false to disable the Errorprone compiler.
+    use_errorprone_java_compiler = android_static_analysis != "off"
+
+    # When true, updates all android_aar_prebuilt() .info files during gn gen.
+    # Refer to android_aar_prebuilt() for more details.
+    update_android_aar_prebuilts = false
+
+    # Turns off android lint.
+    disable_android_lint = android_static_analysis == "off"
+
+    # Location of aapt2 used for app bundles. For now, a more recent version
+    # than the one distributed with the Android SDK is required.
+    android_sdk_tools_bundle_aapt2_dir =
+        "//third_party/android_build_tools/aapt2"
+
+    # Causes expectation failures to break the build; otherwise, just warns on
+    # stderr and writes a failure file to $expectations_failure_dir.
+    fail_on_android_expectations = false
+
+    # Controls whether proguard obfuscation is enabled for targets
+    # configured to use it.
+    enable_proguard_obfuscation = true
+
+    # Controls whether |short_resource_paths| and |strip_resource_names| are
+    # respected. Useful when trying to analyze APKs using tools that do not
+    # support mapping these names.
+    enable_arsc_obfuscation = true
+
+    # Controls whether |strip_unused_resources| is respected. Useful when
+    # trying to analyze APKs using tools that do not support missing resources
+    # from resources.arsc.
+    enable_unused_resource_stripping = true
+
+    # Controls whether |baseline_profile_path| is respected. Useful to disable
+    # baseline profiles.
+    # Currently disabled while bundletool does not support baseline profiles in
+    # non-base splits.
+    enable_baseline_profiles = false
+
+    # The target to use as the system WebView implementation.
+    if (android_64bit_target_cpu && skip_secondary_abi_for_cq) {
+      system_webview_apk_target = "//android_webview:system_webview_64_apk"
+    } else {
+      system_webview_apk_target = "//android_webview:system_webview_apk"
+    }
+
+    # Where to write failed expectations for bots to read.
+    expectations_failure_dir = "$root_build_dir/failed_expectations"
+  }
+
+  # We need a second declare_args block to make sure we are using the
+  # overridden value of the arguments set above.
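+  # Editor's aside: a sketch of why the split matters. Within a single
+  # declare_args() block, one arg's default cannot see a user's override of
+  # another arg declared in the same block:
+  #   declare_args() {
+  #     use_foo = true
+  #     use_foo_backend = use_foo  # Ignores "use_foo = false" in args.gn.
+  #   }
+  # Moving use_foo_backend into a second declare_args() block makes its
+  # default track the final (possibly overridden) value of use_foo. The
+  # names here are hypothetical.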
+  declare_args() {
+    if (defined(default_android_sdk_platform_version)) {
+      android_sdk_platform_version = default_android_sdk_platform_version
+    } else {
+      android_sdk_platform_version = android_sdk_version
+    }
+
+    # Whether java assertions and Preconditions checks are enabled.
+    enable_java_asserts = dcheck_always_on || !is_official_build
+
+    # Reduce build time by using d8 incremental build.
+    enable_incremental_d8 = true
+
+    # Use hashed symbol names to reduce JNI symbol overhead.
+    use_hashed_jni_names = !is_java_debug
+
+    # Enables JNI multiplexing to reduce JNI native methods overhead.
+    allow_jni_multiplexing = false
+
+    # Enables trace event injection on Android views with bytecode rewriting.
+    # This adds an additional step on android_app_bundle_module targets that
+    # adds trace events to some methods in android.view.View subclasses.
+    enable_trace_event_bytecode_rewriting =
+        !is_java_debug && android_channel != "stable"
+  }
+
+  assert(!incremental_install || is_java_debug,
+         "incremental_install=true && is_java_debug=false is not supported.")
+
+  # Host stuff -----------------------------------------------------------------
+
+  # Defines the name the Android build gives to the current host CPU
+  # architecture, which is different from the names GN uses.
+  if (host_cpu == "x64") {
+    android_host_arch = "x86_64"
+  } else if (host_cpu == "x86") {
+    android_host_arch = "x86"
+  } else {
+    assert(false, "Need Android toolchain support for your build CPU arch.")
+  }
+
+  # Defines the name the Android build gives to the current host OS, which is
+  # different from the names GN uses.
+  if (host_os == "linux") {
+    android_host_os = "linux"
+  } else if (host_os == "mac") {
+    android_host_os = "darwin"
+  } else {
+    assert(false, "Need Android toolchain support for your build OS.")
+  }
+
+  # Directories and files ------------------------------------------------------
+  #
+  # We define many of the dir strings here for each output architecture (rather
+  # than just the current one) since these are needed by the Android toolchain
+  # file to define toolchains for all possible targets in one pass.
+
+  android_sdk =
+      "${android_sdk_root}/platforms/android-${android_sdk_platform_version}"
+  android_sdk_build_tools =
+      "${android_sdk_root}/build-tools/$android_sdk_build_tools_version"
+
+  # Path to the SDK's android.jar
+  android_sdk_jar = "$android_sdk/android.jar"
+
+  # Location of libgcc. This is only needed for the current GN toolchain, so we
+  # only need to define the current one, rather than one for every platform
+  # like the toolchain roots.
+  if (current_cpu == "x86") {
+    android_prebuilt_arch = "android-x86"
+  } else if (current_cpu == "arm") {
+    android_prebuilt_arch = "android-arm"
+  } else if (current_cpu == "mipsel") {
+    android_prebuilt_arch = "android-mips"
+  } else if (current_cpu == "x64") {
+    android_prebuilt_arch = "android-x86_64"
+  } else if (current_cpu == "arm64") {
+    android_prebuilt_arch = "android-arm64"
+  } else if (current_cpu == "mips64el") {
+    android_prebuilt_arch = "android-mips64"
+  } else if (current_cpu == "riscv64") {
+    # Placeholder for riscv64 support, not tested.
+ android_prebuilt_arch = "android-riscv64" + } else { + assert(false, "Need android libgcc support for your target arch.") + } + + android_toolchain_root = "$android_ndk_root/toolchains/llvm/prebuilt/${android_host_os}-${android_host_arch}" + android_ndk_library_path = "$android_toolchain_root/lib64" + android_tool_prefix = "$android_toolchain_root/bin/llvm-" + android_readelf = "${android_tool_prefix}readobj" + android_objcopy = "${android_tool_prefix}objcopy" + android_gdbserver = + "$android_ndk_root/prebuilt/$android_prebuilt_arch/gdbserver/gdbserver" + + android_sdk_tools_bundle_aapt2 = "${android_sdk_tools_bundle_aapt2_dir}/aapt2" + + # Toolchain used to create native libraries for robolectric_binary() targets. + robolectric_toolchain = "//build/toolchain/android:robolectric_$host_cpu" +} diff --git a/config/android/copy_ex.gni b/config/android/copy_ex.gni new file mode 100644 index 000000000000..8e70c3014399 --- /dev/null +++ b/config/android/copy_ex.gni @@ -0,0 +1,72 @@ +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Copy a list of file into a destination directory. Potentially renaming +# files are they are copied. This also ensures that symlinks are followed +# during the copy (i.e. the symlinks are never copied, only their content). +# +# Variables: +# dest: Destination directory path. +# sources: List of source files or directories to copy to dest. +# renaming_sources: Optional list of source file paths that will be renamed +# during the copy operation. If provided, renaming_destinations is required. +# renaming_destinations: Optional list of destination file paths, required +# when renaming_sources is provided. Both lists should have the same size +# and matching entries. +# args: Optional. Additionnal arguments to the copy_ex.py script. +# +# The following variables have the usual GN meaning: data, deps, inputs, +# outputs, testonly, visibility. 
+ +import("//build/config/python.gni") + +template("copy_ex") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, + [ + "data", + "deps", + "public_deps", + "testonly", + "visibility", + ]) + sources = [] + if (defined(invoker.sources)) { + sources += invoker.sources + } + outputs = [] + if (defined(invoker.outputs)) { + outputs += invoker.outputs + } + if (defined(invoker.inputs)) { + inputs = invoker.inputs + } + + script = "//build/android/gyp/copy_ex.py" + + args = [ + "--dest", + rebase_path(invoker.dest, root_build_dir), + ] + rebased_sources = rebase_path(sources, root_build_dir) + args += [ "--files=$rebased_sources" ] + + if (defined(invoker.args)) { + args += invoker.args + } + + if (defined(invoker.renaming_sources) && + defined(invoker.renaming_destinations)) { + sources += invoker.renaming_sources + renaming_destinations = invoker.renaming_destinations + outputs += + get_path_info(rebase_path(renaming_destinations, ".", invoker.dest), + "abspath") + rebased_renaming_sources = + rebase_path(invoker.renaming_sources, root_build_dir) + args += [ "--renaming-sources=$rebased_renaming_sources" ] + args += [ "--renaming-destinations=$renaming_destinations" ] + } + } +} diff --git a/config/android/create_unwind_table.gni b/config/android/create_unwind_table.gni new file mode 100644 index 000000000000..92b7427c47c5 --- /dev/null +++ b/config/android/create_unwind_table.gni @@ -0,0 +1,50 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/rules.gni") + +unwind_table_asset_v2_filename = "unwind_cfi_32_v2" + +_dump_syms_target = "//third_party/breakpad:dump_syms($host_toolchain)" +_dump_syms = get_label_info(_dump_syms_target, "root_out_dir") + "/dump_syms" +_readobj_path = "$clang_base_path/bin/llvm-readobj" + +template("unwind_table_v2") { + action(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + _output_path = + "$target_out_dir/$target_name/$unwind_table_asset_v2_filename" + + # Strip the "lib" prefix, if present. Add and then remove a space because + # our ownly tool is "replace all". + _library_name = get_label_info(invoker.library_target, "name") + _library_name = + string_replace(string_replace(" $_library_name", " $shlib_prefix", ""), + " ", + "") + _library_path = "$root_out_dir/lib.unstripped/$shlib_prefix$_library_name$shlib_extension" + + script = "//build/android/gyp/create_unwind_table.py" + outputs = [ _output_path ] + inputs = [ + _dump_syms, + _library_path, + ] + deps = [ + _dump_syms_target, + invoker.library_target, + ] + + args = [ + "--input_path", + rebase_path(_library_path, root_build_dir), + "--output_path", + rebase_path(_output_path, root_build_dir), + "--dump_syms_path", + rebase_path(_dump_syms, root_build_dir), + "--readobj_path", + rebase_path(_readobj_path, root_build_dir), + ] + } +} diff --git a/config/android/extract_unwind_tables.gni b/config/android/extract_unwind_tables.gni new file mode 100644 index 000000000000..d4daa6a2aadc --- /dev/null +++ b/config/android/extract_unwind_tables.gni @@ -0,0 +1,47 @@ +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import("//build/config/android/rules.gni") + +unwind_table_asset_v1_filename = "unwind_cfi_32" + +_dump_syms_target = "//third_party/breakpad:dump_syms($host_toolchain)" +_dump_syms = get_label_info(_dump_syms_target, "root_out_dir") + "/dump_syms" + +template("unwind_table_v1") { + action(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + _output_path = + "$target_out_dir/$target_name/$unwind_table_asset_v1_filename" + + # Strip the "lib" prefix, if present. Add and then remove a space because + # our ownly tool is "replace all". + _library_name = get_label_info(invoker.library_target, "name") + _library_name = + string_replace(string_replace(" $_library_name", " $shlib_prefix", ""), + " ", + "") + _library_path = "$root_out_dir/lib.unstripped/$shlib_prefix$_library_name$shlib_extension" + + script = "//build/android/gyp/extract_unwind_tables.py" + outputs = [ _output_path ] + inputs = [ + _dump_syms, + _library_path, + ] + deps = [ + _dump_syms_target, + invoker.library_target, + ] + + args = [ + "--input_path", + rebase_path(_library_path, root_build_dir), + "--output_path", + rebase_path(_output_path, root_build_dir), + "--dump_syms_path", + rebase_path(_dump_syms, root_build_dir), + ] + } +} diff --git a/config/android/internal_rules.gni b/config/android/internal_rules.gni new file mode 100644 index 000000000000..41abf539fe96 --- /dev/null +++ b/config/android/internal_rules.gni @@ -0,0 +1,4463 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Do not add any imports to non-//build directories here. +# Some projects (e.g. V8) do not have non-build directories DEPS'ed in. +import("//build/config/android/channel.gni") +import("//build/config/android/config.gni") +import("//build/config/compiler/compiler.gni") +import("//build/config/compute_inputs_for_analyze.gni") +import("//build/config/coverage/coverage.gni") +import("//build/config/python.gni") +import("//build/config/sanitizers/sanitizers.gni") +import("//build/toolchain/goma.gni") +import("//build/toolchain/kythe.gni") +import("//build/util/generate_wrapper.gni") +import("//build_overrides/build.gni") +if (current_toolchain == default_toolchain) { + import("//build/toolchain/concurrent_links.gni") +} +assert(is_android) + +default_android_sdk_dep = "//third_party/android_sdk:android_sdk_java" +_jacoco_dep = "//third_party/jacoco:jacocoagent_java" +_jacoco_host_jar = + "$root_build_dir/lib.java/third_party/jacoco/jacocoagent_java.jar" +_robolectric_libs_dir = + rebase_path( + get_label_info("//:foo($robolectric_toolchain)", "root_out_dir"), + root_build_dir) + +# The following _java_*_types variables capture all the existing target types. +# If a new type is introduced, please add it to one of these categories, +# preferring the more specific resource/library types. +_java_resource_types = [ + "android_assets", + "android_resources", +] + +_java_library_types = [ + "java_library", + "system_java_library", + "android_app_bundle_module", +] + +# These are leaf java target types. They cannot be passed as deps to other +# targets. Thus their naming schemes are not enforced. +_java_leaf_types = [ + "android_apk", + "android_app_bundle", + "dist_aar", + "dist_jar", + "java_annotation_processor", + "java_binary", + "robolectric_binary", +] + +# All _java_resource_types targets must conform to these patterns. 
+java_resource_patterns = [
+  "*_assets",
+  "*_grd",
+  "*_java_strings",
+  "*locale_paks",
+  "*_resources",
+  "*strings_java",
+  "*android*:assets",
+  "*:*_apk_*resources",
+  "*android*:resources",
+]
+
+# All _java_library_types targets must conform to these patterns. This includes
+# all non-leaf targets that use java_library_impl.
+java_library_patterns = [
+  "*_java",
+  "*_javalib",
+  "*javatests",
+  "*_bundle_module",
+  "*:*_java_*",  # E.g. chrome_java_test_support
+  "*:java",
+  "*/java",  # to allow filtering without expanding labels //a/java ->
+             # //a/java:java
+  "*:junit",
+  "*/junit",
+  "*:junit_*",
+  "*:*_junit_*",
+
+  # TODO(agrieve): Rename to glue_java
+  "//android_webview/glue",
+  "//android_webview/glue:glue",
]
+
+# These identify all non-leaf targets that have .build_config.json files. This
+# is the set of patterns that other targets can use to filter out java targets.
+java_target_patterns = java_library_patterns + java_resource_patterns
+
+_r8_path = "//third_party/r8/lib/r8.jar"
+
+# This duplication is intentional, so we avoid updating the r8.jar used by
+# dexing unless necessary, since each update invalidates all incremental dexing
+# and unnecessarily slows down all bots.
+_d8_path = "//third_party/r8/d8/lib/r8.jar"
+_custom_d8_path = "//third_party/r8/custom_d8.jar"
+_default_lint_jar_path = "//third_party/android_build_tools/lint/lint.jar"
+_custom_lint_jar_path = "//third_party/android_build_tools/lint/custom_lint.jar"
+_manifest_merger_jar_path =
+    "//third_party/android_build_tools/manifest_merger/manifest-merger.jar"
+
+# Put the bug number in the target name so that false-positives have a hint in
+# the error message about why non-existent dependencies are there.
+build_config_target_suffix = "__build_config_crbug_908819"
+
+# Write the target's .build_config.json file. This is a json file that contains
+# a dictionary of information about how to build this target (things that
+# require knowledge about this target's dependencies and cannot be calculated
+# at gn-time). There is a special syntax to add a value in that dictionary to
+# an action's/action_foreach's args:
+#   --python-arg=@FileArg($rebased_build_config_path:key0:key1)
+# At runtime, such an arg will be replaced by the value in the build_config.
+# See build/android/gyp/write_build_config.py and
+# build/android/gyp/util/build_utils.py:ExpandFileArgs
+template("write_build_config") {
+  _type = invoker.type
+  _parent_invoker = invoker.invoker
+  _target_label =
+      get_label_info(":${_parent_invoker.target_name}", "label_no_toolchain")
+
+  # Ensure targets match naming patterns so that __assetres, __header, __host,
+  # and __validate targets work properly.
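+  # Editor's aside: a concrete sketch of the checks below, with hypothetical
+  # labels. A "java_library"-typed target named //a/b:foo_java matches the
+  # "*_java" entry of java_library_patterns and passes, while one named
+  # //a/b:foo_lib matches no library pattern and would hit the assert.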
+ if (filter_exclude([ _type ], _java_resource_types) == []) { + if (filter_exclude([ _target_label ], java_resource_patterns) != []) { + assert(false, "Invalid java resource target name: $_target_label") + } + } else if (filter_exclude([ _type ], _java_library_types) == []) { + if (filter_exclude([ _target_label ], java_library_patterns) != [] || + filter_exclude([ _target_label ], java_resource_patterns) == []) { + assert(false, "Invalid java library target name: $_target_label") + } + } else if (_type == "group") { + if (filter_exclude([ _target_label ], java_target_patterns) != []) { + assert(false, "Invalid java target name: $_target_label") + } + } else if (filter_exclude([ _type ], _java_leaf_types) != []) { + assert(false, "This java type needs a category: $_type") + } + + if (defined(invoker.public_target_label)) { + _target_label = invoker.public_target_label + } + + action_with_pydeps(target_name) { + forward_variables_from(invoker, + [ + "deps", + "testonly", + ]) + if (!defined(deps)) { + deps = [] + } + if (defined(invoker.android_manifest_dep)) { + deps += [ invoker.android_manifest_dep ] + } + + script = "//build/android/gyp/write_build_config.py" + depfile = "$target_gen_dir/$target_name.d" + inputs = [] + outputs = [ invoker.build_config ] + + _deps_configs = [] + if (defined(invoker.possible_config_deps)) { + foreach(_possible_dep, invoker.possible_config_deps) { + _dep_label = get_label_info(_possible_dep, "label_no_toolchain") + if (filter_exclude([ _dep_label ], java_target_patterns) == []) { + deps += [ "$_dep_label$build_config_target_suffix" ] + _dep_gen_dir = get_label_info(_possible_dep, "target_gen_dir") + _dep_name = get_label_info(_possible_dep, "name") + _dep_config = "$_dep_gen_dir/$_dep_name.build_config.json" + + _deps_configs += [ _dep_config ] + } + } + } + _public_deps_configs = [] + if (defined(invoker.possible_config_public_deps)) { + foreach(_possible_dep, invoker.possible_config_public_deps) { + _dep_label = get_label_info(_possible_dep, "label_no_toolchain") + + # E.g. Adding an action that generates a .java file that is then + # consumed by a subsequent java_library() target would not work + # because the libraries depend only on the nested targets of one + # another. It is simplest to just ban non-java public_deps. + assert(filter_exclude([ _dep_label ], java_target_patterns) == [], + "Only java_library targets can be used as public_deps. " + + "Found:\n${_dep_label}\non Target:\n" + + get_label_info(":$target_name", "label_no_toolchain")) + + # Put the bug number in the target name so that false-positives + # have a hint in the error message about non-existent dependencies. 
+ deps += [ "$_dep_label$build_config_target_suffix" ] + _dep_gen_dir = get_label_info(_possible_dep, "target_gen_dir") + _dep_name = get_label_info(_possible_dep, "name") + _dep_config = "$_dep_gen_dir/$_dep_name.build_config.json" + + _public_deps_configs += [ _dep_config ] + } + } + inputs += _deps_configs + inputs += _public_deps_configs + _rebased_deps_configs = rebase_path(_deps_configs, root_build_dir) + _rebased_public_deps_configs = + rebase_path(_public_deps_configs, root_build_dir) + + args = [ + "--type=$_type", + "--depfile", + rebase_path(depfile, root_build_dir), + "--deps-configs=$_rebased_deps_configs", + "--public-deps-configs=$_rebased_public_deps_configs", + "--build-config", + rebase_path(invoker.build_config, root_build_dir), + "--gn-target", + _target_label, + ] + + if (defined(invoker.preferred_dep) && invoker.preferred_dep) { + args += [ "--preferred-dep" ] + } + + if (defined(invoker.aar_path)) { + args += [ + "--aar-path", + rebase_path(invoker.aar_path, root_build_dir), + ] + } + + if (defined(invoker.chromium_code) && !invoker.chromium_code) { + # Default to chromium code if invoker did not pass anything. + args += [ "--non-chromium-code" ] + } + + if (defined(invoker.device_jar_path)) { + args += [ + "--device-jar-path", + rebase_path(invoker.device_jar_path, root_build_dir), + ] + } + if (defined(invoker.host_jar_path)) { + args += [ + "--host-jar-path", + rebase_path(invoker.host_jar_path, root_build_dir), + ] + } + if (defined(invoker.unprocessed_jar_path)) { + args += [ + "--unprocessed-jar-path", + rebase_path(invoker.unprocessed_jar_path, root_build_dir), + ] + } + if (defined(invoker.ijar_path)) { + args += [ + "--interface-jar-path", + rebase_path(invoker.ijar_path, root_build_dir), + ] + } + if (defined(invoker.kotlinc_jar_path)) { + args += [ + "--kotlinc-jar-path", + rebase_path(invoker.kotlinc_jar_path, root_build_dir), + ] + } + if (defined(invoker.java_resources_jar)) { + args += [ + "--java-resources-jar-path", + rebase_path(invoker.java_resources_jar, root_build_dir), + ] + } + if (defined(invoker.annotation_processor_deps) && + invoker.annotation_processor_deps != []) { + _processor_configs = [] + foreach(_dep_label, invoker.annotation_processor_deps) { + deps += [ "$_dep_label$build_config_target_suffix" ] + _dep_gen_dir = get_label_info(_dep_label, "target_gen_dir") + _dep_name = get_label_info(_dep_label, "name") + _dep_config = "$_dep_gen_dir/$_dep_name.build_config.json" + _processor_configs += [ _dep_config ] + } + _rebased_processor_configs = + rebase_path(_processor_configs, root_build_dir) + inputs += _processor_configs + args += [ "--annotation-processor-configs=$_rebased_processor_configs" ] + } + + # Dex path for library targets, or the the intermediate library for apks. + if (defined(invoker.dex_path)) { + args += [ + "--dex-path", + rebase_path(invoker.dex_path, root_build_dir), + ] + } + + # Dex path for the final apk. 
+ if (defined(invoker.final_dex_path)) { + args += [ + "--final-dex-path", + rebase_path(invoker.final_dex_path, root_build_dir), + ] + } + if (defined(invoker.supports_android) && invoker.supports_android) { + args += [ "--supports-android" ] + } + if (defined(invoker.requires_android) && invoker.requires_android) { + args += [ "--requires-android" ] + } + if (defined(invoker.is_prebuilt) && invoker.is_prebuilt) { + args += [ "--is-prebuilt" ] + } + if (defined(invoker.bypass_platform_checks) && + invoker.bypass_platform_checks) { + args += [ "--bypass-platform-checks" ] + } + if (defined(invoker.is_robolectric) && invoker.is_robolectric) { + args += [ "--is-robolectric" ] + } + + if (defined(invoker.apk_under_test)) { + _dep_label = invoker.apk_under_test + _dep_gen_dir = get_label_info(_dep_label, "target_gen_dir") + _dep_name = get_label_info(_dep_label, "name") + _dep_config = "$_dep_gen_dir/$_dep_name.build_config.json" + inputs += [ _dep_config ] + deps += [ "$_dep_label$build_config_target_suffix" ] + args += [ + "--tested-apk-config", + rebase_path(_dep_config, root_build_dir), + ] + } + + if (defined(invoker.asset_sources)) { + _rebased_asset_sources = + rebase_path(invoker.asset_sources, root_build_dir) + args += [ "--asset-sources=$_rebased_asset_sources" ] + } + if (defined(invoker.asset_renaming_sources)) { + _rebased_asset_renaming_sources = + rebase_path(invoker.asset_renaming_sources, root_build_dir) + args += [ "--asset-renaming-sources=$_rebased_asset_renaming_sources" ] + + # These are zip paths, so no need to rebase. + args += [ + "--asset-renaming-destinations=${invoker.asset_renaming_destinations}", + ] + } + if (defined(invoker.disable_compression) && invoker.disable_compression) { + args += [ "--disable-asset-compression" ] + } + if (defined(invoker.treat_as_locale_paks) && invoker.treat_as_locale_paks) { + args += [ "--treat-as-locale-paks" ] + } + + if (defined(invoker.merged_android_manifest)) { + args += [ + "--merged-android-manifest", + rebase_path(invoker.merged_android_manifest, root_build_dir), + ] + } + if (defined(invoker.android_manifest)) { + inputs += [ invoker.android_manifest ] + args += [ + "--android-manifest", + rebase_path(invoker.android_manifest, root_build_dir), + ] + } + if (defined(invoker.resources_zip)) { + args += [ + "--resources-zip", + rebase_path(invoker.resources_zip, root_build_dir), + ] + } + + if (defined(invoker.resource_overlay) && invoker.resource_overlay) { + args += [ "--resource-overlay" ] + } + + if (defined(invoker.custom_package)) { + args += [ + "--package-name", + invoker.custom_package, + ] + } + if (defined(invoker.r_text)) { + args += [ + "--r-text-path", + rebase_path(invoker.r_text, root_build_dir), + ] + } + if (defined(invoker.res_size_info_path)) { + args += [ + "--res-size-info", + rebase_path(invoker.res_size_info_path, root_build_dir), + ] + } + if (defined(invoker.res_sources_path)) { + _res_sources_path = rebase_path(invoker.res_sources_path, root_build_dir) + args += [ "--res-sources-path=$_res_sources_path" ] + } + if (defined(invoker.proto_resources_path)) { + _rebased_proto_resources = + rebase_path(invoker.proto_resources_path, root_build_dir) + args += [ "--apk-proto-resources=$_rebased_proto_resources" ] + } + if (defined(invoker.r_text_path)) { + _rebased_rtxt_path = rebase_path(invoker.r_text_path, root_build_dir) + args += [ "--r-text-path=$_rebased_rtxt_path" ] + } + if (defined(invoker.module_pathmap_path)) { + _rebased_pathmap_path = + rebase_path(invoker.module_pathmap_path, root_build_dir) + 
args += [ "--module-pathmap-path=$_rebased_pathmap_path" ] + } + + if (defined(invoker.shared_libraries_runtime_deps_file)) { + # Don't list shared_libraries_runtime_deps_file as an input in order to + # avoid having to depend on the runtime_deps target. See comment in + # rules.gni for why we do this. + args += [ + "--shared-libraries-runtime-deps", + rebase_path(invoker.shared_libraries_runtime_deps_file, root_build_dir), + ] + } + + if (defined(invoker.base_allowlist_rtxt_path)) { + args += [ + "--base-allowlist-rtxt-path", + rebase_path(invoker.base_allowlist_rtxt_path, root_build_dir), + ] + } + + if (defined(invoker.loadable_modules)) { + _rebased_loadable_modules = + rebase_path(invoker.loadable_modules, root_build_dir) + args += [ "--loadable-modules=$_rebased_loadable_modules" ] + } + + if (defined(invoker.secondary_abi_shared_libraries_runtime_deps_file)) { + # Don't list secondary_abi_shared_libraries_runtime_deps_file as an + # input in order to avoid having to depend on the runtime_deps target. + # See comment in rules.gni for why we do this. + args += [ + "--secondary-abi-shared-libraries-runtime-deps", + rebase_path(invoker.secondary_abi_shared_libraries_runtime_deps_file, + root_build_dir), + ] + } + + if (defined(invoker.secondary_abi_loadable_modules) && + invoker.secondary_abi_loadable_modules != []) { + _rebased_secondary_abi_loadable_modules = + rebase_path(invoker.secondary_abi_loadable_modules, root_build_dir) + args += [ "--secondary-abi-loadable-modules=$_rebased_secondary_abi_loadable_modules" ] + } + + if (defined(invoker.native_lib_placeholders) && + invoker.native_lib_placeholders != []) { + args += [ "--native-lib-placeholders=${invoker.native_lib_placeholders}" ] + } + + if (defined(invoker.secondary_native_lib_placeholders) && + invoker.secondary_native_lib_placeholders != []) { + args += [ "--secondary-native-lib-placeholders=${invoker.secondary_native_lib_placeholders}" ] + } + + if (defined(invoker.library_always_compress)) { + args += [ "--library-always-compress=${invoker.library_always_compress}" ] + } + + if (defined(invoker.apk_path)) { + # TODO(tiborg): Remove APK path from build config and use + # install_artifacts from metadata instead. 
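+ # (install_artifacts refers to the GN metadata key that apk targets emit; + # the generated_file() block in test_runner_script below already collects + # it via data_keys.)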
+ _rebased_apk_path = rebase_path(invoker.apk_path, root_build_dir) + args += [ "--apk-path=$_rebased_apk_path" ] + if (defined(invoker.incremental_apk_path)) { + _rebased_incremental_apk_path = + rebase_path(invoker.incremental_apk_path, root_build_dir) + _rebased_incremental_install_json_path = + rebase_path(invoker.incremental_install_json_path, root_build_dir) + args += [ + "--incremental-install-json-path=$_rebased_incremental_install_json_path", + "--incremental-apk-path=$_rebased_incremental_apk_path", + ] + } + } + + if (defined(invoker.target_sources_file)) { + args += [ + "--target-sources-file", + rebase_path(invoker.target_sources_file, root_build_dir), + ] + } + if (defined(invoker.srcjar)) { + args += [ + "--srcjar", + rebase_path(invoker.srcjar, root_build_dir), + ] + } + if (defined(invoker.bundled_srcjars)) { + _rebased_bundled_srcjars = + rebase_path(invoker.bundled_srcjars, root_build_dir) + args += [ "--bundled-srcjars=$_rebased_bundled_srcjars" ] + } + if (defined(invoker.proguard_enabled) && invoker.proguard_enabled) { + args += [ "--proguard-enabled" ] + } + if (defined(invoker.proguard_mapping_path)) { + _rebased_proguard_mapping_path = + rebase_path(invoker.proguard_mapping_path, root_build_dir) + args += [ "--proguard-mapping-path=$_rebased_proguard_mapping_path" ] + } + if (defined(invoker.input_jars_paths)) { + _rebased_input_jars_paths = + rebase_path(invoker.input_jars_paths, root_build_dir) + args += [ "--extra-classpath-jars=$_rebased_input_jars_paths" ] + } + if (defined(invoker.low_classpath_priority) && + invoker.low_classpath_priority) { + args += [ "--low-classpath-priority" ] + } + if (defined(invoker.mergeable_android_manifests)) { + _rebased_mergeable_android_manifests = + rebase_path(invoker.mergeable_android_manifests, root_build_dir) + args += [ + "--mergeable-android-manifests=$_rebased_mergeable_android_manifests", + ] + } + if (defined(invoker.proguard_configs)) { + _rebased_proguard_configs = + rebase_path(invoker.proguard_configs, root_build_dir) + args += [ "--proguard-configs=$_rebased_proguard_configs" ] + } + if (defined(invoker.gradle_treat_as_prebuilt) && + invoker.gradle_treat_as_prebuilt) { + args += [ "--gradle-treat-as-prebuilt" ] + } + if (defined(invoker.main_class)) { + args += [ + "--main-class", + invoker.main_class, + ] + } + if (defined(invoker.base_module_target)) { + _dep_label = invoker.base_module_target + _dep_gen_dir = get_label_info(_dep_label, "target_gen_dir") + _dep_name = get_label_info(_dep_label, "name") + _dep_config = "$_dep_gen_dir/$_dep_name.build_config.json" + deps += [ "$_dep_label$build_config_target_suffix" ] + inputs += [ _dep_config ] + args += [ + "--base-module-build-config", + rebase_path(_dep_config, root_build_dir), + ] + } + if (defined(invoker.parent_module_target)) { + _dep_label = invoker.parent_module_target + _dep_gen_dir = get_label_info(_dep_label, "target_gen_dir") + _dep_name = get_label_info(_dep_label, "name") + _dep_config = "$_dep_gen_dir/$_dep_name.build_config.json" + deps += [ "$_dep_label$build_config_target_suffix" ] + inputs += [ _dep_config ] + args += [ + "--parent-module-build-config", + rebase_path(_dep_config, root_build_dir), + ] + } + if (defined(invoker.module_name)) { + args += [ + "--module-name", + invoker.module_name, + ] + } + if (defined(invoker.modules)) { + foreach(_module, invoker.modules) { + if (defined(_module.uses_split)) { + args += [ "--uses-split=${_module.name}:${_module.uses_split}" ] + } + } + } + if (defined(invoker.module_build_configs)) { + inputs 
+= invoker.module_build_configs + _rebased_configs = + rebase_path(invoker.module_build_configs, root_build_dir) + args += [ "--module-build-configs=$_rebased_configs" ] + } + if (defined(invoker.add_view_trace_events) && + invoker.add_view_trace_events) { + # Adding trace events involves rewriting bytecode and generating a new set + # of jar files. In order to avoid conflicts between bundles we save the + # new jars in a bundle specific gen/ directory. The build config for the + # bundle and each one of its modules need a path to a bundle specific + # gen/ directory in order to generate a list of rewritten jar paths. + # We use the base module's target_gen_dir because non-base modules and the + # app bundle targets have a reference to it (base_module_target). + if (_type == "android_app_bundle") { + _trace_events_target_name = + get_label_info(_parent_invoker.base_module_target, "name") + } else if (defined(invoker.base_module_target)) { + _trace_events_target_name = + get_label_info(invoker.base_module_target, "name") + } else { + _grandparent_invoker = _parent_invoker.invoker + _trace_events_target_name = _grandparent_invoker.target_name + } + + # FIXME: This should likely be using the base module's target_out_dir + # rather than the current target's. + args += [ + "--trace-events-jar-dir", + rebase_path("$target_out_dir/$_trace_events_target_name", + root_build_dir), + ] + } + if (defined(invoker.version_name)) { + args += [ + "--version-name", + invoker.version_name, + ] + } + if (defined(invoker.version_code)) { + args += [ + "--version-code", + invoker.version_code, + ] + } + if (defined(invoker.recursive_resource_deps) && + invoker.recursive_resource_deps) { + args += [ "--recursive-resource-deps" ] + } + if (current_toolchain != default_toolchain) { + # This has to be a build-time error rather than a GN assert because many + # packages have a mix of java and non-java targets.
For example, the + # following would fail even though nothing depends on :bar(//baz): + # + # shared_library("foo") { + # } + # + # android_library("bar") { + # deps = [ ":foo(//baz)" ] + # assert(current_toolchain == default_toolchain) + # } + _msg = [ + "Tried to build an Android target in a non-default toolchain.", + "target: $_target_label", + "current_toolchain: $current_toolchain", + "default_toolchain: $default_toolchain", + ] + args += [ "--fail=$_msg" ] + } + } +} + +template("generate_android_wrapper") { + generate_wrapper(target_name) { + forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + generator_script = "//build/android/gyp/generate_android_wrapper.py" + sources = [ + "//build/android/gyp/util/build_utils.py", + "//build/gn_helpers.py", + "//build/util/generate_wrapper.py", + ] + } +} + +template("generate_r_java") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ]) + depfile = "$target_gen_dir/${invoker.target_name}.d" + inputs = [ invoker.build_config ] + outputs = [ invoker.srcjar_path ] + _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) + script = "//build/android/gyp/create_r_java.py" + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--srcjar-out", + rebase_path(invoker.srcjar_path, root_build_dir), + "--deps-rtxts=@FileArg($_rebased_build_config:deps_info:dependency_r_txt_files)", + "--r-package=${invoker.package}", + ] + } +} + +# Generates a script in the build bin directory which runs the test +# target using the test runner script in build/android/test_runner.py. +template("test_runner_script") { + testonly = true + _test_name = invoker.test_name + _test_type = invoker.test_type + _is_unit_test = defined(invoker.is_unit_test) && invoker.is_unit_test + _incremental_apk = defined(invoker.incremental_apk) && invoker.incremental_apk + + _runtime_deps = + !defined(invoker.ignore_all_data_deps) || !invoker.ignore_all_data_deps + + if (_runtime_deps) { + # This runtime_deps file is used at runtime and thus cannot go in + # target_gen_dir. 
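+ # (GN's write_runtime_deps variable below makes gn itself emit the file at + # generation time, so no extra build action is needed to produce it.)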
+ _target_dir_name = get_label_info(":$target_name", "dir") + _runtime_deps_file = + "$root_out_dir/gen.runtime/$_target_dir_name/$target_name.runtime_deps" + _runtime_deps_target = "${target_name}__write_deps" + group(_runtime_deps_target) { + forward_variables_from(invoker, + [ + "data", + "deps", + "public_deps", + ]) + data_deps = [] + if (defined(invoker.data_deps)) { + data_deps += invoker.data_deps + } + if (defined(invoker.additional_apks)) { + data_deps += invoker.additional_apks + } + write_runtime_deps = _runtime_deps_file + } + } + + if (defined(invoker.apk_under_test)) { + _install_artifacts_json = + "${target_gen_dir}/${target_name}.install_artifacts" + _install_artifacts_target_name = "${target_name}__install_artifacts" + generated_file(_install_artifacts_target_name) { + deps = [ invoker.apk_under_test ] + output_conversion = "json" + outputs = [ _install_artifacts_json ] + data_keys = [ "install_artifacts" ] + walk_keys = [ "install_artifacts_barrier" ] + rebase = root_build_dir + } + } + + generate_android_wrapper(target_name) { + forward_variables_from(invoker, + [ + "assert_no_deps", + "public_deps", + "visibility", + ]) + wrapper_script = "$root_build_dir/bin/run_${_test_name}" + + executable = "//testing/test_env.py" + + if (defined(invoker.android_test_runner_script)) { + _runner_script = invoker.android_test_runner_script + } else { + _runner_script = "//build/android/test_runner.py" + } + + deps = [] + if (defined(invoker.deps)) { + deps = invoker.deps + } + data_deps = [ + "//build/android:test_runner_core_py", + "//testing:test_scripts_shared", + ] + if (_test_type != "junit") { + data_deps += [ "//build/android:test_runner_device_support" ] + } + if (defined(invoker.data_deps)) { + data_deps += invoker.data_deps + } + data = [] + if (defined(invoker.data)) { + data += invoker.data + } + + executable_args = [ + "@WrappedPath(" + rebase_path(_runner_script, root_build_dir) + ")", + _test_type, + "--output-directory", + "@WrappedPath(.)", + "--wrapper-script-args", + ] + + if (_is_unit_test) { + executable_args += [ "--is-unit-test" ] + } + + if (_runtime_deps) { + deps += [ ":$_runtime_deps_target" ] + data += [ _runtime_deps_file ] + _rebased_runtime_deps_file = + rebase_path(_runtime_deps_file, root_build_dir) + executable_args += [ + "--runtime-deps-path", + "@WrappedPath(${_rebased_runtime_deps_file})", + ] + } + + # apk_target is not used for native executable tests + # (e.g. breakpad_unittests). + if (defined(invoker.apk_target)) { + assert(!defined(invoker.executable_dist_dir)) + deps += [ "${invoker.apk_target}$build_config_target_suffix" ] + _apk_build_config = + get_label_info(invoker.apk_target, "target_gen_dir") + "/" + + get_label_info(invoker.apk_target, "name") + ".build_config.json" + _rebased_apk_build_config = rebase_path(_apk_build_config, root_build_dir) + not_needed([ "_rebased_apk_build_config" ]) + } else if (_test_type == "gtest") { + assert( + defined(invoker.executable_dist_dir), + "Must define either apk_target or executable_dist_dir for test_runner_script()") + _rebased_executable_dist_dir = + rebase_path(invoker.executable_dist_dir, root_build_dir) + executable_args += [ + "--executable-dist-dir", + "@WrappedPath(${_rebased_executable_dist_dir})", + ] + } + + _device_test = true + if (_test_type == "gtest") { + assert(defined(invoker.test_suite)) + executable_args += [ + "--suite", + invoker.test_suite, + ] + if (use_clang_coverage) { + # Set a default coverage output directory (can be overridden by user + # passing the same flag). 
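+ # Example invocation (suite name is illustrative): + # out/Default/bin/run_foo_unittests --coverage-dir=/tmp/clang_coverage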
+ _rebased_coverage_dir = + rebase_path("$root_out_dir/coverage", root_build_dir) + executable_args += [ + "--coverage-dir", + "@WrappedPath(${_rebased_coverage_dir})", + ] + } + } else if (_test_type == "instrumentation") { + _test_apk = "@WrappedPath(@FileArg($_rebased_apk_build_config:deps_info:apk_path))" + if (_incremental_apk) { + _test_apk = "@WrappedPath(@FileArg($_rebased_apk_build_config:deps_info:incremental_apk_path))" + } + executable_args += [ + "--test-apk", + _test_apk, + ] + if (defined(invoker.apk_under_test)) { + if (_incremental_apk) { + deps += [ "${invoker.apk_under_test}$build_config_target_suffix" ] + _apk_under_test_build_config = + get_label_info(invoker.apk_under_test, "target_gen_dir") + "/" + + get_label_info(invoker.apk_under_test, "name") + + ".build_config.json" + _rebased_apk_under_test_build_config = + rebase_path(_apk_under_test_build_config, root_build_dir) + _apk_under_test = "@WrappedPath(@FileArg($_rebased_apk_under_test_build_config:deps_info:incremental_apk_path))" + } else { + deps += [ ":${_install_artifacts_target_name}" ] + _rebased_install_artifacts_json = + rebase_path(_install_artifacts_json, root_build_dir) + _apk_under_test = + "@WrappedPath(@FileArg($_rebased_install_artifacts_json[]))" + } + executable_args += [ + "--apk-under-test", + _apk_under_test, + ] + } + if (defined(invoker.use_webview_provider)) { + deps += [ "${invoker.use_webview_provider}$build_config_target_suffix" ] + _build_config = + get_label_info(invoker.use_webview_provider, "target_gen_dir") + + "/" + get_label_info(invoker.use_webview_provider, "name") + + ".build_config.json" + _rebased_build_config = rebase_path(_build_config, root_build_dir) + executable_args += [ + "--use-webview-provider", + "@WrappedPath(@FileArg($_rebased_build_config:deps_info:apk_path))", + ] + } + if (defined(invoker.proguard_mapping_path)) { + if (_incremental_apk) { + not_needed(invoker, [ "proguard_mapping_path" ]) + } else { + data += [ invoker.proguard_mapping_path ] + _rebased_mapping_path = + rebase_path(invoker.proguard_mapping_path, root_build_dir) + executable_args += [ + "--proguard-mapping-path", + "@WrappedPath($_rebased_mapping_path)", + ] + } + } + if (use_jacoco_coverage) { + # Set a default coverage output directory (can be overridden by user + # passing the same flag). + _rebased_coverage_dir = + rebase_path("$root_out_dir/coverage", root_build_dir) + executable_args += [ + "--coverage-dir", + "@WrappedPath(${_rebased_coverage_dir})", + ] + } + } else if (_test_type == "junit") { + assert(defined(invoker.test_suite)) + _device_test = false + executable_args += [ + "--test-suite", + invoker.test_suite, + "--native-libs-dir", + "@WrappedPath($_robolectric_libs_dir)", + ] + + # Test runner uses this generated wrapper script. + data += [ "$root_build_dir/bin/helper/${invoker.test_suite}" ] + + deps += [ ":${invoker.test_suite}$build_config_target_suffix" ] + + _rebased_robolectric_runtime_deps_dir = + rebase_path("//third_party/robolectric/lib", root_build_dir) + _rebased_resource_apk = rebase_path(invoker.resource_apk, root_build_dir) + executable_args += [ + "--resource-apk", + "@WrappedPath(${_rebased_resource_apk})", + "--robolectric-runtime-deps-dir", + "@WrappedPath(${_rebased_robolectric_runtime_deps_dir})", + ] + if (use_jacoco_coverage) { + # Set a default coverage output directory (can be overridden by user + # passing the same flag). 
+ _rebased_coverage_dir = + rebase_path("$root_out_dir/coverage", root_build_dir) + executable_args += [ + "--coverage-dir", + "@WrappedPath(${_rebased_coverage_dir})", + ] + } + } else if (_test_type == "linker") { + executable_args += [ + "--test-apk", + "@WrappedPath(@FileArg($_rebased_apk_build_config:deps_info:apk_path))", + ] + } else { + assert(false, "Invalid test type: $_test_type.") + } + + if (defined(invoker.additional_apks)) { + foreach(additional_apk, invoker.additional_apks) { + deps += [ "$additional_apk$build_config_target_suffix" ] + _build_config = + get_label_info(additional_apk, "target_gen_dir") + "/" + + get_label_info(additional_apk, "name") + ".build_config.json" + _rebased_build_config = rebase_path(_build_config, root_build_dir) + executable_args += [ + "--additional-apk", + "@WrappedPath(@FileArg($_rebased_build_config:deps_info:apk_path))", + ] + } + } + if (defined(invoker.shard_timeout)) { + executable_args += [ "--shard-timeout=${invoker.shard_timeout}" ] + } + if (_incremental_apk) { + executable_args += [ + "--test-apk-incremental-install-json", + "@WrappedPath(@FileArg($_rebased_apk_build_config:deps_info:incremental_install_json_path))", + ] + if (defined(invoker.apk_under_test)) { + executable_args += [ + "--apk-under-test-incremental-install-json", + "@WrappedPath(@FileArg($_rebased_apk_under_test_build_config:deps_info:incremental_install_json_path))", + ] + } + executable_args += [ "--fast-local-dev" ] + } + if (_device_test && is_asan) { + executable_args += [ "--tool=asan" ] + } + + if (defined(invoker.modules)) { + foreach(module, invoker.modules) { + executable_args += [ + "--module", + module, + ] + } + } + + if (defined(invoker.fake_modules)) { + foreach(fake_module, invoker.fake_modules) { + executable_args += [ + "--fake-module", + fake_module, + ] + } + } + + if (defined(invoker.additional_locales)) { + foreach(locale, invoker.additional_locales) { + executable_args += [ + "--additional-locale", + locale, + ] + } + } + + if (defined(invoker.extra_args)) { + executable_args += invoker.extra_args + } + } +} + +if (enable_java_templates) { + template("android_lint") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + + # https://crbug.com/1098752 Fix for bot OOM (https://crbug.com/1098333). + if (defined(java_cmd_pool_size)) { + pool = "//build/config/android:java_cmd_pool($default_toolchain)" + } else { + pool = "//build/toolchain:link_pool($default_toolchain)" + } + + # Lint requires generated sources and generated resources from the build. + # Turbine __header targets depend on all generated sources, and the + # __assetres targets depend on all generated resources. + deps = [] + if (defined(invoker.deps)) { + _lib_deps = + filter_exclude(filter_include(invoker.deps, java_library_patterns), + java_resource_patterns) + foreach(_lib_dep, _lib_deps) { + # Expand //foo/java -> //foo/java:java + _lib_dep = get_label_info(_lib_dep, "label_no_toolchain") + deps += [ + "${_lib_dep}__assetres", + "${_lib_dep}__header", + ] + } + + # Keep non-java deps as they may generate files used only by lint. + # e.g. generated suppressions.xml files. 
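+ # (filter_exclude() below keeps exactly the entries of invoker.deps that + # were not matched into _lib_deps above.)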
+ deps += filter_exclude(invoker.deps, _lib_deps) + } + + if (defined(invoker.min_sdk_version)) { + _min_sdk_version = invoker.min_sdk_version + } else { + _min_sdk_version = default_min_sdk_version + } + + if (defined(invoker.lint_jar_path)) { + _lint_jar_path = invoker.lint_jar_path + } else { + _lint_jar_path = _default_lint_jar_path + } + + _cache_dir = "$root_build_dir/android_lint_cache" + + # Save generated xml files in a consistent location for debugging. + _lint_gen_dir = "$target_gen_dir/$target_name" + _backported_methods = "//third_party/r8/backported_methods.txt" + + script = "//build/android/gyp/lint.py" + depfile = "$target_gen_dir/$target_name.d" + inputs = [ + _lint_jar_path, + _custom_lint_jar_path, + _backported_methods, + ] + + args = [ + "--target-name", + get_label_info(":${target_name}", "label_no_toolchain"), + "--depfile", + rebase_path(depfile, root_build_dir), + "--lint-jar-path", + rebase_path(_lint_jar_path, root_build_dir), + "--custom-lint-jar-path", + rebase_path(_custom_lint_jar_path, root_build_dir), + "--cache-dir", + rebase_path(_cache_dir, root_build_dir), + "--lint-gen-dir", + rebase_path(_lint_gen_dir, root_build_dir), + "--android-sdk-version=${lint_android_sdk_version}", + "--min-sdk-version=$_min_sdk_version", + "--android-sdk-root", + rebase_path(lint_android_sdk_root, root_build_dir), + "--backported-methods", + rebase_path(_backported_methods, root_build_dir), + ] + + if (defined(invoker.skip_build_server) && invoker.skip_build_server) { + # Nocompile tests need lint to fail through ninja. + args += [ "--skip-build-server" ] + } else if (android_static_analysis == "build_server") { + args += [ "--use-build-server" ] + } + + if (defined(invoker.lint_suppressions_file)) { + inputs += [ invoker.lint_suppressions_file ] + + args += [ + "--config-path", + rebase_path(invoker.lint_suppressions_file, root_build_dir), + ] + } + + if (defined(testonly) && testonly) { + # Allows us to ignore unnecessary checks when linting test targets. + args += [ "--testonly" ] + } + + if (defined(invoker.manifest_package)) { + args += [ "--manifest-package=${invoker.manifest_package}" ] + } + + if (treat_warnings_as_errors) { + args += [ "--warnings-as-errors" ] + } + + if (defined(invoker.lint_baseline_file)) { + if (compute_inputs_for_analyze) { + # The baseline file is included in lint.py as a depfile dep. Since + # removing it regenerates the file, it is useful to not have this as + # a gn input during local development. Add it only for bots' analyze. + inputs += [ invoker.lint_baseline_file ] + } + args += [ + # Baseline allows us to turn on lint warnings without fixing all the + # pre-existing issues. This stops the flood of new issues while the + # existing ones are being fixed. + "--baseline", + rebase_path(invoker.lint_baseline_file, root_build_dir), + ] + } + + if (defined(invoker.create_cache) && invoker.create_cache) { + # Putting the stamp file in the cache dir allows us to depend on ninja + # to create the cache dir for us. 
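+ # (Ninja creates the parent directories of declared outputs, so declaring + # the stamp beneath the cache dir guarantees the cache dir exists.)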
+ _stamp_path = "$_cache_dir/build.lint.stamp" + args += [ "--create-cache" ] + } else { + _stamp_path = "$target_out_dir/$target_name/build.lint.stamp" + deps += [ + "//build/android:prepare_android_lint_cache", + invoker.build_config_dep, + ] + inputs += [ invoker.build_config ] + _rebased_build_config = + rebase_path(invoker.build_config, root_build_dir) + + args += [ + "--manifest-path=@FileArg($_rebased_build_config:deps_info:lint_android_manifest)", + "--extra-manifest-paths=@FileArg($_rebased_build_config:deps_info:lint_extra_android_manifests)", + + # Lint requires all source and all resource files to be passed in the + # same invocation for checks like UnusedResources. + "--sources=@FileArg($_rebased_build_config:deps_info:lint_sources)", + "--aars=@FileArg($_rebased_build_config:deps_info:lint_aars)", + "--srcjars=@FileArg($_rebased_build_config:deps_info:lint_srcjars)", + "--resource-sources=@FileArg($_rebased_build_config:deps_info:lint_resource_sources)", + "--resource-zips=@FileArg($_rebased_build_config:deps_info:lint_resource_zips)", + + # The full classpath is required for annotation checks like @IntDef. + "--classpath=@FileArg($_rebased_build_config:deps_info:javac_full_interface_classpath)", + ] + } + + outputs = [ _stamp_path ] + args += [ + "--stamp", + rebase_path(_stamp_path, root_build_dir), + ] + } + } + + template("proguard") { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + _script = "//build/android/gyp/proguard.py" + _deps = invoker.deps + + _inputs = [ + invoker.build_config, + _r8_path, + ] + if (defined(invoker.inputs)) { + _inputs += invoker.inputs + } + if (defined(invoker.proguard_mapping_path)) { + _mapping_path = invoker.proguard_mapping_path + } else { + _mapping_path = "${invoker.output_path}.mapping" + } + + _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) + + # This is generally the apk name, and serves to identify the mapping + # file that would be required to deobfuscate a stacktrace. + _mapping_basename = get_path_info(_mapping_path, "name") + _version_code = "@FileArg($_rebased_build_config:deps_info:version_code)" + _package_name = "@FileArg($_rebased_build_config:deps_info:package_name)" + if (defined(invoker.package_name)) { + _package_name = invoker.package_name + } + if (defined(invoker.version_code)) { + _version_code = invoker.version_code + } + + # The Mapping ID is parsed when uploading mapping files.
+ # See: https://crbug.com/1417308 + _source_file_template = + "chromium-$_mapping_basename-$android_channel-$_version_code" + + _args = [ + "--mapping-output", + rebase_path(_mapping_path, root_build_dir), + "--classpath", + "@FileArg($_rebased_build_config:deps_info:proguard_classpath_jars)", + "--classpath", + "@FileArg($_rebased_build_config:android:sdk_jars)", + "--r8-path", + rebase_path(_r8_path, root_build_dir), + "--package-name=$_package_name", + "--source-file", + _source_file_template, + ] + if (treat_warnings_as_errors) { + _args += [ "--warnings-as-errors" ] + } + + if ((!defined(invoker.proguard_enable_obfuscation) || + invoker.proguard_enable_obfuscation) && enable_proguard_obfuscation) { + _args += [ "--enable-obfuscation" ] + } + + if (defined(invoker.modules)) { + foreach(_feature_module, invoker.modules) { + _rebased_module_build_config = + rebase_path(_feature_module.build_config, root_build_dir) + _args += [ + "--feature-name=${_feature_module.name}", + "--dex-dest=@FileArg($_rebased_module_build_config:final_dex:path)", + ] + + # The bundle's build config has the correct classpaths - the individual + # modules' build configs may double-use some jars. + if (defined(invoker.add_view_trace_events) && + invoker.add_view_trace_events) { + _args += [ "--feature-jars=@FileArg($_rebased_build_config:modules:${_feature_module.name}:trace_event_rewritten_device_classpath)" ] + } else { + _args += [ "--feature-jars=@FileArg($_rebased_build_config:modules:${_feature_module.name}:device_classpath)" ] + } + + if (defined(_feature_module.uses_split)) { + _args += [ "--uses-split=${_feature_module.name}:${_feature_module.uses_split}" ] + } + _deps += [ _feature_module.build_config_target ] + } + _stamp = "${target_gen_dir}/${target_name}.r8.stamp" + _outputs = [ _stamp ] + _output_arg = [ + "--stamp", + rebase_path(_stamp, root_build_dir), + ] + } else { + # We don't directly set the output arg on the _args variable since it is + # shared with the expectation target that uses its own stamp file and + # does not take an --output-path. + _output_arg = [ + "--output-path", + rebase_path(invoker.output_path, root_build_dir), + ] + _outputs = [ invoker.output_path ] + } + _outputs += [ _mapping_path ] + + if (defined(invoker.enable_proguard_checks) && + !invoker.enable_proguard_checks) { + _args += [ "--disable-checks" ] + } + + _ignore_desugar_missing_deps = + defined(invoker.ignore_desugar_missing_deps) && + invoker.ignore_desugar_missing_deps + if (!_ignore_desugar_missing_deps) { + _args += [ "--show-desugar-default-interface-warnings" ] + } + + if (defined(invoker.custom_assertion_handler)) { + _args += [ + "--assertion-handler", + invoker.custom_assertion_handler, + ] + } else if (enable_java_asserts) { + # The default for generating dex file format is + # --force-disable-assertions. + _args += [ "--force-enable-assertions" ] + } + + if (defined(invoker.args)) { + _args += invoker.args + } + + if (defined(invoker.expected_proguard_config)) { + _expectations_target = + "${invoker.top_target_name}_validate_proguard_config" + action_with_pydeps(_expectations_target) { + script = _script + + # Need to depend on all deps so that proguard.txt within .aar files get + # extracted. 
+ deps = _deps + depfile = "${target_gen_dir}/${target_name}.d" + inputs = [ + invoker.build_config, + invoker.expected_proguard_config, + ] + _actual_file = "$target_gen_dir/$target_name.proguard_configs" + _failure_file = + "$expectations_failure_dir/" + + string_replace(invoker.expected_proguard_config, "/", "_") + outputs = [ + _actual_file, + _failure_file, + ] + args = _args + [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--failure-file", + rebase_path(_failure_file, root_build_dir), + "--expected-file", + rebase_path(invoker.expected_proguard_config, root_build_dir), + "--actual-file", + rebase_path(_actual_file, root_build_dir), + "--only-verify-expectations", + ] + if (defined(invoker.expected_proguard_config_base)) { + inputs += [ invoker.expected_proguard_config_base ] + args += [ + "--expected-file-base", + rebase_path(invoker.expected_proguard_config_base, root_build_dir), + ] + } + if (fail_on_android_expectations) { + args += [ "--fail-on-expectations" ] + } + } + _deps += [ ":$_expectations_target" ] + } + action_with_pydeps(target_name) { + forward_variables_from(invoker, + [ + "data", + "data_deps", + "public_deps", + ]) + script = _script + deps = _deps + inputs = _inputs + outputs = _outputs + depfile = "${target_gen_dir}/${target_name}.d" + args = _args + _output_arg + [ + "--depfile", + rebase_path(depfile, root_build_dir), + ] + + # http://crbug.com/725224. Fix for bots running out of memory. + if (defined(java_cmd_pool_size)) { + pool = "//build/config/android:java_cmd_pool($default_toolchain)" + } else { + pool = "//build/toolchain:link_pool($default_toolchain)" + } + } + } + + # Generates a script in the build bin directory to run a java binary. + # + # Variables + # main_class: The class containing the program entry point. + # build_config: Path to .build_config.json for the jar (contains classpath). + # script_name: Name of the script to generate. + # wrapper_script_args: List of extra arguments to pass to the executable. 
+ # tiered_stop_at_level_one: Whether to pass --tiered-stop-at-level-one + # + template("java_binary_script") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ]) + + _main_class = invoker.main_class + _build_config = invoker.build_config + _script_name = invoker.script_name + if (defined(invoker.max_heap_size)) { + _max_heap_size = invoker.max_heap_size + } else { + _max_heap_size = "1G" + } + + script = "//build/android/gyp/create_java_binary_script.py" + inputs = [ _build_config ] + _java_script = "$root_build_dir/bin/$_script_name" + outputs = [ _java_script ] + _rebased_build_config = rebase_path(_build_config, root_build_dir) + args = [ + "--output", + rebase_path(_java_script, root_build_dir), + "--main-class", + _main_class, + "--classpath=@FileArg($_rebased_build_config:deps_info:host_classpath)", + "--max-heap-size=$_max_heap_size", + ] + data = [] + + if (use_jacoco_coverage) { + args += [ + "--classpath", + rebase_path(_jacoco_host_jar, root_build_dir), + ] + data += [ _jacoco_host_jar ] + } + if (defined(invoker.tiered_stop_at_level_one) && + invoker.tiered_stop_at_level_one) { + args += [ "--tiered-stop-at-level-one" ] + } + if (defined(invoker.extra_classpath_jars)) { + _rebased_extra_classpath_jars = + rebase_path(invoker.extra_classpath_jars, root_build_dir) + args += [ "--classpath=${_rebased_extra_classpath_jars}" ] + data += invoker.extra_classpath_jars + } + if (defined(invoker.wrapper_script_args)) { + args += [ "--" ] + invoker.wrapper_script_args + } + if (defined(invoker.use_jdk_11) && invoker.use_jdk_11) { + args += [ "--use-jdk-11" ] + deps += [ "//third_party/jdk11:java_data" ] + } else { + deps += [ "//third_party/jdk:java_data" ] + } + } + } + + # Variables + # apply_mapping: The path to the ProGuard mapping file to apply. + # disable_incremental: Disable incremental dexing. + template("dex") { + _min_sdk_version = default_min_sdk_version + if (defined(invoker.min_sdk_version)) { + _min_sdk_version = invoker.min_sdk_version + } + + _proguard_enabled = + defined(invoker.proguard_enabled) && invoker.proguard_enabled + _is_dex_merging = defined(invoker.input_dex_filearg) + _enable_multidex = + !defined(invoker.enable_multidex) || invoker.enable_multidex + _enable_main_dex_list = _enable_multidex && _min_sdk_version < 21 + _enable_desugar = !defined(invoker.enable_desugar) || invoker.enable_desugar + _desugar_needs_classpath = _enable_desugar + + # It's not safe to dex merge with libraries dex'ed at higher api versions. + assert(!_is_dex_merging || _min_sdk_version >= default_min_sdk_version) + + # For D8's backported method desugaring to work properly, the dex merge step + # must not be set to a higher minSdkVersion than it was for the libraries. + if (_enable_desugar && _is_dex_merging) { + _min_sdk_version = default_min_sdk_version + } + + assert(defined(invoker.output) || + (_proguard_enabled && defined(invoker.modules))) + assert(!_proguard_enabled || !(defined(invoker.input_dex_filearg) || + defined(invoker.input_classes_filearg) || + defined(invoker.input_class_jars)), + "Cannot explicitly set inputs when proguarding a dex.") + + # Dex merging should not also be dexing. 
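+ # (That is, a dex() target may be given already-built dex files to merge or + # class inputs to dex, but never both; the asserts below enforce this.)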
+ assert(!(_is_dex_merging && defined(invoker.input_classes_filearg))) + assert(!(_is_dex_merging && defined(invoker.input_class_jars))) + + assert(!(defined(invoker.apply_mapping) && !_proguard_enabled), + "apply_mapping can only be specified if proguard is enabled.") + if (defined(invoker.custom_assertion_handler)) { + assert(_proguard_enabled, + "Proguard is required to support the custom assertion handler.") + } + + if (_enable_main_dex_list) { + _main_dex_rules = "//build/android/main_dex_classes.flags" + } + + if (_desugar_needs_classpath || _proguard_enabled) { + _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) + } + + if (_proguard_enabled) { + _proguard_target_name = target_name + + proguard(_proguard_target_name) { + forward_variables_from(invoker, + TESTONLY_AND_VISIBILITY + [ + "add_view_trace_events", + "build_config", + "custom_assertion_handler", + "data", + "data_deps", + "deps", + "enable_proguard_checks", + "expected_proguard_config", + "expected_proguard_config_base", + "ignore_desugar_missing_deps", + "modules", + "package_name", + "proguard_enable_obfuscation", + "proguard_mapping_path", + "proguard_sourcefile_suffix", + "top_target_name", + "version_code", + ]) + inputs = [] + if (defined(invoker.inputs)) { + inputs += invoker.inputs + } + if (defined(invoker.proguard_configs)) { + inputs += invoker.proguard_configs + } + + args = [ + "--proguard-configs=@FileArg($_rebased_build_config:deps_info:proguard_all_configs)", + "--min-api=$_min_sdk_version", + ] + if (defined(invoker.has_apk_under_test) && invoker.has_apk_under_test) { + args += [ "--input-paths=@FileArg($_rebased_build_config:deps_info:device_classpath_extended)" ] + } else if (defined(invoker.add_view_trace_events) && + invoker.add_view_trace_events && defined(invoker.modules)) { + args += [ "--input-paths=@FileArg($_rebased_build_config:deps_info:trace_event_rewritten_device_classpath)" ] + } else { + args += [ "--input-paths=@FileArg($_rebased_build_config:deps_info:device_classpath)" ] + } + if (defined(invoker.proguard_args)) { + args += invoker.proguard_args + } + + if (defined(invoker.apply_mapping)) { + _rebased_apply_mapping_path = + rebase_path(invoker.apply_mapping, root_build_dir) + args += [ "--apply-mapping=$_rebased_apply_mapping_path" ] + } + + if (_enable_main_dex_list) { + if (defined(invoker.extra_main_dex_proguard_config)) { + args += [ + "--main-dex-rules-path", + rebase_path(invoker.extra_main_dex_proguard_config, + root_build_dir), + ] + inputs += [ invoker.extra_main_dex_proguard_config ] + } + args += [ + "--main-dex-rules-path", + rebase_path(_main_dex_rules, root_build_dir), + ] + inputs += [ _main_dex_rules ] + } + + if (defined(invoker.output)) { + output_path = invoker.output + } else if (!defined(proguard_mapping_path)) { + proguard_mapping_path = "$target_out_dir/$target_name.mapping" + } + } + } else { # !_proguard_enabled + _is_library = defined(invoker.is_library) && invoker.is_library + assert(!(defined(invoker.input_classes_filearg) && _is_library)) + assert(_is_library == defined(invoker.unprocessed_jar_path)) + _input_class_jars = [] + if (defined(invoker.input_class_jars)) { + _input_class_jars = invoker.input_class_jars + } + _deps = invoker.deps + + if (_input_class_jars != []) { + _rebased_input_class_jars = + rebase_path(_input_class_jars, root_build_dir) + } + + action_with_pydeps(target_name) { + forward_variables_from(invoker, + TESTONLY_AND_VISIBILITY + [ + "data", + "data_deps", + ]) + script = "//build/android/gyp/dex.py" + deps = 
_deps + depfile = "$target_gen_dir/$target_name.d" + outputs = [ invoker.output ] + inputs = [ + _d8_path, + _custom_d8_path, + ] + if (defined(invoker.inputs)) { + inputs += invoker.inputs + } + + if (!_is_library) { + # http://crbug.com/725224. Fix for bots running out of memory. + if (defined(java_cmd_pool_size)) { + pool = "//build/config/android:java_cmd_pool($default_toolchain)" + } else { + pool = "//build/toolchain:link_pool($default_toolchain)" + } + } + + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--output", + rebase_path(outputs[0], root_build_dir), + "--min-api=$_min_sdk_version", + "--r8-jar-path", + rebase_path(_d8_path, root_build_dir), + "--custom-d8-jar-path", + rebase_path(_custom_d8_path, root_build_dir), + + # Uncomment when rebuilding custom_d8.jar. + #"--skip-custom-d8", + ] + if (treat_warnings_as_errors) { + args += [ "--warnings-as-errors" ] + } + + if (enable_incremental_d8 && !(defined(invoker.disable_incremental) && + invoker.disable_incremental)) { + # Don't use incremental dexing for ProGuarded inputs as a precaution. + args += [ + "--incremental-dir", + rebase_path("$target_out_dir/$target_name", root_build_dir), + ] + } + + if (_enable_multidex) { + args += [ "--multi-dex" ] + if (_enable_main_dex_list) { + if (defined(invoker.extra_main_dex_proguard_config)) { + args += [ + "--main-dex-rules-path", + rebase_path(invoker.extra_main_dex_proguard_config, + root_build_dir), + ] + inputs += [ invoker.extra_main_dex_proguard_config ] + } + args += [ + "--main-dex-rules-path", + rebase_path(_main_dex_rules, root_build_dir), + ] + inputs += [ _main_dex_rules ] + } + } + if (_is_library) { + args += [ "--library" ] + } + if (defined(invoker.input_dex_filearg)) { + inputs += [ invoker.build_config ] + args += [ "--dex-inputs-filearg=${invoker.input_dex_filearg}" ] + } + if (defined(invoker.input_classes_filearg)) { + inputs += [ invoker.build_config ] + args += [ "--class-inputs-filearg=${invoker.input_classes_filearg}" ] + + # Required for the same reason as unprocessed_jar_path is added to + # classpath (see note below). + args += [ "--classpath=${invoker.input_classes_filearg}" ] + } + if (_input_class_jars != []) { + inputs += _input_class_jars + args += [ "--class-inputs=${_rebased_input_class_jars}" ] + } + + # Never compile intermediates with --release in order to: + # 1) not require recompiles when toggling is_java_debug, + # 2) allow incremental_install=1 to still have local variable + # information even when is_java_debug=false. + if (!is_java_debug && !_is_library) { + args += [ "--release" ] + } + + if (_enable_desugar) { + args += [ "--desugar" ] + + _ignore_desugar_missing_deps = + defined(invoker.ignore_desugar_missing_deps) && + invoker.ignore_desugar_missing_deps + if (!_ignore_desugar_missing_deps) { + args += [ "--show-desugar-default-interface-warnings" ] + } + } + if (_desugar_needs_classpath) { + # Cannot use header jar for the active jar, because it does not + # contain anonymous classes. https://crbug.com/1342018#c5 + # Cannot use processed .jar here because it might have classes + # filtered out via jar_excluded_patterns. + # Must come first in classpath in order to take precedence over + # deps that defined the same classes (via jar_excluded_patterns). + if (defined(invoker.unprocessed_jar_path)) { + args += [ + "--classpath", + rebase_path(invoker.unprocessed_jar_path, root_build_dir), + + # Pass the full classpath to find new dependencies that are not in + # the .desugardeps file.
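+ # (Interface jars should suffice for this purpose, since desugaring only + # consults the classpath for type structure rather than method bodies.)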
+ "--classpath=@FileArg($_rebased_build_config:deps_info:javac_full_interface_classpath)", + ] + inputs += [ invoker.unprocessed_jar_path ] + } + _desugar_dependencies_path = + "$target_gen_dir/$target_name.desugardeps" + args += [ + "--desugar-dependencies", + rebase_path(_desugar_dependencies_path, root_build_dir), + "--bootclasspath=@FileArg($_rebased_build_config:android:sdk_jars)", + ] + } + + if (defined(invoker.custom_assertion_handler)) { + args += [ + "--assertion-handler", + invoker.custom_assertion_handler, + ] + } else if (enable_java_asserts) { + # The default for generating dex file format is + # --force-disable-assertions. + args += [ "--force-enable-assertions" ] + } + } + } + } + + template("jacoco_instr") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, + TESTONLY_AND_VISIBILITY + [ + "deps", + "public_deps", + ]) + + # The name needs to match the SOURCES_JSON_FILES_SUFFIX in + # generate_coverage_metadata_for_java.py. + _sources_json_file = "$target_out_dir/${target_name}__jacoco_sources.json" + _jacococli_jar = "//third_party/jacoco/lib/jacococli.jar" + + script = "//build/android/gyp/jacoco_instr.py" + inputs = invoker.source_files + [ + _jacococli_jar, + invoker.input_jar_path, + ] + outputs = [ + _sources_json_file, + invoker.output_jar_path, + ] + args = [ + "--input-path", + rebase_path(invoker.input_jar_path, root_build_dir), + "--output-path", + rebase_path(invoker.output_jar_path, root_build_dir), + "--sources-json-file", + rebase_path(_sources_json_file, root_build_dir), + "--target-sources-file", + rebase_path(invoker.target_sources_file, root_build_dir), + "--jacococli-jar", + rebase_path(_jacococli_jar, root_build_dir), + ] + if (coverage_instrumentation_input_file != "") { + args += [ + "--files-to-instrument", + rebase_path(coverage_instrumentation_input_file, root_build_dir), + ] + } + } + } + + template("filter_jar") { + action_with_pydeps(target_name) { + script = "//build/android/gyp/filter_zip.py" + forward_variables_from(invoker, + TESTONLY_AND_VISIBILITY + [ + "deps", + "data", + "data_deps", + ]) + inputs = [ invoker.input_jar ] + if (defined(invoker.inputs)) { + inputs += invoker.inputs + } + outputs = [ invoker.output_jar ] + + _jar_excluded_patterns = [] + if (defined(invoker.jar_excluded_patterns)) { + _jar_excluded_patterns = invoker.jar_excluded_patterns + } + _jar_included_patterns = [] + if (defined(invoker.jar_included_patterns)) { + _jar_included_patterns = invoker.jar_included_patterns + } + args = [ + "--input", + rebase_path(invoker.input_jar, root_build_dir), + "--output", + rebase_path(invoker.output_jar, root_build_dir), + "--exclude-globs=${_jar_excluded_patterns}", + "--include-globs=${_jar_included_patterns}", + ] + } + } + + template("process_java_library") { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + + _previous_output_jar = invoker.input_jar_path + + if (invoker.jacoco_instrument) { + _filter_jar_target_name = "${target_name}__filter_jar" + _filter_jar_output_jar = "$target_out_dir/$target_name.filter.jar" + } else { + _filter_jar_target_name = target_name + _filter_jar_output_jar = invoker.output_jar_path + } + + filter_jar(_filter_jar_target_name) { + forward_variables_from(invoker, + [ + "data", + "data_deps", + "jar_excluded_patterns", + "jar_included_patterns", + ]) + deps = invoker.deps + input_jar = _previous_output_jar + output_jar = _filter_jar_output_jar + } + + if (invoker.jacoco_instrument) { + # Jacoco must run after desugar (or else desugar sometimes fails). 
+ # It must run after filtering to avoid the same (filtered) class mapping + # to multiple .jar files. + # We run offline code coverage processing here rather than with a + # javaagent as the desired coverage data was not being generated. + # See crbug.com/1097815. + jacoco_instr(target_name) { + deps = [ ":$_filter_jar_target_name" ] + invoker.deps + forward_variables_from(invoker, + [ + "source_files", + "target_sources_file", + ]) + + input_jar_path = _filter_jar_output_jar + output_jar_path = invoker.output_jar_path + } + } + } + + template("bytecode_processor") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "data_deps" ]) + _bytecode_checker_script = "$root_build_dir/bin/helper/bytecode_processor" + script = "//build/android/gyp/bytecode_processor.py" + inputs = [ + invoker.build_config, + invoker.input_jar, + _bytecode_checker_script, + ] + outputs = [ "$target_out_dir/$target_name.bytecode.stamp" ] + deps = + invoker.deps + + [ "//build/android/bytecode:bytecode_processor($default_toolchain)" ] + _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) + args = [ + "--target-name", + get_label_info(":${target_name}", "label_no_toolchain"), + "--script", + rebase_path(_bytecode_checker_script, root_build_dir), + "--gn-target=${invoker.target_label}", + "--input-jar", + rebase_path(invoker.input_jar, root_build_dir), + "--stamp", + rebase_path(outputs[0], root_build_dir), + "--direct-classpath-jars=@FileArg($_rebased_build_config:javac:classpath)", + "--full-classpath-jars=@FileArg($_rebased_build_config:deps_info:javac_full_classpath)", + "--full-classpath-gn-targets=@FileArg($_rebased_build_config:deps_info:javac_full_classpath_targets)", + ] + if (android_static_analysis == "build_server") { + args += [ "--use-build-server" ] + } + if (invoker.include_android_sdk) { + args += [ "--sdk-classpath-jars=@FileArg($_rebased_build_config:android:sdk_jars)" ] + } + if (invoker.is_prebuilt) { + args += [ "--is-prebuilt" ] + } + if (treat_warnings_as_errors) { + args += [ "--warnings-as-errors" ] + } + if (defined(invoker.missing_classes_allowlist)) { + args += [ + "--missing-classes-allowlist=${invoker.missing_classes_allowlist}", + ] + } + } + } + + template("merge_manifests") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ]) + script = "//build/android/gyp/merge_manifest.py" + depfile = "$target_gen_dir/$target_name.d" + + inputs = [ + invoker.build_config, + invoker.input_manifest, + _manifest_merger_jar_path, + ] + + outputs = [ invoker.output_manifest ] + _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) + + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--manifest-merger-jar", + rebase_path(_manifest_merger_jar_path, root_build_dir), + "--root-manifest", + rebase_path(invoker.input_manifest, root_build_dir), + "--output", + rebase_path(invoker.output_manifest, root_build_dir), + "--extras", + "@FileArg($_rebased_build_config:extra_android_manifests)", + "--min-sdk-version=${invoker.min_sdk_version}", + "--target-sdk-version=${invoker.target_sdk_version}", + ] + + if (defined(invoker.manifest_package)) { + args += [ "--manifest-package=${invoker.manifest_package}" ] + } + + if (defined(invoker.max_sdk_version)) { + args += [ "--max-sdk-version=${invoker.max_sdk_version}" ] + } + + if (treat_warnings_as_errors) { + args += [ "--warnings-as-errors" ] + } + } + } + + # This template is used to parse a set of resource 
directories and + # create the R.txt, .srcjar and .resources.zip for it. + # + # Input variables: + # deps: Specifies the input dependencies for this target. + # + # build_config: Path to the .build_config.json file corresponding to the target. + # + # sources: + # List of input resource files. + # + # custom_package: (optional) + # Package name for the generated R.java source file. Required if + # android_manifest is not provided. + # + # android_manifest: (optional) + # If custom_package is not provided, path to an AndroidManifest.xml file + # that is only used to extract a package name out of it. + # + # r_text_in_path: (optional) + # Path to an input R.txt file to use to generate the R.java file. + # The default is to use 'aapt' to generate the file from the content + # of the resource directories. + # + # Output variables: + # resources_zip: + # Path to a .resources.zip that will simply contain all the + # input resources, collected in a single archive. + # + # r_text_out_path: Path for the generated R.txt file. + # + template("prepare_resources") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, + TESTONLY_AND_VISIBILITY + [ + "deps", + "public_deps", + "sources", + ]) + script = "//build/android/gyp/prepare_resources.py" + + depfile = "$target_gen_dir/${invoker.target_name}.d" + outputs = [ + invoker.resources_zip, + invoker.resources_zip + ".info", + invoker.r_text_out_path, + ] + + inputs = [ invoker.res_sources_path ] + + _rebased_res_sources_path = + rebase_path(invoker.res_sources_path, root_build_dir) + + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--res-sources-path=$_rebased_res_sources_path", + "--resource-zip-out", + rebase_path(invoker.resources_zip, root_build_dir), + "--r-text-out", + rebase_path(invoker.r_text_out_path, root_build_dir), + ] + + if (defined(invoker.r_text_in_path)) { + _r_text_in_path = invoker.r_text_in_path + inputs += [ _r_text_in_path ] + args += [ + "--r-text-in", + rebase_path(_r_text_in_path, root_build_dir), + ] + } + + if (defined(invoker.strip_drawables) && invoker.strip_drawables) { + args += [ "--strip-drawables" ] + } + if (defined(invoker.allow_missing_resources) && + invoker.allow_missing_resources) { + args += [ "--allow-missing-resources" ] + } + } + } + + # A template that is used to compile all resources needed by a binary + # (e.g. an android_apk or a robolectric_binary) into an intermediate .ap_ + # archive. It can also generate an associated .srcjar that contains the + # final R.java sources for all resource packages the binary depends on. + # + # Input variables: + # android_sdk_dep: The sdk dep that these resources should compile against. + # + # deps: Specifies the input dependencies for this target. + # + # build_config: Path to the .build_config.json file corresponding to the target. + # + # build_config_dep: Dep target to generate the .build_config.json file. + # + # android_manifest: Path to root manifest for the binary. + # + # version_code: (optional) + # + # version_name: (optional) + # + # shared_resources: (optional) + # If true, make all variables in each generated R.java file non-final, + # and provide an onResourcesLoaded() method that can be used to reset + # their package index at load time. Useful when the APK corresponds to + # a library that is loaded at runtime, like system_webview_apk or + # monochrome_apk.
+ # + # app_as_shared_lib: (optional) + # If true, same effect as shared_resources, but also ensures that the + # resources can be used by the APK when it is loaded as a regular + # application as well. Useful for the monochrome_public_apk target + # which is both an application and a shared runtime library that + # implements the system webview feature. + # + # shared_resources_allowlist: (optional) + # Path to an R.txt file. If provided, acts similarly to shared_resources + # except that it restricts the list of non-final resource variables + # to the list from the input R.txt file. Overrides shared_resources + # when both are specified. + # + # shared_resources_allowlist_locales: (optional) + # If shared_resources_allowlist is used, provide an optional list of + # Chromium locale names to determine which localized shared string + # resources to put in the final output, even if aapt_locale_allowlist + # is defined to a smaller subset. + # + # aapt_locale_allowlist: (optional) + # Restrict compiled locale-dependent resources to a specific allowlist. + # NOTE: This is a list of Chromium locale names, not Android ones. + # + # r_java_root_package_name: (optional) + # Short package name for this target's root R java file (ex. input of + # "base" would become "gen.base_module" for the root R java package name). + # Optional; defaults to "base". + # + # resource_exclusion_regex: (optional) + # + # resource_exclusion_exceptions: (optional) + # + # resource_values_filter_rules: (optional) + # + # png_to_webp: (optional) + # If true, convert all PNG resources (except 9-patch files) to WebP. + # + # post_process_script: (optional) + # + # package_name: (optional) + # Name of the package for the purpose of creating the R class. + # + # package_id: (optional) + # Use a custom package ID in resource IDs. + # + # arsc_package_name: (optional) + # Use this package name in the arsc file rather than the package name + # found in the AndroidManifest.xml. Does not affect the package name + # used in AndroidManifest.xml. + # + # resource_ids_provider_dep: (optional) + # Use resource IDs provided by another APK target when compiling resources + # (via "aapt2 link --stable-ids") + # + # + # Output variables: + # arsc_output: Path to output .ap_ file (optional). + # + # proto_output: Path to output .proto.ap_ file (optional). + # + # r_text_out_path: (optional) + # Path for the corresponding generated R.txt file. + # + # proguard_file: (optional) + # Path to proguard configuration file for this apk target.
+ # + # proguard_file_main_dex: (optional) + # + template("compile_resources") { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + + _deps = invoker.deps + [ + invoker.android_sdk_dep, + invoker.build_config_dep, + ] + if (defined(invoker.android_manifest_dep)) { + _deps += [ invoker.android_manifest_dep ] + } + + if (defined(invoker.arsc_output)) { + _arsc_output = invoker.arsc_output + } + _final_srcjar_path = "${target_gen_dir}/${target_name}.srcjar" + + _script = "//build/android/gyp/compile_resources.py" + + _inputs = [ + invoker.build_config, + android_sdk_tools_bundle_aapt2, + ] + + _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) + + _args = [ + "--include-resources=@FileArg($_rebased_build_config:android:sdk_jars)", + "--aapt2-path", + rebase_path(android_sdk_tools_bundle_aapt2, root_build_dir), + "--dependencies-res-zips=@FileArg($_rebased_build_config:deps_info:dependency_zips)", + "--extra-res-packages=@FileArg($_rebased_build_config:deps_info:extra_package_names)", + "--min-sdk-version=${invoker.min_sdk_version}", + "--target-sdk-version=${invoker.target_sdk_version}", + "--webp-cache-dir=obj/android-webp-cache", + ] + + _inputs += [ invoker.android_manifest ] + _outputs = [ _final_srcjar_path ] + _args += [ + "--android-manifest", + rebase_path(invoker.android_manifest, root_build_dir), + "--srcjar-out", + rebase_path(_final_srcjar_path, root_build_dir), + ] + if (defined(invoker.version_code)) { + _args += [ + "--version-code", + invoker.version_code, + ] + } + if (defined(invoker.version_name)) { + _args += [ + "--version-name", + invoker.version_name, + ] + } + if (defined(_arsc_output)) { + _outputs += [ _arsc_output ] + _args += [ + "--arsc-path", + rebase_path(_arsc_output, root_build_dir), + ] + } + if (defined(invoker.proto_output)) { + _outputs += [ invoker.proto_output ] + _args += [ + "--proto-path", + rebase_path(invoker.proto_output, root_build_dir), + ] + } + if (defined(invoker.size_info_path)) { + _outputs += [ invoker.size_info_path ] + _args += [ + "--info-path", + rebase_path(invoker.size_info_path, root_build_dir), + ] + } + + if (defined(invoker.r_java_root_package_name)) { + _args += [ + "--r-java-root-package-name", + invoker.r_java_root_package_name, + ] + } + + # Useful to have android:debuggable in the manifest even for Release + # builds. Just omit it for official builds. + if (debuggable_apks) { + _args += [ "--debuggable" ] + } + + if (defined(invoker.r_text_out_path)) { + _outputs += [ invoker.r_text_out_path ] + _args += [ + "--r-text-out", + rebase_path(invoker.r_text_out_path, root_build_dir), + ] + } + + if (defined(invoker.rename_manifest_package)) { + _args += [ + "--rename-manifest-package", + invoker.rename_manifest_package, + ] + } + + # Define the flags related to shared resources. + # + # Note the small sanity check to ensure that the package ID of the + # generated resources table is correct. It should be 0x02 for runtime + # shared libraries, and 0x7f otherwise.
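+ # (The package ID is the top byte of each resource ID, e.g. the 0x7f in + # 0x7f0e0012.)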
+ + if (defined(invoker.shared_resources) && invoker.shared_resources) { + _args += [ "--shared-resources" ] + } + if (defined(invoker.app_as_shared_lib) && invoker.app_as_shared_lib) { + _args += [ "--app-as-shared-lib" ] + } + if (defined(invoker.package_id)) { + _args += [ "--package-id=${invoker.package_id}" ] + } + if (defined(invoker.package_name)) { + _args += [ + "--package-name", + invoker.package_name, + ] + } + if (defined(invoker.arsc_package_name)) { + _args += [ + "--arsc-package-name", + invoker.arsc_package_name, + ] + } + + if (defined(invoker.shared_resources_allowlist)) { + _inputs += [ invoker.shared_resources_allowlist ] + _args += [ + "--shared-resources-allowlist", + rebase_path(invoker.shared_resources_allowlist, root_build_dir), + ] + } + if (defined(invoker.shared_resources_allowlist_locales)) { + _args += [ "--shared-resources-allowlist-locales=" + + "${invoker.shared_resources_allowlist_locales}" ] + } + + if (!defined(testonly) || !testonly || + (defined(invoker.enforce_resource_overlays_in_tests) && + invoker.enforce_resource_overlays_in_tests)) { + _args += [ "--dependencies-res-zip-overlays=@FileArg($_rebased_build_config:deps_info:dependency_zip_overlays)" ] + } else { + _args += [ "--dependencies-res-zip-overlays=@FileArg($_rebased_build_config:deps_info:dependency_zips)" ] + } + + if (defined(invoker.proguard_file)) { + _outputs += [ invoker.proguard_file ] + _args += [ + "--proguard-file", + rebase_path(invoker.proguard_file, root_build_dir), + ] + } + + if (defined(invoker.proguard_file_main_dex)) { + _outputs += [ invoker.proguard_file_main_dex ] + _args += [ + "--proguard-file-main-dex", + rebase_path(invoker.proguard_file_main_dex, root_build_dir), + ] + } + + if (defined(invoker.aapt_locale_allowlist)) { + _args += [ "--locale-allowlist=${invoker.aapt_locale_allowlist}" ] + } + if (defined(invoker.png_to_webp) && invoker.png_to_webp) { + _webp_target = "//third_party/libwebp:cwebp($host_toolchain)" + _webp_binary = get_label_info(_webp_target, "root_out_dir") + "/cwebp" + _deps += [ _webp_target ] + _inputs += [ _webp_binary ] + _args += [ + "--png-to-webp", + "--webp-binary", + rebase_path(_webp_binary, root_build_dir), + ] + } + if (defined(invoker.resource_exclusion_regex)) { + _args += + [ "--resource-exclusion-regex=${invoker.resource_exclusion_regex}" ] + if (defined(invoker.resource_exclusion_exceptions)) { + _args += [ "--resource-exclusion-exceptions=${invoker.resource_exclusion_exceptions}" ] + } + } + if (defined(invoker.resource_values_filter_rules)) { + _args += + [ "--values-filter-rules=${invoker.resource_values_filter_rules}" ] + } + + if (defined(invoker.include_resource)) { + _inputs += [ invoker.include_resource ] + _rebased_include_resources = + rebase_path(invoker.include_resource, root_build_dir) + _args += [ "--include-resources=$_rebased_include_resources" ] + } + + if (defined(invoker._args)) { + _args += invoker._args + } + + if (defined(invoker.emit_ids_out_path)) { + _outputs += [ invoker.emit_ids_out_path ] + _rebased_emit_ids_path = + rebase_path(invoker.emit_ids_out_path, root_out_dir) + _args += [ "--emit-ids-out=$_rebased_emit_ids_path" ] + } + + if (defined(invoker.resource_ids_provider_dep)) { + _compile_res_dep = + "${invoker.resource_ids_provider_dep}__compile_resources" + _gen_dir = get_label_info(_compile_res_dep, "target_gen_dir") + _name = get_label_info(_compile_res_dep, "name") + _resource_ids_path = "$_gen_dir/$_name.resource_ids" + _inputs += [ _resource_ids_path ] + _rebased_ids_path = 
rebase_path(_resource_ids_path, root_out_dir)
+ _args += [ "--use-resource-ids-path=$_rebased_ids_path" ]
+ _deps += [ _compile_res_dep ]
+ }
+
+ if (defined(invoker.max_sdk_version)) {
+ _max_sdk_version = invoker.max_sdk_version
+ _args += [ "--max-sdk-version=$_max_sdk_version" ]
+ }
+
+ if (defined(invoker.manifest_package)) {
+ _args += [ "--manifest-package=${invoker.manifest_package}" ]
+ }
+
+ if (defined(invoker.is_bundle_module) && invoker.is_bundle_module) {
+ _args += [ "--is-bundle-module" ]
+ }
+
+ if (defined(invoker.uses_split)) {
+ assert(invoker.is_bundle_module)
+ _args += [ "--uses-split=${invoker.uses_split}" ]
+ }
+
+ if (defined(invoker.expected_android_manifest)) {
+ _expectations_target =
+ "${invoker.top_target_name}_validate_android_manifest"
+ action_with_pydeps(_expectations_target) {
+ _actual_file = "${invoker.android_manifest}.normalized"
+ _failure_file =
+ "$expectations_failure_dir/" +
+ string_replace(invoker.expected_android_manifest, "/", "_")
+ inputs = [
+ invoker.android_manifest,
+ invoker.build_config,
+ invoker.expected_android_manifest,
+ ]
+ outputs = [
+ _actual_file,
+ _failure_file,
+ ]
+ deps = [
+ invoker.android_manifest_dep,
+ invoker.build_config_dep,
+ ]
+ script = _script
+ args = _args + [
+ "--expected-file",
+ rebase_path(invoker.expected_android_manifest, root_build_dir),
+ "--actual-file",
+ rebase_path(_actual_file, root_build_dir),
+ "--failure-file",
+ rebase_path(_failure_file, root_build_dir),
+ "--only-verify-expectations",
+ ]
+ if (defined(invoker.expected_android_manifest_base)) {
+ args += [
+ "--expected-file-base",
+ rebase_path(invoker.expected_android_manifest_base, root_build_dir),
+ ]
+ inputs += [ invoker.expected_android_manifest_base ]
+ }
+ if (defined(invoker.expected_android_manifest_version_code_offset)) {
+ args += [
+ "--verification-version-code-offset",
+ invoker.expected_android_manifest_version_code_offset,
+ ]
+ }
+ if (defined(invoker.expected_android_manifest_library_version_offset)) {
+ args += [
+ "--verification-library-version-offset",
+ invoker.expected_android_manifest_library_version_offset,
+ ]
+ }
+ if (fail_on_android_expectations) {
+ args += [ "--fail-on-expectations" ]
+ }
+ }
+ _deps += [ ":$_expectations_target" ]
+ }
+
+ action_with_pydeps(target_name) {
+ script = _script
+ depfile = "$target_gen_dir/${target_name}.d"
+ inputs = _inputs
+ outputs = _outputs
+ deps = _deps
+ args = _args + [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ ]
+ }
+ }
+
+ # A template that is used to optimize compiled resources using aapt2 optimize.
+ #
+ # proto_input_path:
+ # Path to input compiled .proto.ap_ file.
+ #
+ # short_resource_paths: (optional)
+ # Rename the paths within the apk to be randomly generated short
+ # strings to reduce binary size.
+ #
+ # strip_resource_names: (optional)
+ # Strip resource names from the resources table of the apk.
+ #
+ # resources_config_paths: (optional)
+ # List of resource configs to use for optimization.
+ #
+ # optimized_proto_output:
+ # Path to output optimized .proto.ap_ file.
+ #
+ # resources_path_map_out_path: (optional)
+ # Path for the generated map between original resource paths and
+ # shortened resource paths.
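+ #
+ # Example (a hypothetical caller; the target and file names here are
+ # illustrative only, not taken from a real build file). Note that
+ # r_text_path is also consumed by the template body below even though it
+ # is not listed above:
+ #
+ #   optimize_resources("foo__optimize_resources") {
+ #     proto_input_path = "$target_out_dir/foo.proto.ap_"
+ #     r_text_path = "$target_gen_dir/foo_R.txt"
+ #     optimized_proto_output = "$target_out_dir/foo.optimized.proto.ap_"
+ #     short_resource_paths = true
+ #     resources_path_map_out_path = "$target_gen_dir/foo.pathmap.txt"
+ #     strip_resource_names = true
+ #   }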
+ template("optimize_resources") { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + action_with_pydeps(target_name) { + forward_variables_from(invoker, [ "deps" ]) + script = "//build/android/gyp/optimize_resources.py" + outputs = [ invoker.optimized_proto_output ] + inputs = [ + android_sdk_tools_bundle_aapt2, + invoker.r_text_path, + invoker.proto_input_path, + ] + args = [ + "--aapt2-path", + rebase_path(android_sdk_tools_bundle_aapt2, root_build_dir), + "--r-text-in", + rebase_path(invoker.r_text_path, root_build_dir), + "--proto-path", + rebase_path(invoker.proto_input_path, root_build_dir), + "--optimized-proto-path", + rebase_path(invoker.optimized_proto_output, root_build_dir), + ] + + if (defined(invoker.resources_config_paths)) { + inputs += invoker.resources_config_paths + _rebased_resource_configs = + rebase_path(invoker.resources_config_paths, root_build_dir) + args += [ "--resources-config-paths=${_rebased_resource_configs}" ] + } + + if (defined(invoker.short_resource_paths) && + invoker.short_resource_paths) { + args += [ "--short-resource-paths" ] + if (defined(invoker.resources_path_map_out_path)) { + outputs += [ invoker.resources_path_map_out_path ] + args += [ + "--resources-path-map-out-path", + rebase_path(invoker.resources_path_map_out_path, root_build_dir), + ] + } + } + + if (defined(invoker.strip_resource_names) && + invoker.strip_resource_names) { + args += [ "--strip-resource-names" ] + } + } + } + + # A template that is used to find unused resources. + template("unused_resources") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ]) + script = "//build/android/gyp/unused_resources.py" + depfile = "$target_gen_dir/${target_name}.d" + _unused_resources_script = "$root_build_dir/bin/helper/unused_resources" + inputs = [ _unused_resources_script ] + outputs = [ + invoker.output_config, + invoker.output_r_txt, + ] + if (!defined(deps)) { + deps = [] + } + deps += [ "//build/android/unused_resources:unused_resources" ] + _rebased_module_build_config = + rebase_path(invoker.build_config, root_build_dir) + args = [ + "--script", + rebase_path(_unused_resources_script, root_build_dir), + "--output-config", + rebase_path(invoker.output_config, root_build_dir), + "--r-text-in=@FileArg($_rebased_module_build_config:deps_info:r_text_path)", + "--r-text-out", + rebase_path(invoker.output_r_txt, root_build_dir), + "--dependencies-res-zips=@FileArg($_rebased_module_build_config:deps_info:dependency_zips)", + "--depfile", + rebase_path(depfile, root_build_dir), + ] + + if (defined(invoker.proguard_mapping_path)) { + inputs += [ invoker.proguard_mapping_path ] + args += [ + "--proguard-mapping", + rebase_path(invoker.proguard_mapping_path, root_build_dir), + ] + } + + foreach(_build_config, invoker.all_module_build_configs) { + inputs += [ _build_config ] + _rebased_build_config = rebase_path(_build_config, root_build_dir) + args += [ + "--dexes=@FileArg($_rebased_build_config:final_dex:path)", + "--android-manifests=@FileArg($_rebased_build_config:deps_info:merged_android_manifest)", + ] + } + } + } + + # Create an .jar.info file by merging several .jar.info files into one. + # + # Variables: + # build_config: Path to APK's build config file. Used to extract the + # list of input .jar files from its dependencies. + # name: Name of the apk or app bundle (e.g. "Foo.apk"). + # res_size_info_path: Path to input .ap_.info file (for apks). 
+ # + template("create_size_info_files") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ]) + script = "//build/android/gyp/create_size_info_files.py" + _jar_info_path = "$root_build_dir/size-info/${invoker.name}.jar.info" + _pak_info_path = "$root_build_dir/size-info/${invoker.name}.pak.info" + _res_info_path = "$root_build_dir/size-info/${invoker.name}.res.info" + outputs = [ + _jar_info_path, + _pak_info_path, + _res_info_path, + ] + depfile = "$target_gen_dir/$target_name.d" + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--jar-info-path", + rebase_path(_jar_info_path, root_build_dir), + "--pak-info-path", + rebase_path(_pak_info_path, root_build_dir), + "--res-info-path", + rebase_path(_res_info_path, root_build_dir), + ] + _is_bundle = defined(invoker.module_build_configs) + if (_is_bundle) { + inputs = invoker.module_build_configs + foreach(_build_config, invoker.module_build_configs) { + _rebased_build_config = rebase_path(_build_config, root_build_dir) + args += [ + "--jar-files=@FileArg($_rebased_build_config:deps_info:unprocessed_jar_path)", + "--jar-files=@FileArg($_rebased_build_config:deps_info:javac_full_classpath)", + "--in-res-info-path=@FileArg($_rebased_build_config:deps_info:res_size_info)", + "--assets=@FileArg($_rebased_build_config:assets)", + "--uncompressed-assets=@FileArg($_rebased_build_config:uncompressed_assets)", + ] + } + } else { + inputs = [ + invoker.build_config, + invoker.res_size_info_path, + ] + _rebased_build_config = + rebase_path(invoker.build_config, root_build_dir) + args += [ + "--jar-files=@FileArg($_rebased_build_config:deps_info:unprocessed_jar_path)", + "--jar-files=@FileArg($_rebased_build_config:deps_info:javac_full_classpath)", + "--in-res-info-path", + rebase_path(invoker.res_size_info_path, root_build_dir), + "--assets=@FileArg($_rebased_build_config:assets)", + "--uncompressed-assets=@FileArg($_rebased_build_config:uncompressed_assets)", + ] + } + } + } + + template("create_binary_profile") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + forward_variables_from(invoker, [ "deps" ]) + script = "//build/android/gyp/binary_baseline_profile.py" + depfile = "$target_gen_dir/$target_name.d" + outputs = [ + invoker.binary_baseline_profile_path, + invoker.binary_baseline_profile_metadata_path, + ] + _profgen_path = "$android_sdk_root/cmdline-tools/latest/bin/profgen" + _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) + inputs = [ + invoker.build_config, + invoker.proguard_mapping_path, + invoker.input_profile_path, + _profgen_path, + ] + args = [ + "--profgen", + rebase_path(_profgen_path, root_build_dir), + "--output-profile", + rebase_path(invoker.binary_baseline_profile_path, root_build_dir), + "--output-metadata", + rebase_path(invoker.binary_baseline_profile_metadata_path, + root_build_dir), + "--dex=@FileArg($_rebased_build_config:final_dex:path)", + "--proguard-mapping", + rebase_path(invoker.proguard_mapping_path, root_build_dir), + "--input-profile-path", + rebase_path(invoker.input_profile_path, root_build_dir), + "--depfile", + rebase_path(depfile, root_build_dir), + ] + } + } + + # Creates a signed and aligned .apk. + # + # Variables + # apk_name: (optional) APK name (without .apk suffix). If provided, will + # be used to generate .info files later used by the supersize tool. + # assets_build_config: Path to android_apk .build_config.json containing merged + # asset information. 
+ # deps: Specifies the dependencies of this target.
+ # dex_path: Path to classes.dex file to include (optional).
+ # expected_libs_and_assets: Verify the list of included native libraries
+ # and assets is consistent with the given expectation file.
+ # expected_libs_and_assets_base: Treat expected_libs_and_assets as a diff
+ # with this file as the base.
+ # packaged_resources_path: Path to .ap_ to use.
+ # output_apk_path: Output path for the generated .apk.
+ # min_sdk_version: The minimum Android SDK version this target supports.
+ # native_lib_placeholders: List of placeholder filenames to add to the apk
+ # (optional).
+ # secondary_native_lib_placeholders: List of placeholder filenames to add to
+ # the apk for the secondary ABI (optional).
+ # loadable_modules: List of native libraries.
+ # native_libs_filearg: @FileArg() of additional native libraries.
+ # secondary_abi_loadable_modules: (optional) List of native libraries for
+ # secondary ABI.
+ # secondary_abi_native_libs_filearg: (optional) @FileArg() of additional
+ # secondary ABI native libs.
+ # keystore_path: Path to keystore to use for signing.
+ # keystore_name: Key alias to use.
+ # keystore_password: Keystore password.
+ template("package_apk") {
+ forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "public_deps" ])
+ _is_robolectric_apk =
+ defined(invoker.is_robolectric_apk) && invoker.is_robolectric_apk
+ _deps = invoker.deps
+ _native_lib_placeholders = []
+ if (defined(invoker.native_lib_placeholders)) {
+ _native_lib_placeholders = invoker.native_lib_placeholders
+ }
+ _secondary_native_lib_placeholders = []
+ if (defined(invoker.secondary_native_lib_placeholders)) {
+ _secondary_native_lib_placeholders =
+ invoker.secondary_native_lib_placeholders
+ }
+
+ _script = "//build/android/gyp/apkbuilder.py"
+
+ _inputs = [ invoker.packaged_resources_path ]
+
+ _outputs = [ invoker.output_apk_path ]
+ _data = [ invoker.output_apk_path ]
+
+ _rebased_compiled_resources_path =
+ rebase_path(invoker.packaged_resources_path, root_build_dir)
+ _rebased_packaged_apk_path =
+ rebase_path(invoker.output_apk_path, root_build_dir)
+ _args = [
+ "--resource-apk=$_rebased_compiled_resources_path",
+ "--output-apk=$_rebased_packaged_apk_path",
+ "--min-sdk-version=${invoker.min_sdk_version}",
+ ]
+
+ # system_image_stub_apk does not use a build_config.json.
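+ # Note on @FileArg(): arguments of the form @FileArg(path:key1:key2) are
+ # expanded by the script itself (see ExpandFileArgs() in
+ # build/android/gyp/util/build_utils.py): the value found at [key1][key2]
+ # in the JSON file at |path| is substituted at build time, so GN never has
+ # to read the .build_config.json files referenced below.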
+ if (defined(invoker.build_config)) { + _inputs += [ invoker.build_config ] + _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) + _args += [ + "--assets=@FileArg($_rebased_build_config:assets)", + "--uncompressed-assets=@FileArg($_rebased_build_config:uncompressed_assets)", + ] + if (!_is_robolectric_apk) { + _args += [ "--java-resources=@FileArg($_rebased_build_config:java_resources_jars)" ] + } + } + if (defined(invoker.extra_assets)) { + _args += [ "--assets=${invoker.extra_assets}" ] + } + if (!_is_robolectric_apk) { + _apksigner = "$android_sdk_build_tools/lib/apksigner.jar" + _zipalign = "$android_sdk_build_tools/zipalign" + _keystore_path = android_keystore_path + _keystore_name = android_keystore_name + _keystore_password = android_keystore_password + + if (defined(invoker.keystore_path)) { + _keystore_path = invoker.keystore_path + _keystore_name = invoker.keystore_name + _keystore_password = invoker.keystore_password + } + + _inputs += [ + _apksigner, + _zipalign, + _keystore_path, + ] + _args += [ + "--apksigner-jar", + rebase_path(_apksigner, root_build_dir), + "--zipalign-path", + rebase_path(_zipalign, root_build_dir), + "--key-path", + rebase_path(_keystore_path, root_build_dir), + "--key-name", + _keystore_name, + "--key-passwd", + _keystore_password, + ] + if (is_official_build) { + _args += [ "--best-compression" ] + } + } + if (defined(invoker.uncompress_dex)) { + _uncompress_dex = invoker.uncompress_dex + } else { + # Uncompressed dex support started on Android P. + _uncompress_dex = invoker.min_sdk_version >= 28 + } + + if (_uncompress_dex) { + _args += [ "--uncompress-dex" ] + } + if (defined(invoker.library_always_compress)) { + _args += + [ "--library-always-compress=${invoker.library_always_compress}" ] + } + if (defined(invoker.dex_path)) { + _inputs += [ invoker.dex_path ] + _args += [ + "--dex-file", + rebase_path(invoker.dex_path, root_build_dir), + ] + } + if ((defined(invoker.loadable_modules) && invoker.loadable_modules != []) || + defined(invoker.native_libs_filearg) || + _native_lib_placeholders != []) { + _args += [ "--android-abi=$android_app_abi" ] + } + if (defined(android_app_secondary_abi)) { + _args += [ "--secondary-android-abi=$android_app_secondary_abi" ] + } + if (defined(invoker.loadable_modules) && invoker.loadable_modules != []) { + _inputs += invoker.loadable_modules + _rebased_loadable_modules = + rebase_path(invoker.loadable_modules, root_build_dir) + _args += [ "--native-libs=$_rebased_loadable_modules" ] + } + if (defined(invoker.native_libs_filearg)) { + _args += [ "--native-libs=${invoker.native_libs_filearg}" ] + } + if (_native_lib_placeholders != []) { + _args += [ "--native-lib-placeholders=$_native_lib_placeholders" ] + } + + if (defined(invoker.secondary_abi_native_libs_filearg)) { + _args += [ + "--secondary-native-libs=${invoker.secondary_abi_native_libs_filearg}", + ] + } + if (defined(invoker.secondary_abi_loadable_modules)) { + _rebased_secondary_abi_loadable_modules = + rebase_path(invoker.secondary_abi_loadable_modules, root_build_dir) + _args += + [ "--secondary-native-libs=$_rebased_secondary_abi_loadable_modules" ] + } + if (_secondary_native_lib_placeholders != []) { + _args += [ "--secondary-native-lib-placeholders=$_secondary_native_lib_placeholders" ] + } + if (treat_warnings_as_errors) { + _args += [ "--warnings-as-errors" ] + } + + if (defined(invoker.expected_libs_and_assets)) { + _expectations_target = + "${invoker.top_target_name}_validate_libs_and_assets" + 
action_with_pydeps(_expectations_target) {
+ _actual_file = "$target_gen_dir/$target_name.libs_and_assets"
+ _failure_file =
+ "$expectations_failure_dir/" +
+ string_replace(invoker.expected_libs_and_assets, "/", "_")
+ inputs = [ invoker.expected_libs_and_assets ]
+ if (defined(invoker.build_config)) {
+ inputs += [ invoker.build_config ]
+ }
+ deps = [ invoker.build_config_dep ]
+ outputs = [
+ _actual_file,
+ _failure_file,
+ ]
+ script = _script
+ args = _args + [
+ "--expected-file",
+ rebase_path(invoker.expected_libs_and_assets, root_build_dir),
+ "--actual-file",
+ rebase_path(_actual_file, root_build_dir),
+ "--failure-file",
+ rebase_path(_failure_file, root_build_dir),
+ "--only-verify-expectations",
+ ]
+ if (defined(invoker.expected_libs_and_assets_base)) {
+ inputs += [ invoker.expected_libs_and_assets_base ]
+ args += [
+ "--expected-file-base",
+ rebase_path(invoker.expected_libs_and_assets_base, root_build_dir),
+ ]
+ }
+ if (fail_on_android_expectations) {
+ args += [ "--fail-on-expectations" ]
+ }
+ }
+ _deps += [ ":$_expectations_target" ]
+ }
+ action_with_pydeps(target_name) {
+ depfile = "$target_gen_dir/$target_name.d"
+ inputs = _inputs
+ deps = _deps
+ data = _data
+ outputs = _outputs
+ script = _script
+ args = _args + [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ ]
+ }
+ }
+
+ # Compile Java source files into a .jar file, potentially using an
+ # annotation processor and/or the errorprone compiler. Also includes Kotlin
+ # source files in the resulting info file.
+ #
+ # Note that the only way to specify custom annotation processors is
+ # by using build_config to point to a file that corresponds to a java-related
+ # target that includes javac:processor_classes entries (i.e. there is no
+ # variable here that can be used for this purpose).
+ #
+ # Note also the peculiar use of source_files / target_sources_file. The content
+ # of the source_files list and the source files in target_sources_file must
+ # match exactly.
+ #
+ # Variables:
+ # main_target_name: Used when extracting srcjars for codesearch.
+ # source_files: Optional list of Java and Kotlin source file paths.
+ # srcjar_deps: Optional list of .srcjar dependencies (not file paths).
+ # The corresponding source files they contain will be compiled too.
+ # target_sources_file: Optional path to file containing list of source file
+ # paths. This must always be provided if source_files is not empty, and
+ # the source files listed in it must match source_files exactly.
+ # build_config: Path to the .build_config.json file of the corresponding
+ # java_library_impl() target. The following entries will be used by this
+ # template: javac:srcjars, deps_info:javac_full_classpath,
+ # deps_info:javac_full_interface_classpath, javac:processor_classpath,
+ # javac:processor_classes
+ # javac_jar_path: Path to the final output .jar file.
+ # javac_args: Optional list of extra arguments to pass to javac.
+ # chromium_code: Whether this corresponds to Chromium-specific sources.
+ # requires_android: True if these sources can only run on Android.
+ # additional_jar_files: Optional list of files to copy into the resulting
+ # .jar file (by default, only .class files are put there). Each entry
+ # has the 'srcPath:dstPath' format.
+ # enable_errorprone: If True, use the errorprone compiler to check for
+ # error-prone constructs in the language. If not provided, whether this is
+ # enabled depends on chromium_code and the global
+ # use_errorprone_java_compiler variable.
+ # use_turbine: If True, compile headers using turbine.py. + # apk_name: Optional APK name. If provided, will tell compile_java.py to also + # generate an .apk.jar.info file under size-info/${apk_name}.apk.jar.info + # processor_args_javac: List of annotation processor arguments, each one + # will be passed to javac as -A. + # deps: Dependencies for the corresponding target. + # testonly: Usual meaning (should be True for test-only targets) + # + # [1] https://docs.oracle.com/javase/7/docs/api/java/util/ServiceLoader.html + # + template("compile_java") { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + + _build_config = invoker.build_config + _chromium_code = invoker.chromium_code + + _processor_args = [] + if (defined(invoker.processor_args_javac)) { + _processor_args = invoker.processor_args_javac + } + + _additional_jar_files = [] + if (defined(invoker.additional_jar_files)) { + _additional_jar_files = invoker.additional_jar_files + } + + _srcjar_deps = [] + if (defined(invoker.srcjar_deps)) { + _srcjar_deps = invoker.srcjar_deps + } + + _java_srcjars = [] + foreach(dep, _srcjar_deps) { + _dep_gen_dir = get_label_info(dep, "target_gen_dir") + _dep_name = get_label_info(dep, "name") + _java_srcjars += [ "$_dep_gen_dir/$_dep_name.srcjar" ] + } + + # generated_jar_path is an output when use_turbine and an input otherwise. + if (!invoker.use_turbine) { + _java_srcjars += [ invoker.generated_jar_path ] + } + + _javac_args = [] + if (defined(invoker.javac_args)) { + _javac_args = invoker.javac_args + } + + action_with_pydeps(target_name) { + if (invoker.use_turbine) { + script = "//build/android/gyp/turbine.py" + } else { + script = "//build/android/gyp/compile_java.py" + } + + if (target_name == "chrome_java__header") { + # Regression test for: https://crbug.com/1154302 + assert_no_deps = [ "//base:base_java__compile_java" ] + } + + depfile = "$target_gen_dir/$target_name.d" + deps = _srcjar_deps + if (defined(invoker.deps)) { + deps += invoker.deps + } + + outputs = [ invoker.output_jar_path ] + if (!invoker.enable_errorprone && !invoker.use_turbine) { + outputs += [ invoker.output_jar_path + ".info" ] + } + inputs = invoker.source_files + _java_srcjars + [ _build_config ] + if (invoker.source_files != []) { + inputs += [ invoker.target_sources_file ] + } + + _rebased_build_config = rebase_path(_build_config, root_build_dir) + _rebased_output_jar_path = + rebase_path(invoker.output_jar_path, root_build_dir) + _rebased_java_srcjars = rebase_path(_java_srcjars, root_build_dir) + _rebased_depfile = rebase_path(depfile, root_build_dir) + _rebased_generated_dir = rebase_path( + "$target_gen_dir/${invoker.main_target_name}/generated_java", + root_build_dir) + args = [ + "--depfile=$_rebased_depfile", + "--generated-dir=$_rebased_generated_dir", + "--jar-path=$_rebased_output_jar_path", + "--java-srcjars=$_rebased_java_srcjars", + "--target-name", + get_label_info(":${target_name}", "label_no_toolchain"), + ] + + # SDK jar must be first on classpath. 
+ if (invoker.include_android_sdk) { + args += [ "--classpath=@FileArg($_rebased_build_config:android:sdk_interface_jars)" ] + } + + if (defined(invoker.header_jar_path)) { + inputs += [ invoker.header_jar_path ] + args += [ + "--header-jar", + rebase_path(invoker.header_jar_path, root_build_dir), + ] + _header_jar_classpath = + [ rebase_path(invoker.header_jar_path, root_build_dir) ] + args += [ "--classpath=$_header_jar_classpath" ] + } + + if (defined(invoker.kotlin_jar_path)) { + inputs += [ invoker.kotlin_jar_path ] + _rebased_kotlin_jar_path = + rebase_path(invoker.kotlin_jar_path, root_build_dir) + args += [ + "--kotlin-jar-path=$_rebased_kotlin_jar_path", + "--classpath=$_rebased_kotlin_jar_path", + ] + } + + if (invoker.use_turbine) { + # Prefer direct deps for turbine as much as possible. + args += [ "--classpath=@FileArg($_rebased_build_config:javac:interface_classpath)" ] + } else { + args += [ "--classpath=@FileArg($_rebased_build_config:deps_info:javac_full_interface_classpath)" ] + } + + if (invoker.use_turbine) { + args += [ + "--processorpath=@FileArg($_rebased_build_config:javac:processor_classpath)", + "--processors=@FileArg($_rebased_build_config:javac:processor_classes)", + ] + } + + if (invoker.use_turbine) { + _turbine_jar_path = "//third_party/turbine/turbine.jar" + inputs += [ _turbine_jar_path ] + outputs += [ invoker.generated_jar_path ] + args += [ + "--turbine-jar-path", + rebase_path(_turbine_jar_path, root_build_dir), + "--generated-jar-path", + rebase_path(invoker.generated_jar_path, root_build_dir), + ] + } + + if (use_java_goma) { + args += [ "--gomacc-path=$goma_dir/gomacc" ] + + # Override the default action_pool when goma is enabled. + pool = "//build/config/android:goma_javac_pool" + } + + # Flag enable_kythe_annotations requires + # checkout_android_prebuilts_build_tools=True in .gclient. + if (enable_kythe_annotations && !invoker.enable_errorprone) { + args += [ "--enable-kythe-annotations" ] + } + if (_chromium_code) { + args += [ "--chromium-code=1" ] + if (treat_warnings_as_errors) { + args += [ "--warnings-as-errors" ] + } + } + if (defined(invoker.jar_excluded_patterns)) { + args += [ "--jar-info-exclude-globs=${invoker.jar_excluded_patterns}" ] + } + + if (invoker.enable_errorprone) { + # Our custom plugin pulls in the main errorprone dep transitively. + _errorprone_dep = "//tools/android/errorprone_plugin:errorprone_plugin" + deps += [ _errorprone_dep ] + _dep_gen_dir = get_label_info(_errorprone_dep, "target_gen_dir") + _dep_name = get_label_info(_errorprone_dep, "name") + _rebased_errorprone_buildconfig = + rebase_path("$_dep_gen_dir/$_dep_name.build_config.json", + root_build_dir) + args += [ + "--processorpath=@FileArg($_rebased_errorprone_buildconfig:deps_info:host_classpath)", + "--enable-errorprone", + ] + } + if (defined(invoker.skip_build_server) && invoker.skip_build_server) { + # Nocompile tests need lint to fail through ninja. 
+ args += [ "--skip-build-server" ] + } else if (android_static_analysis == "build_server") { + args += [ "--use-build-server" ] + } + + foreach(e, _processor_args) { + args += [ "--processor-arg=" + e ] + } + + foreach(file_tuple, _additional_jar_files) { + # Each element is of length two, [ path_to_file, path_to_put_in_jar ] + inputs += [ file_tuple[0] ] + args += + [ "--additional-jar-file=" + + rebase_path(file_tuple[0], root_build_dir) + ":" + file_tuple[1] ] + } + if (invoker.source_files != []) { + args += + [ "@" + rebase_path(invoker.target_sources_file, root_build_dir) ] + } + foreach(e, _javac_args) { + args += [ "--javac-arg=" + e ] + } + } + } + + # Compile Kotlin source files into .class files and store them in a .jar. + # This explicitly does not run annotation processing on the Kotlin files. + # Java files and srcjars are also passed to kotlinc for reference, although + # no .class files will be generated for any Java files. A subsequent call to + # javac will be required to actually compile Java files into .class files. + # + # This action also creates a "header" .jar file for the Kotlin source files. + # It is similar to using turbine to create headers for Java files, but since + # turbine does not support Kotlin files, this is done via a plugin for + # kotlinc instead, at the same time as compilation (whereas turbine is run as + # a separate action before javac compilation). + template("compile_kt") { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + + _build_config = invoker.build_config + _chromium_code = invoker.chromium_code + + _srcjar_deps = [] + if (defined(invoker.srcjar_deps)) { + _srcjar_deps = invoker.srcjar_deps + } + + _java_srcjars = [] + foreach(dep, _srcjar_deps) { + _dep_gen_dir = get_label_info(dep, "target_gen_dir") + _dep_name = get_label_info(dep, "name") + _java_srcjars += [ "$_dep_gen_dir/$_dep_name.srcjar" ] + } + + action_with_pydeps(target_name) { + script = "//build/android/gyp/compile_kt.py" + depfile = "$target_gen_dir/$target_name.d" + deps = _srcjar_deps + if (defined(invoker.deps)) { + deps += invoker.deps + } + + outputs = [ + invoker.output_jar_path, + invoker.output_interface_jar_path, + ] + inputs = invoker.source_files + _java_srcjars + [ + _build_config, + invoker.target_sources_file, + ] + + _rebased_build_config = rebase_path(_build_config, root_build_dir) + _rebased_output_jar_path = + rebase_path(invoker.output_jar_path, root_build_dir) + _rebased_output_interface_jar_path = + rebase_path(invoker.output_interface_jar_path, root_build_dir) + _rebased_java_srcjars = rebase_path(_java_srcjars, root_build_dir) + _rebased_depfile = rebase_path(depfile, root_build_dir) + _rebased_generated_dir = rebase_path( + "$target_gen_dir/${invoker.main_target_name}/generated_java", + root_build_dir) + args = [ + "--depfile=$_rebased_depfile", + "--generated-dir=$_rebased_generated_dir", + "--jar-path=$_rebased_output_jar_path", + "--interface-jar-path=$_rebased_output_interface_jar_path", + "--java-srcjars=$_rebased_java_srcjars", + ] + + # SDK jar must be first on classpath. + if (invoker.include_android_sdk) { + args += [ "--classpath=@FileArg($_rebased_build_config:android:sdk_interface_jars)" ] + } + + args += [ "--classpath=@FileArg($_rebased_build_config:deps_info:javac_full_interface_classpath)" ] + + if (use_java_goma) { + args += [ "--gomacc-path=$goma_dir/gomacc" ] + + # Override the default action_pool when goma is enabled. 
+ pool = "//build/config/android:goma_javac_pool"
+ }
+
+ if (_chromium_code) {
+ args += [ "--chromium-code" ]
+ if (treat_warnings_as_errors) {
+ args += [ "--warnings-as-errors" ]
+ }
+ }
+
+ args += [ "@" + rebase_path(invoker.target_sources_file, root_build_dir) ]
+ }
+ }
+
+ # Create an interface jar from a normal jar.
+ #
+ # Variables
+ # input_jar: Path to input .jar.
+ # output_jar: Path to output .ijar.
+ #
+ template("generate_interface_jar") {
+ action_with_pydeps(target_name) {
+ _ijar_target = "//third_party/ijar:ijar($host_toolchain)"
+ _ijar_executable = get_label_info(_ijar_target, "root_out_dir") + "/ijar"
+ forward_variables_from(invoker,
+ TESTONLY_AND_VISIBILITY + [
+ "data",
+ "data_deps",
+ "public_deps",
+ ])
+ script = "//build/android/gyp/ijar.py"
+ deps = [ _ijar_target ]
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+ inputs = [
+ invoker.input_jar,
+ _ijar_executable,
+ ]
+ if (defined(invoker.inputs)) {
+ inputs += invoker.inputs
+ }
+ outputs = [ invoker.output_jar ]
+ args = [
+ rebase_path(_ijar_executable, root_build_dir),
+ rebase_path(invoker.input_jar, root_build_dir),
+ rebase_path(invoker.output_jar, root_build_dir),
+ ]
+ }
+ }
+
+ # A rule that will handle multiple Java-related targets.
+ #
+ # The caller can provide a list of source files with 'java_files'
+ # and 'srcjar_deps', or a prebuilt .jar file through 'jar_path'.
+ #
+ # In the case of a 'java_binary' target type, it can even provide none of
+ # that (and the rule will just generate its wrapper script).
+ #
+ # The template will process the input .jar file (either the prebuilt one,
+ # or the result of compiling the sources), for example to apply Proguard,
+ # but also other kinds of bytecode-level rewriting schemes.
+ #
+ # Variables:
+ # type: type of Java target, valid values: 'java_library', 'java_binary',
+ # 'robolectric_binary', 'java_annotation_processor', and 'android_apk'
+ # main_target_name: optional. If provided, overrides target_name when
+ # creating sub-targets (e.g. "${main_target_name}__dex") and
+ # some output files (e.g. "${main_target_name}.sources"). Only used
+ # for 'android_apk' types at the moment, where main_target_name will
+ # be the name of the main APK target.
+ # supports_android: Optional. True if target can run on Android.
+ # requires_android: Optional. True if target can only run on Android.
+ # source_files: Optional list of Java source file paths for this target.
+ # javac_args: Optional list of extra arguments to pass to javac.
+ # errorprone_args: Optional list of extra arguments to pass to errorprone.
+ # srcjar_deps: Optional list of .srcjar targets (not file paths). The Java
+ # source files they contain will also be compiled for this target.
+ # target_sources_file: Optional path to a file which will be written with
+ # the content of source_files. If not provided, the file will be written
+ # under $target_gen_dir/$main_target_name.sources. Ignored if
+ # source_files is empty.
+ # jar_path: Optional path to a prebuilt .jar file for this target.
+ # Mutually exclusive with java_files and srcjar_deps.
+ # output_name: Optional output name for the final jar path. Used to
+ # determine the name of the final jar. Default is to use the same
+ # name as jar_path, if provided, or main_target_name.
+ # main_class: Main Java class name for 'java_binary', 'robolectric_binary' and
+ # 'java_annotation_processor' target types. Should not be set for other
+ # ones.
+ # deps: Dependencies for this target.
+ # public_deps: Dependencies that this target exposes as part of its public API.
+ # public_deps do not need to be listed in both the 'deps' and 'public_deps' lists.
+ # testonly: True iff target should only be used for tests.
+ # chromium_code: Optional. Whether this is Chromium-specific code. If not
+ # provided, this is determined automatically, based on the location of
+ # the source files (i.e. anything under third_party/ is not
+ # Chromium-specific unless it is in a 'chromium' sub-directory).
+ # jacoco_never_instrument: Optional. If provided, whether to forbid
+ # instrumentation with the Jacoco coverage processor. If not provided,
+ # this is controlled by the global use_jacoco_coverage build arg variable
+ # and only used for non-test Chromium code.
+ # include_android_sdk: Optional. Whether or not the android SDK dep
+ # should be added to deps. Defaults to true for non-system libraries
+ # that support android.
+ # alternative_android_sdk_dep: Optional. Alternative Android system
+ # java target to use.
+ # annotation_processor_deps: Optional list of dependencies corresponding
+ # to annotation processors used to compile these sources.
+ # input_jars_paths: Optional list of additional .jar file paths, which will
+ # be added to the compile-time classpath when building this target (but
+ # not to the runtime classpath).
+ # gradle_treat_as_prebuilt: Cause generate_gradle.py to reference this
+ # library via its built .jar rather than including its .java sources.
+ # proguard_enabled: Optional. True to enable ProGuard obfuscation.
+ # proguard_configs: Optional list of additional proguard config file paths.
+ # is_robolectric: Optional. If True, this is a host-side android test binary
+ # which is allowed to depend on other android targets.
+ # include_java_resources: Optional. If True, include Java (not Android)
+ # resources into final .jar file.
+ # jar_excluded_patterns: Optional list of .class file patterns to exclude
+ # from the final .jar file.
+ # jar_included_patterns: Optional list of .class file patterns to include
+ # in the final .jar file. jar_excluded_patterns take precedence over this.
+ # low_classpath_priority: Indicates that the library should be placed at the
+ # end of the classpath. The default classpath order has libraries ordered
+ # before the libraries that they depend on. 'low_classpath_priority' is
+ # useful when one java_library() overrides another via
+ # 'jar_excluded_patterns' and the overriding library does not depend on the
+ # overridee.
+ #
+ # For 'android_apk' and 'android_app_bundle_module' targets only:
+ #
+ # apk_path: Path to the final APK file.
+ # android_manifest: Path to AndroidManifest.xml file for the APK.
+ # android_manifest_dep: Optional. Dependency target that generates
+ # android_manifest.
+ # apk_under_test: For 'android_apk' targets used to test other APKs,
+ # this is the target name of the APK being tested.
+ # incremental_apk_path: Path to the incremental APK.
+ # incremental_install_json_path: Path to the incremental install json.
+ # native_lib_placeholders: Optional. List of placeholder filenames to add to
+ # the APK.
+ # proguard_mapping_path: Path to .mapping file produced from ProGuard step.
+ # shared_libraries_runtime_deps_file: Optional. Path to a file listing the
+ # native shared libraries required at runtime by the APK.
+ # secondary_abi_shared_libraries_runtime_deps_file:
+ # secondary_native_lib_placeholders: Optional. List of placeholder filenames
+ # to add to the APK for the secondary ABI.
+ # loadable_modules: Optional list of extra native libraries to
+ # be stored in the APK.
+ # secondary_abi_loadable_modules: Optional list of native libraries for
+ # secondary ABI.
+ # proto_resources_path: The path of a zip archive containing the APK's
+ # resources compiled to the protocol buffer format (instead of regular
+ # binary xml + resources.arsc).
+ # r_text_path: The path of the R.txt file generated when compiling the
+ # resources for this target.
+ # module_pathmap_path: The path of the pathmap file generated when compiling
+ # the resources for the bundle module, if path shortening is enabled.
+ # base_allowlist_rtxt_path: The path of the R.txt file containing the
+ # list of string resources to keep in the base split APK for any bundle
+ # that uses this target.
+ #
+ # For 'java_binary' and 'robolectric_binary' targets only. Ignored by others:
+ #
+ # wrapper_script_name: Optional name for the generated wrapper script.
+ # Default is main target name.
+ # wrapper_script_args: Optional list of extra arguments used by the
+ # generated wrapper script.
+ #
+ template("java_library_impl") {
+ # TODO(crbug.com/1042017): Remove.
+ not_needed(invoker, [ "no_build_hooks" ])
+
+ forward_variables_from(invoker, [ "testonly" ])
+ _is_prebuilt = defined(invoker.jar_path)
+ _type = invoker.type
+ _is_annotation_processor = _type == "java_annotation_processor"
+ _is_java_binary = _type == "java_binary" || _type == "robolectric_binary"
+ _is_library = _type == "java_library"
+ _supports_android =
+ defined(invoker.supports_android) && invoker.supports_android
+ _requires_android =
+ defined(invoker.requires_android) && invoker.requires_android
+ _supports_host = !_requires_android
+ if (_is_java_binary || _is_annotation_processor) {
+ assert(!_requires_android && !_supports_android)
+ }
+
+ _bypass_platform_checks = defined(invoker.bypass_platform_checks) &&
+ invoker.bypass_platform_checks
+ _is_robolectric = defined(invoker.is_robolectric) && invoker.is_robolectric
+
+ _invoker_deps = []
+ if (defined(invoker.deps)) {
+ _invoker_deps += invoker.deps
+ }
+ if (defined(invoker.public_deps)) {
+ _invoker_deps += invoker.public_deps
+ }
+
+ _main_target_name = target_name
+ if (defined(invoker.main_target_name)) {
+ _main_target_name = invoker.main_target_name
+ }
+
+ _source_files = []
+ if (defined(invoker.sources)) {
+ _source_files = invoker.sources
+ }
+
+ _srcjar_deps = []
+ if (defined(invoker.srcjar_deps)) {
+ _srcjar_deps = invoker.srcjar_deps
+ }
+ _has_sources = _source_files != [] || _srcjar_deps != []
+
+ if (_is_prebuilt) {
+ assert(!_has_sources)
+ } else {
+ # Allow java_binary to not specify any sources. This is needed when a prebuilt
+ # is needed as a library as well as a binary.
+ assert(_is_annotation_processor || _is_java_binary || _has_sources)
+ }
+
+ if (_is_java_binary) {
+ assert(defined(invoker.main_class), "${_type}() must set main_class")
+ } else if (_is_annotation_processor) {
+ assert(defined(invoker.main_class),
+ "java_annotation_processor() must set main_class")
+ } else {
+ assert(!defined(invoker.main_class),
+ "main_class cannot be used for target of type ${_type}")
+ }
+
+ if (defined(invoker.chromium_code)) {
+ _chromium_code = invoker.chromium_code
+ } else {
+ # Default based on whether target is in third_party.
+ _chromium_code = + filter_exclude([ get_label_info(":$_main_target_name", "dir") ], + [ "*\bthird_party\b*" ]) != [] + if (!_chromium_code && !_is_prebuilt && _source_files != []) { + # Unless third_party code has an org.chromium file in it. + _chromium_code = + filter_exclude(_source_files, [ "*\bchromium\b*" ]) != _source_files + } + } + + # Define build_config_deps which will be a list of targets required to + # build the _build_config. + _build_config = "$target_gen_dir/$_main_target_name.build_config.json" + _build_config_target_name = + "${_main_target_name}$build_config_target_suffix" + + # The only target that might have no prebuilt and no sources is a java_binary. + _build_host_jar = false + _build_device_jar = false + if (_is_prebuilt || _has_sources) { + if (defined(invoker.output_name)) { + _output_name = invoker.output_name + } else { + _output_name = _main_target_name + } + + _build_host_jar = + _is_java_binary || _is_annotation_processor || _type == "java_library" + _build_device_jar = _type != "system_java_library" && _supports_android + + _jacoco_instrument = + use_jacoco_coverage && _chromium_code && _source_files != [] && + _build_device_jar && (!defined(invoker.testonly) || !invoker.testonly) + if (defined(invoker.jacoco_never_instrument)) { + _jacoco_instrument = + !invoker.jacoco_never_instrument && _jacoco_instrument + } + if (_jacoco_instrument) { + _invoker_deps += [ _jacoco_dep ] + } + + if (_build_host_jar) { + # Jar files can be needed at runtime (by Robolectric tests or java binaries), + # so do not put them under obj/. + # TODO(agrieve): I suspect it would be better to use dist_jar for java_binary + # rather than archiving unnecessary .jar files within lib.java. + _target_dir_name = get_label_info(":$_main_target_name", "dir") + _host_processed_jar_path = + "$root_out_dir/lib.java$_target_dir_name/$_output_name.jar" + } + if (_build_device_jar) { + _dex_path = "$target_out_dir/$_main_target_name.dex.jar" + _enable_desugar = + !defined(invoker.enable_desugar) || invoker.enable_desugar + + # Build speed optimization: Skip "process device" step if the step + # would be just a copy and avoid the copy. + _process_device_jar = + defined(invoker.bytecode_rewriter_target) || _jacoco_instrument || + defined(invoker.jar_excluded_patterns) || + defined(invoker.jar_included_patterns) + if (!_process_device_jar && _is_prebuilt) { + _device_processed_jar_path = invoker.jar_path + } else { + _device_processed_jar_path = + "$target_out_dir/$_output_name.processed.jar" + } + } + + # For static libraries, the javac jar output is created at the intermediate + # path so that it can be processed by another target and moved to the final + # spot that the .build_config.json knows about. Technically this should be done + # for the ijar as well, but this is only used for APK targets where + # the ijar path isn't actually used. 
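+ # Header ("interface") jars are produced by turbine when the target has
+ # sources, and by ijar when only a prebuilt jar is available; see the
+ # generate_interface_jar() call further below.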
+ if (_has_sources) { + _final_ijar_path = "$target_out_dir/$_output_name.turbine.jar" + } else { + _final_ijar_path = "$target_out_dir/$_output_name.ijar.jar" + } + + if (_has_sources) { + if (_build_device_jar && !_process_device_jar) { + _javac_jar_path = _device_processed_jar_path + } else { + _javac_jar_path = "$target_out_dir/$_main_target_name.javac.jar" + } + _generated_jar_path = + "$target_gen_dir/$_main_target_name.generated.srcjar" + } + + if (_is_prebuilt) { + _unprocessed_jar_path = invoker.jar_path + } else { + _unprocessed_jar_path = _javac_jar_path + } + } + + _java_assetres_deps = filter_include(_invoker_deps, java_resource_patterns) + + # Cannot use minus operator because it does not work when the operand has + # repeated entries. + _invoker_deps_minus_assetres = + filter_exclude(_invoker_deps, _java_assetres_deps) + _lib_deps = + filter_include(_invoker_deps_minus_assetres, java_library_patterns) + _non_java_deps = filter_exclude(_invoker_deps_minus_assetres, _lib_deps) + + _java_header_deps = [] # Turbine / ijar + + # It would be more ideal to split this into __host and __javac, but we + # combine the two concepts to save on a group() target. + _java_host_deps = [] # Processed host .jar + javac .jar. + _java_validate_deps = [] # Bytecode checker & errorprone. + + foreach(_lib_dep, _lib_deps) { + # Expand //foo/java -> //foo/java:java + _lib_dep = get_label_info(_lib_dep, "label_no_toolchain") + _java_assetres_deps += [ "${_lib_dep}__assetres" ] + _java_header_deps += [ "${_lib_dep}__header" ] + _java_host_deps += [ "${_lib_dep}__host" ] + _java_validate_deps += [ "${_lib_dep}__validate" ] + } + + # APK and base module targets are special because: + # 1) They do not follow java target naming scheme (since they are not + # generally deps, there is no need for them to). + # 2) They do not bother to define a __host target. + # Since __host is used as an indirect dep for the compile_java artifacts, + # add the __compile_java target directly for them. + if (defined(invoker.apk_under_test)) { + _java_assetres_deps += [ "${invoker.apk_under_test}__java__assetres" ] + _java_header_deps += [ "${invoker.apk_under_test}__java__header" ] + _java_validate_deps += [ "${invoker.apk_under_test}__java__validate" ] + _java_host_deps += [ "${invoker.apk_under_test}__compile_java" ] + } + if (defined(invoker.base_module_target)) { + _java_assetres_deps += [ "${invoker.base_module_target}__java__assetres" ] + _java_header_deps += [ "${invoker.base_module_target}__java__header" ] + _java_validate_deps += [ "${invoker.base_module_target}__java__validate" ] + _java_host_deps += [ "${invoker.base_module_target}__compile_java" ] + } + + not_needed([ "_non_java_deps" ]) + + if (_is_prebuilt || _has_sources) { + # Classpath deps are used for header and dex targets, they do not need + # __assetres deps. + # _non_java_deps are needed for input_jars_paths that are generated. 
+ _header_classpath_deps = + _java_header_deps + _non_java_deps + [ ":$_build_config_target_name" ] + + _javac_classpath_deps = + _java_host_deps + _non_java_deps + [ ":$_build_config_target_name" ] + + _include_android_sdk = _build_device_jar + if (defined(invoker.include_android_sdk)) { + _include_android_sdk = invoker.include_android_sdk + } + if (_include_android_sdk) { + if (defined(invoker.alternative_android_sdk_dep)) { + _android_sdk_dep = invoker.alternative_android_sdk_dep + } else { + _android_sdk_dep = default_android_sdk_dep + } + + _header_classpath_deps += [ "${_android_sdk_dep}__header" ] + _javac_classpath_deps += [ "${_android_sdk_dep}" ] + } + } + + # Often needed, but too hard to figure out when ahead of time. + not_needed([ + "_header_classpath_deps", + "_javac_classpath_deps", + ]) + + if (_source_files != []) { + _target_sources_file = "$target_gen_dir/$_main_target_name.sources" + write_file(_target_sources_file, + rebase_path(_source_files, root_build_dir)) + } + + write_build_config(_build_config_target_name) { + forward_variables_from(invoker, + [ + "aar_path", + "annotation_processor_deps", + "base_allowlist_rtxt_path", + "gradle_treat_as_prebuilt", + "input_jars_paths", + "preferred_dep", + "low_classpath_priority", + "main_class", + "mergeable_android_manifests", + "module_name", + "parent_module_target", + "proguard_configs", + "proguard_enabled", + "proguard_mapping_path", + "public_target_label", + "r_text_path", + "type", + "version_code", + "version_name", + ]) + if (_type == "android_apk" || _type == "android_app_bundle_module") { + forward_variables_from( + invoker, + [ + "android_manifest", + "android_manifest_dep", + "merged_android_manifest", + "final_dex_path", + "loadable_modules", + "native_lib_placeholders", + "res_size_info_path", + "secondary_abi_loadable_modules", + "secondary_abi_shared_libraries_runtime_deps_file", + "secondary_native_lib_placeholders", + "shared_libraries_runtime_deps_file", + "library_always_compress", + ]) + } + if (_type == "android_apk") { + forward_variables_from(invoker, + [ + "apk_path", + "apk_under_test", + "incremental_apk_path", + "incremental_install_json_path", + ]) + } + if (_type == "android_app_bundle_module") { + forward_variables_from(invoker, + [ + "add_view_trace_events", + "base_module_target", + "module_pathmap_path", + "proto_resources_path", + ]) + } + chromium_code = _chromium_code + build_config = _build_config + is_prebuilt = _is_prebuilt + + # Specifically avoid passing in invoker.base_module_target as one of the + # possible_config_deps. 
+ possible_config_deps = []
+ if (defined(invoker.deps)) {
+ possible_config_deps = invoker.deps
+ }
+ if (defined(invoker.public_deps)) {
+ possible_config_public_deps = invoker.public_deps
+ }
+ if (defined(apk_under_test)) {
+ possible_config_deps += [ apk_under_test ]
+ }
+ if (defined(_jacoco_instrument) && _jacoco_instrument) {
+ possible_config_deps += [ _jacoco_dep ]
+ }
+ if (defined(_android_sdk_dep)) {
+ possible_config_deps += [ _android_sdk_dep ]
+ }
+
+ supports_android = _supports_android
+ requires_android = _requires_android
+ is_robolectric = _is_robolectric
+ bypass_platform_checks = _bypass_platform_checks
+
+ if (defined(invoker.resources_package)) {
+ custom_package = invoker.resources_package
+ }
+ if (_is_prebuilt || _has_sources) {
+ ijar_path = _final_ijar_path
+ unprocessed_jar_path = _unprocessed_jar_path
+ }
+ if (_build_host_jar) {
+ host_jar_path = _host_processed_jar_path
+ }
+ if (_build_device_jar) {
+ device_jar_path = _device_processed_jar_path
+ dex_path = _dex_path
+ }
+ if (_source_files != []) {
+ target_sources_file = _target_sources_file
+ }
+
+ bundled_srcjars = []
+ foreach(d, _srcjar_deps) {
+ _dep_gen_dir = get_label_info(d, "target_gen_dir")
+ _dep_name = get_label_info(d, "name")
+ bundled_srcjars += [ "$_dep_gen_dir/$_dep_name.srcjar" ]
+ }
+ if (defined(invoker.include_java_resources) &&
+ invoker.include_java_resources) {
+ java_resources_jar = _unprocessed_jar_path
+ if (defined(invoker.jar_path)) {
+ # Use original jar_path because _jar_path points to a library without
+ # resources.
+ } else {
+ java_resources_jar = _device_processed_jar_path
+ }
+ }
+ }
+
+ if (_is_prebuilt || _has_sources) {
+ _header_target_name = "${target_name}__header"
+ }
+
+ if (_has_sources) {
+ _kt_files = filter_include(_source_files, [ "*.kt" ])
+ _java_files = filter_exclude(_source_files, [ "*.kt" ])
+
+ if (defined(invoker.enable_errorprone)) {
+ _enable_errorprone = invoker.enable_errorprone
+ } else {
+ _enable_errorprone =
+ _java_files != [] && _chromium_code && use_errorprone_java_compiler
+ }
+
+ if (defined(invoker.resources_package) && _type == "java_library") {
+ # TODO(crbug.com/1296632): remove _bypass_platform_checks from the list
+ # once all robolectric targets have migrated to robolectric_library.
+ assert(_requires_android || _bypass_platform_checks || _is_robolectric,
+ "Setting resources_package is applicable only for " +
+ "android_library() or robolectric_library(). " +
+ "Target=$target_name")
+
+ # Serves double purpose: Generating R.java, as well as being the
+ # __assetres target (instead of using a separate group).
+ _fake_rjava_target = "${target_name}__assetres"
+ generate_r_java(_fake_rjava_target) {
+ deps = [ ":$_build_config_target_name" ] + _java_assetres_deps +
+ _non_java_deps
+ build_config = _build_config
+
+ # Filepath has to be exactly this because compile_java looks for the
+ # srcjar of srcjar_deps at this location $gen_dir/$target_name.srcjar
+ srcjar_path = "$target_gen_dir/$target_name.srcjar"
+ package = invoker.resources_package
+ }
+ _srcjar_deps += [ ":$_fake_rjava_target" ]
+ }
+
+ if (_kt_files != []) {
+ _kt_allowlist = [
+ "android/java/src/org/chromium/chrome/browser/tabmodel/AsyncTabParamsManagerImpl.kt",
+ "webengine_shell_apk/src/org/chromium/webengine/shell/*.kt",
+ ]
+ assert(filter_exclude(_kt_files, _kt_allowlist) == [],
+ "Only files in the allowlist can be included for now. 
Feel " + + "free to remove this assert when experimenting locally.") + _compile_kt_target_name = "${_main_target_name}__compile_kt" + _kotlinc_jar_path = "$target_out_dir/$_output_name.kotlinc.jar" + _kotlin_interface_jar_path = + "$target_out_dir/$_output_name.kt-jvm-abi.jar" + compile_kt(_compile_kt_target_name) { + deps = _header_classpath_deps + output_jar_path = _kotlinc_jar_path + output_interface_jar_path = _kotlin_interface_jar_path + main_target_name = _main_target_name + build_config = _build_config + srcjar_deps = _srcjar_deps + source_files = _source_files + target_sources_file = _target_sources_file + chromium_code = _chromium_code + include_android_sdk = _is_robolectric || _requires_android + } + } + + template("compile_java_helper") { + _enable_errorprone = + defined(invoker.enable_errorprone) && invoker.enable_errorprone + if (_enable_errorprone) { + # Rely on the header jar to provide all .class files so that it is + # safe to omit generated files entirely for errorprone. + _filtered_java_files = + filter_exclude(_java_files, [ "$root_gen_dir*" ]) + } + if (_enable_errorprone && _filtered_java_files == []) { + # Filtering out generated files resulted in no files left. + group(target_name) { + not_needed(invoker, "*") + deps = _header_classpath_deps + } + } else { + compile_java(target_name) { + forward_variables_from(invoker, + "*", + TESTONLY_AND_VISIBILITY + [ "deps" ]) + deps = _header_classpath_deps + if (defined(invoker.deps)) { + deps += invoker.deps + } + output_jar_path = invoker.output_jar_path + if (defined(invoker.kotlin_jar_path)) { + deps += [ ":$_compile_kt_target_name" ] + kotlin_jar_path = invoker.kotlin_jar_path + } + enable_errorprone = _enable_errorprone + use_turbine = defined(invoker.use_turbine) && invoker.use_turbine + + main_target_name = _main_target_name + build_config = _build_config + + if (_enable_errorprone) { + source_files = _filtered_java_files + } else { + source_files = _source_files + srcjar_deps = _srcjar_deps + } + + if (source_files != []) { + target_sources_file = _target_sources_file + } + chromium_code = _chromium_code + include_android_sdk = _is_robolectric || _requires_android + } + } + } + _compile_java_forward_variables = [ + "additional_jar_files", + "apk_name", + "jar_excluded_patterns", + "javac_args", + "processor_args_javac", + "skip_build_server", + ] + _annotation_processor_deps = [] + if (defined(invoker.annotation_processor_deps)) { + _annotation_processor_deps = invoker.annotation_processor_deps + } + + compile_java_helper(_header_target_name) { + forward_variables_from(invoker, _compile_java_forward_variables) + use_turbine = true + output_jar_path = _final_ijar_path + generated_jar_path = _generated_jar_path + deps = _annotation_processor_deps + if (_kt_files != []) { + kotlin_jar_path = _kotlin_interface_jar_path + } + } + + _compile_java_target = "${_main_target_name}__compile_java" + compile_java_helper(_compile_java_target) { + forward_variables_from(invoker, _compile_java_forward_variables) + output_jar_path = _javac_jar_path + deps = [ ":$_header_target_name" ] + header_jar_path = _final_ijar_path + generated_jar_path = _generated_jar_path + if (_kt_files != []) { + kotlin_jar_path = _kotlinc_jar_path + } + } + if (_enable_errorprone) { + _compile_java_errorprone_target = "${_main_target_name}__errorprone" + compile_java_helper(_compile_java_errorprone_target) { + forward_variables_from(invoker, _compile_java_forward_variables) + enable_errorprone = true + if (defined(invoker.errorprone_args)) { + if 
(!defined(javac_args)) { + javac_args = [] + } + javac_args += invoker.errorprone_args + } + deps = [ ":$_header_target_name" ] + if (_kt_files != []) { + kotlin_jar_path = _kotlinc_jar_path + } + header_jar_path = _final_ijar_path + generated_jar_path = _generated_jar_path + output_jar_path = "$target_out_dir/$target_name.errorprone.stamp" + } + _java_validate_deps += [ ":$_compile_java_errorprone_target" ] + } + } # _has_sources + + if (_is_prebuilt || _build_device_jar || _build_host_jar) { + if (_has_sources) { + _unprocessed_jar_deps = [ ":$_compile_java_target" ] + } else { + # Jars might be generated by a dep. + _unprocessed_jar_deps = _non_java_deps + } + } + + if (defined(invoker.bytecode_rewriter_target)) { + assert(_build_host_jar || _build_device_jar, + "A host or device jar must be created to use bytecode rewriting") + + _rewritten_jar = "$target_out_dir/${target_name}_rewritten.jar" + _rewritten_jar_target_name = "${target_name}__rewritten" + _rewriter_path = root_build_dir + "/bin/helper/" + + get_label_info(invoker.bytecode_rewriter_target, "name") + _rebased_build_config = rebase_path(_build_config, root_build_dir) + action_with_pydeps(_rewritten_jar_target_name) { + script = "//build/android/gyp/bytecode_rewriter.py" + inputs = [ + _rewriter_path, + _build_config, + _unprocessed_jar_path, + ] + outputs = [ _rewritten_jar ] + depfile = "$target_gen_dir/$target_name.d" + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--script", + rebase_path(_rewriter_path, root_build_dir), + "--classpath", + "@FileArg($_rebased_build_config:deps_info:javac_full_classpath)", + "--classpath", + "@FileArg($_rebased_build_config:android:sdk_jars)", + "--input-jar", + rebase_path(_unprocessed_jar_path, root_build_dir), + "--output-jar", + rebase_path(_rewritten_jar, root_build_dir), + ] + deps = _unprocessed_jar_deps + _javac_classpath_deps + + [ invoker.bytecode_rewriter_target ] + } + + _unprocessed_jar_deps = [] + _unprocessed_jar_deps = [ ":$_rewritten_jar_target_name" ] + _unprocessed_jar_path = _rewritten_jar + } + + if (_is_prebuilt) { + generate_interface_jar(_header_target_name) { + # Always use the unfiltered .jar to create the interface jar so that + # other targets will resolve filtered classes when depending on + # BuildConfig, NativeLibraries, etc. + input_jar = _unprocessed_jar_path + output_jar = _final_ijar_path + + # ijar needs only _unprocessed_jar_deps, but this also needs to export + # the __header target from deps. 
+ deps = _unprocessed_jar_deps + _java_header_deps + } + } + + if (_build_host_jar || _build_device_jar) { + _enable_bytecode_checks = + (!defined(invoker.enable_bytecode_checks) || + invoker.enable_bytecode_checks) && android_static_analysis != "off" + if (_enable_bytecode_checks) { + _validate_target_name = "${target_name}__validate" + bytecode_processor(_validate_target_name) { + forward_variables_from(invoker, [ "missing_classes_allowlist" ]) + deps = _unprocessed_jar_deps + _javac_classpath_deps + + [ ":$_build_config_target_name" ] + data_deps = _java_validate_deps + if (defined(_compile_java_errorprone_target)) { + data_deps += [ ":$_compile_java_errorprone_target" ] + } + + include_android_sdk = _requires_android || _is_robolectric + target_label = + get_label_info(":${invoker.target_name}", "label_no_toolchain") + input_jar = _unprocessed_jar_path + build_config = _build_config + is_prebuilt = _is_prebuilt + } + } else { + not_needed(invoker, [ "missing_classes_allowlist" ]) + } + + if (_build_host_jar) { + _process_host_jar_target_name = "${target_name}__host" + process_java_library(_process_host_jar_target_name) { + forward_variables_from(invoker, + [ + "jar_excluded_patterns", + "jar_included_patterns", + ]) + + # Robolectric tests require these to be on swarming. + data = [ _host_processed_jar_path ] + input_jar_path = _unprocessed_jar_path + deps = _unprocessed_jar_deps + _javac_classpath_deps + output_jar_path = _host_processed_jar_path + jacoco_instrument = _jacoco_instrument + if (_jacoco_instrument) { + source_files = _source_files + target_sources_file = _target_sources_file + } + + # _java_host_deps isn't necessary for process_java_library(), but is + # necessary so that this target can be used to depend on transitive + # __device targets without the need to create a separate group() + # target. This trade-off works because process_java_library is fast. + deps += _java_host_deps + + # Add runtime_deps here since robolectric_binary does not depend on top-level group. + if (defined(invoker.data)) { + data += invoker.data + } + if (defined(invoker.data_deps)) { + data_deps = invoker.data_deps + } + } + } + + if (_build_device_jar) { + if (_process_device_jar) { + _process_device_jar_target_name = "${target_name}__process_device" + process_java_library(_process_device_jar_target_name) { + forward_variables_from(invoker, + [ + "jar_excluded_patterns", + "jar_included_patterns", + ]) + input_jar_path = _unprocessed_jar_path + + deps = _unprocessed_jar_deps + _javac_classpath_deps + output_jar_path = _device_processed_jar_path + jacoco_instrument = _jacoco_instrument + if (_jacoco_instrument) { + source_files = _source_files + target_sources_file = _target_sources_file + } + } + _process_device_jar_deps = [ ":${_process_device_jar_target_name}" ] + } else { + assert(_unprocessed_jar_path == _device_processed_jar_path) + _process_device_jar_deps = _unprocessed_jar_deps + } + + _dex_target_name = "${target_name}__dex" + dex(_dex_target_name) { + forward_variables_from(invoker, [ "proguard_enable_obfuscation" ]) + input_class_jars = [ _device_processed_jar_path ] + enable_desugar = _enable_desugar + ignore_desugar_missing_deps = !_enable_bytecode_checks + + # There's no value in per-class dexing prebuilts since they never + # change just one class at a time. + disable_incremental = _is_prebuilt + output = _dex_path + deps = _process_device_jar_deps + + if (enable_desugar) { + # Desugaring with D8 requires full classpath. 
+ build_config = _build_config + unprocessed_jar_path = _unprocessed_jar_path + deps += _header_classpath_deps + _unprocessed_jar_deps + } + + enable_multidex = false + is_library = true + + # proguard_configs listed on java_library targets need to be marked + # as inputs to at least one target so that "gn analyze" will know + # about them. Although this target doesn't use them, it's a convenient spot + # to list them. + # https://crbug.com/827197 + if (compute_inputs_for_analyze && defined(invoker.proguard_configs)) { + inputs = invoker.proguard_configs + + # For the aapt-generated proguard rules. + deps += _non_java_deps + _srcjar_deps + } + } + } + } + + if (_is_java_binary) { + # Targets might use the generated script while building, so make it a dep + # rather than a data_dep. + _java_binary_script_target_name = "${target_name}__java_binary_script" + java_binary_script(_java_binary_script_target_name) { + forward_variables_from(invoker, + [ + "tiered_stop_at_level_one", + "main_class", + "max_heap_size", + "wrapper_script_args", + ]) + build_config = _build_config + script_name = _main_target_name + if (defined(invoker.wrapper_script_name)) { + script_name = invoker.wrapper_script_name + } + deps = [ ":$_build_config_target_name" ] + if (_is_robolectric) { + # For robolectric tests, we also add the normal sdk jar to the + # classpath since whenever we start using a new Android SDK, + # robolectric doesn't support it, and they often take a few months to + # support it. This causes issues when mocking classes that reference + # new SDK classes, so providing our normal SDK will allow these + # classes to resolve. For an example, see crbug.com/1350963. + extra_classpath_jars = [ android_sdk_jar ] + + # Mockito bug with JDK17 requires us to use JDK11 until we find a fix + # for crbug.com/1409661. + use_jdk_11 = true + } + } + } + + if (!defined(_validate_target_name)) { + _validate_target_name = "${target_name}__validate" + + # Allow other targets to depend on this __validate one. + group(_validate_target_name) { + deps = _java_validate_deps + } + } + + if (_supports_host && !defined(_process_host_jar_target_name)) { + group("${target_name}__host") { + deps = _java_host_deps + } + } + + # robolectric_library can depend on java_library, so java_library must + # define __assetres. + if ((_is_library || _supports_android || _is_robolectric) && + !defined(_fake_rjava_target)) { + group("${target_name}__assetres") { + if (_supports_android || _is_robolectric) { + deps = _java_assetres_deps + } + } + } + + # The top-level group is used: + # 1) To allow building the target explicitly via ninja, + # 2) To trigger all analysis deps, + # 3) By custom action() targets that want to use artifacts as inputs. + group(target_name) { + forward_variables_from(invoker, + [ + "assert_no_deps", + "data", + "data_deps", + "visibility", + ]) + if (_requires_android || (_supports_android && _is_library)) { + # For non-robolectric targets, depend on other java target's top-level + # groups so that the __dex step gets depended on. + forward_variables_from(invoker, + [ + "deps", + "public_deps", + ]) + if (!defined(deps)) { + deps = [] + } + if (!defined(public_deps)) { + public_deps = [] + } + } else { + # For robolectric targets, depend only on non-java deps and the specific + # subtargets below, which will not include __dex. 
+ deps = _non_java_deps + public_deps = [] + if (defined(invoker.public_deps)) { + public_deps += + filter_exclude(invoker.public_deps, java_target_patterns) + } + } + if (defined(_jacoco_instrument) && _jacoco_instrument) { + deps += [ _jacoco_dep ] + } + if (defined(invoker.apk_under_test)) { + deps += [ invoker.apk_under_test ] + } + if (defined(_process_device_jar_target_name)) { + public_deps += [ ":$_process_device_jar_target_name" ] + } + if (defined(_dex_target_name)) { + public_deps += [ ":$_dex_target_name" ] + } + if (_supports_android && _is_library) { + # Robolectric targets define __assetres, but there's no need to build it + # by default. + public_deps += [ ":${target_name}__assetres" ] + } + if (_supports_host) { + # android_* targets define __host, but there's no need to build it by + # default. + public_deps += [ ":${target_name}__host" ] + } + if (_is_java_binary) { + public_deps += [ ":$_java_binary_script_target_name" ] + } + if (!defined(data_deps)) { + data_deps = [] + } + if (defined(_validate_target_name)) { + data_deps += [ ":$_validate_target_name" ] + } else { + data_deps += _java_validate_deps + } + } + } +} + +# Create a zip archive corresponding to an application bundle module. +# +# Compile all the components of a given android_apk_or_module() target into a +# zip archive suitable to later create an android_app_bundle() target. This +# archive's format is very similar to that of an APK, except for a few +# differences in internal directory layouts, and the fact that resources, as +# well as xml files, are compiled using a protocol-buffer based format (instead +# of the regular binary xml + resources.arsc). +# +# A final application bundle is built from one or more bundle modules, plus +# some configuration files. +# +# Variables: +# module_zip_path: Output module path. +# build_config: Path to build_config of the android_apk_or_module() target. +# dex_path: If the module is proguarded separately from the base module, +# dex_path is the path to its dex file and is passed directly to the +# creation script. Otherwise, dex_path is undefined and we retrieve the +# module's dex file using its build_config. +# expected_libs_and_assets: Verify the list of included native libraries +# and assets is consistent with the given expectation file. +# expected_libs_and_assets_base: Treat expected_libs_and_assets as a diff +# with this file as the base. +# is_multi_abi: If true, will add a library placeholder for the missing ABI +# if either the primary or the secondary ABI has no native libraries set. +# module_name: The module's name. +# native_libraries_config: Path to file listing native libraries to be +# packaged into each module. +# proguard_enabled: Optional. True if proguarding is enabled for this +# bundle. Default is to enable this only for release builds. Note that +# this will always perform synchronized proguarding. +template("create_android_app_bundle_module") { + _rebased_build_config = rebase_path(invoker.build_config, root_build_dir) + _rebased_native_libraries_config = + rebase_path(invoker.native_libraries_config, root_build_dir) + + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + _deps = invoker.deps + _script = "//build/android/gyp/apkbuilder.py" + + # NOTE: Compared to the inputs of the "package_apk" template action, + # this list is much smaller, since finalize_apk is never called + # by apkbuilder.py --format=bundle-module. This means not using + # apksigner and zipalign as well, nor the keystore. 
Other + # dependencies like extra native libraries are all pulled from the + # .build_config.json through @FileArg() references (see below) and + # will be listed in the generated depfile instead. + _inputs = [ + invoker.build_config, + invoker.native_libraries_config, + ] + _outputs = [ invoker.module_zip_path ] + _args = [ + "--format=bundle-module", + "--output-apk", + rebase_path(invoker.module_zip_path, root_build_dir), + "--resource-apk=@FileArg(" + + "$_rebased_build_config:deps_info:proto_resources_path)", + "--assets=@FileArg($_rebased_build_config:assets)", + "--uncompressed-assets=@FileArg(" + + "$_rebased_build_config:uncompressed_assets)", + "--native-libs=@FileArg($_rebased_native_libraries_config" + + ":${invoker.module_name})", + "--native-lib-placeholders=@FileArg($_rebased_build_config" + + ":native:native_library_placeholders)", + "--secondary-native-lib-placeholders=@FileArg($_rebased_build_config" + + ":native:secondary_native_library_placeholders)", + "--android-abi=$android_app_abi", + "--min-sdk-version=${invoker.min_sdk_version}", + "--library-always-compress=@FileArg($_rebased_build_config:native:library_always_compress)", + ] + if (defined(android_app_secondary_abi)) { + _rebased_secondary_abi_native_libraries_config = + rebase_path(invoker.secondary_abi_native_libraries_config, + root_build_dir) + _args += [ + "--secondary-native-libs", + "@FileArg($_rebased_secondary_abi_native_libraries_config" + + ":${invoker.module_name})", + "--secondary-android-abi=$android_app_secondary_abi", + ] + } + if (defined(invoker.is_multi_abi) && invoker.is_multi_abi) { + _args += [ "--is-multi-abi" ] + } + if (defined(invoker.uncompress_dex) && invoker.uncompress_dex) { + _args += [ "--uncompress-dex" ] + } + if (defined(invoker.extra_assets)) { + _args += [ "--assets=${invoker.extra_assets}" ] + } + + # Use either provided dex path or build config path based on type of module. 
+ if (defined(invoker.dex_path)) { + _inputs += [ invoker.dex_path ] + _rebased_dex_path = rebase_path(invoker.dex_path, root_build_dir) + _args += [ "--dex-file=$_rebased_dex_path" ] + } else { + _args += [ "--dex-file=@FileArg($_rebased_build_config:final_dex:path)" ] + } + + if (treat_warnings_as_errors) { + _args += [ "--warnings-as-errors" ] + } + + if (defined(invoker.expected_libs_and_assets)) { + _expectations_target = "${invoker.top_target_name}_validate_libs_and_assets" + action_with_pydeps(_expectations_target) { + _actual_file = "$target_gen_dir/$target_name.libs_and_assets" + _failure_file = "$expectations_failure_dir/" + + string_replace(invoker.expected_libs_and_assets, "/", "_") + inputs = [ + invoker.expected_libs_and_assets, + invoker.build_config, + invoker.native_libraries_config, + ] + deps = [ + invoker.build_config_target, + invoker.native_libraries_config_target, + ] + if (defined(android_app_secondary_abi)) { + inputs += [ invoker.secondary_abi_native_libraries_config ] + deps += [ invoker.secondary_abi_native_libraries_config_target ] + } + outputs = [ + _actual_file, + _failure_file, + ] + script = _script + args = _args + [ + "--expected-file", + rebase_path(invoker.expected_libs_and_assets, root_build_dir), + "--actual-file", + rebase_path(_actual_file, root_build_dir), + "--failure-file", + rebase_path(_failure_file, root_build_dir), + "--only-verify-expectations", + ] + if (defined(invoker.expected_libs_and_assets_base)) { + inputs += [ invoker.expected_libs_and_assets_base ] + args += [ + "--expected-file-base", + rebase_path(invoker.expected_libs_and_assets_base, root_build_dir), + ] + } + if (fail_on_android_expectations) { + args += [ "--fail-on-expectations" ] + } + } + _deps += [ ":$_expectations_target" ] + } + + action_with_pydeps(target_name) { + deps = _deps + inputs = _inputs + outputs = _outputs + script = _script + depfile = "$target_gen_dir/$target_name.d" + args = _args + [ + "--depfile", + rebase_path(depfile, root_build_dir), + ] + } +} + +# Allots native libraries depended on by feature modules to the module the +# libraries should be packaged into. The packaging module may be different from +# the dependee module in case a library is depended on by multiple modules. In +# that case the library will be allotted to the closest ancestor given a module +# dependency tree (see |parent| below). +# +# Variables: +# modules: List of scopes with the following format: +# name: The module's name. +# parent: The module's parent's name. +# build_config: Path to the module's build config. +# build_config_target: Target creating |build_config|. +# native_libraries_filearg_keys: Keys to be used in +# @FileArg(|build_config|:<key>) expressions pointing to a list of native +# libraries to consider in |build_config|. +# output: Path to native libraries config. 
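+# Example (illustrative; the target names, file names, and FileArg keys below
+# are hypothetical and depend on how the modules' build configs are set up):
+#   _base_module = {
+#     name = "base"
+#     build_config = "$target_gen_dir/base.build_config.json"
+#     build_config_target = ":base__build_config"
+#   }
+#   _feature_module = {
+#     name = "feature"
+#     parent = "base"
+#     build_config = "$target_gen_dir/feature.build_config.json"
+#     build_config_target = ":feature__build_config"
+#   }
+#   allot_native_libraries("allot_native_libs") {
+#     modules = [ _base_module, _feature_module ]
+#     native_libraries_filearg_keys = [ "native:libraries" ]
+#     output = "$target_gen_dir/native_libraries_config.json"
+#   }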
+template("allot_native_libraries") { + action_with_pydeps(target_name) { + script = "//build/android/gyp/allot_native_libraries.py" + args = [ + "--output", + rebase_path(invoker.output, root_build_dir), + ] + outputs = [ invoker.output ] + deps = [] + if (defined(invoker.deps)) { + deps += invoker.deps + } + inputs = [] + foreach(_module, invoker.modules) { + deps += [ _module.build_config_target ] + inputs += [ _module.build_config ] + _rebased_build_config = rebase_path(_module.build_config, root_out_dir) + foreach(_key, invoker.native_libraries_filearg_keys) { + args += [ + "--libraries", + "${_module.name},@FileArg($_rebased_build_config:$_key)", + ] + } + if (defined(_module.parent)) { + args += [ + "--dep", + "${_module.parent}:${_module.name}", + ] + } + } + } +} diff --git a/config/android/linker_version_script.gni b/config/android/linker_version_script.gni new file mode 100644 index 000000000000..864233c8c70b --- /dev/null +++ b/config/android/linker_version_script.gni @@ -0,0 +1,48 @@ +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/config.gni") +import("//build/config/python.gni") + +# Generate a custom linker version script that can later be used with +# "-Wl,--version-script=" ldflags. +# +# Variables: +# export_java_symbols: Optional. If true, also export all Java_* symbols +# exported for JNI. +# export_symbol_allowlist_files: Optional. List of paths to input files containing +# lists of symbols to export. +# linker_script: Path to output linker version script. +# +template("generate_linker_version_script") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + script = "//build/android/gyp/generate_linker_version_script.py" + outputs = [ invoker.linker_script ] + inputs = [] + args = [ "--output=" + rebase_path(invoker.linker_script, root_build_dir) ] + + if (defined(invoker.testonly) && invoker.testonly) { + args += [ "--export-fortesting-java-symbols" ] + } + if (allow_jni_multiplexing) { + args += [ "--jni-multiplexing" ] + } + + if (defined(invoker.export_feature_registrations) && + invoker.export_feature_registrations) { + args += [ "--export-feature-registrations" ] + } + + if (defined(invoker.export_symbol_allowlist_files)) { + foreach(file_, invoker.export_symbol_allowlist_files) { + inputs += [ file_ ] + args += [ + "--export-symbol-allowlist-file", + rebase_path(file_, root_build_dir), + ] + } + } + } +} diff --git a/config/android/rules.gni b/config/android/rules.gni new file mode 100644 index 000000000000..a3eccbff751c --- /dev/null +++ b/config/android/rules.gni @@ -0,0 +1,5651 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Do not add any imports to non-//build directories here. +# Some projects (e.g. V8) do not have non-build directories DEPS'ed in. 
+import("//build/config/android/config.gni") +import("//build/config/android/copy_ex.gni") +import("//build/config/clang/clang.gni") +import("//build/config/compiler/compiler.gni") +import("//build/config/coverage/coverage.gni") +import("//build/config/python.gni") +import("//build/config/rts.gni") +import("//build/config/sanitizers/sanitizers.gni") +import("//build/config/zip.gni") +import("//build/toolchain/toolchain.gni") +assert(is_android || is_robolectric) + +# Use a dedicated include dir so that files can #include headers from other +# toolchains without affecting non-JNI #includes. +if (target_os == "android") { + jni_headers_dir = "$root_build_dir/gen/jni_headers" +} else { + # Chrome OS builds cannot share gen/ directories because is_android=false + # within default_toolchain. + jni_headers_dir = "$root_gen_dir/jni_headers" +} + +if (target_cpu == "arm") { + _sanitizer_arch = "arm" +} else if (target_cpu == "arm64") { + _sanitizer_arch = "aarch64" +} else if (target_cpu == "x86") { + _sanitizer_arch = "i686" +} + +_sanitizer_runtimes = [] +if (use_cfi_diag || is_ubsan || is_ubsan_security || is_ubsan_vptr) { + _sanitizer_runtimes = [ "$clang_base_path/lib/clang/$clang_version/lib/linux/libclang_rt.ubsan_standalone-$_sanitizer_arch-android.so" ] +} + +_BUNDLETOOL_JAR_PATH = + "//third_party/android_build_tools/bundletool/bundletool.jar" + +# Creates a dist directory for a native executable. +# +# Running a native executable on a device requires all the shared library +# dependencies of that executable. To make it easier to install and run such an +# executable, this will create a directory containing the native exe and all +# its library dependencies. +# +# Note: It's usually better to package things as an APK than as a native +# executable. +# +# Variables +# dist_dir: Directory for the exe and libraries. Everything in this directory +# will be deleted before copying in the exe and libraries. +# binary: Path to (stripped) executable. +# extra_files: List of extra files to copy in (optional). 
+# +# Example +# create_native_executable_dist("foo_dist") { +# dist_dir = "$root_build_dir/foo_dist" +# binary = "$root_build_dir/foo" +# deps = [ ":the_thing_that_makes_foo" ] +# } +template("create_native_executable_dist") { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + + _libraries_list = "${target_gen_dir}/${target_name}_library_dependencies.list" + + _sanitizer_runtimes_target_name = "${target_name}__sanitizer_runtimes" + group(_sanitizer_runtimes_target_name) { + metadata = { + shared_libraries = _sanitizer_runtimes + } + } + + generated_file("${target_name}__library_list") { + forward_variables_from(invoker, [ "deps" ]) + if (!defined(deps)) { + deps = [] + } + deps += [ ":${_sanitizer_runtimes_target_name}" ] + output_conversion = "json" + outputs = [ _libraries_list ] + data_keys = [ "shared_libraries" ] + walk_keys = [ "shared_libraries_barrier" ] + rebase = root_build_dir + } + + copy_ex(target_name) { + inputs = [ + _libraries_list, + invoker.binary, + ] + + dest = invoker.dist_dir + data = [ "${invoker.dist_dir}/" ] + + _rebased_libraries_list = rebase_path(_libraries_list, root_build_dir) + _rebased_binaries_list = rebase_path([ invoker.binary ], root_build_dir) + args = [ + "--clear", + "--files=@FileArg($_rebased_libraries_list)", + "--files=$_rebased_binaries_list", + ] + if (defined(invoker.extra_files)) { + _rebased_extra_files = rebase_path(invoker.extra_files, root_build_dir) + args += [ "--files=$_rebased_extra_files" ] + } + + _depfile = "$target_gen_dir/$target_name.d" + _stamp_file = "$target_gen_dir/$target_name.stamp" + outputs = [ _stamp_file ] + args += [ + "--depfile", + rebase_path(_depfile, root_build_dir), + "--stamp", + rebase_path(_stamp_file, root_build_dir), + ] + + deps = [ ":${target_name}__library_list" ] + if (defined(invoker.deps)) { + deps += invoker.deps + } + } +} + +if (enable_java_templates) { + if (is_android) { + import("//build/config/android/internal_rules.gni") + } + + # JNI target implementation. See generate_jni or generate_jar_jni for usage. + template("generate_jni_impl") { + _prev_jni_output_dir = "$target_gen_dir/$target_name" + _subdir = rebase_path(target_gen_dir, root_gen_dir) + _jni_output_dir = "$jni_headers_dir/$_subdir/$target_name" + if (defined(invoker.jni_generator_include)) { + _jni_generator_include = invoker.jni_generator_include + _jni_generator_include_deps = [] + } else { + _jni_generator_include = + "//base/android/jni_generator/jni_generator_helper.h" + _jni_generator_include_deps = [ + # Using //base/android/jni_generator/jni_generator_helper.h introduces + # a dependency on buildflags targets indirectly through + # base/android/jni_android.h, which is part of the //base target. + # This can't depend directly on //base without causing a dependency + # cycle, though. + "//base:debugging_buildflags", + "//base:logging_buildflags", + "//build:chromeos_buildflags", + ] + } + + action_with_pydeps(target_name) { + # The sources aren't compiled so don't check their dependencies. + check_includes = false + script = "//base/android/jni_generator/jni_generator.py" + forward_variables_from(invoker, + TESTONLY_AND_VISIBILITY + [ + "deps", + "public_deps", + ]) + if (!defined(public_deps)) { + public_deps = [] + } + public_deps += _jni_generator_include_deps + + inputs = [] + args = [ + "--ptr_type=long", + + # TODO(agrieve): --prev_output_dir used only to make incremental builds + # work. Remove --prev_output_dir at some point after 2022. 
+ "--prev_output_dir", + rebase_path(_prev_jni_output_dir, root_build_dir), + "--output_dir", + rebase_path(_jni_output_dir, root_build_dir), + "--includes", + rebase_path(_jni_generator_include, _jni_output_dir), + ] + + if (defined(invoker.classes)) { + if (is_robolectric) { + not_needed(invoker, [ "jar_file" ]) + } else { + if (defined(invoker.jar_file)) { + _jar_file = invoker.jar_file + } else { + _jar_file = android_sdk_jar + } + inputs += [ _jar_file ] + args += [ + "--jar_file", + rebase_path(_jar_file, root_build_dir), + ] + } + _input_args = invoker.classes + _input_names = invoker.classes + if (defined(invoker.always_mangle) && invoker.always_mangle) { + args += [ "--always_mangle" ] + } + if (defined(invoker.unchecked_exceptions) && + invoker.unchecked_exceptions) { + args += [ "--unchecked_exceptions" ] + } + } else { + assert(defined(invoker.sources)) + inputs += invoker.sources + _input_args = rebase_path(invoker.sources, root_build_dir) + _input_names = invoker.sources + if (!is_robolectric && use_hashed_jni_names) { + args += [ "--use_proxy_hash" ] + } + + if (!is_robolectric && defined(invoker.enable_jni_multiplexing) && + invoker.enable_jni_multiplexing) { + args += [ "--enable_jni_multiplexing" ] + } + if (defined(invoker.namespace)) { + args += [ "-n ${invoker.namespace}" ] + } + } + if (defined(invoker.split_name)) { + args += [ "--split_name=${invoker.split_name}" ] + } + + outputs = [] + foreach(_name, _input_names) { + _name = get_path_info(_name, "name") + "_jni.h" + outputs += [ "$_jni_output_dir/$_name" ] + + # Avoid passing GN lists because not all webrtc embedders use //build. + args += [ + "--output_name", + _name, + ] + } + + foreach(_input, _input_args) { + args += [ "--input_file=$_input" ] + } + + if (enable_profiling) { + args += [ "--enable_profiling" ] + } + if (current_toolchain != default_toolchain && target_os == "android") { + # Rather than regenerating .h files in secondary toolchains, re-use the + # ones from the primary toolchain by depending on it and adding the + # root gen directory to the include paths. + # https://crbug.com/1369398 + inputs = [] + outputs = [] + _stamp = "$target_gen_dir/$target_name.stamp" + outputs = [ _stamp ] + + # Since we used to generate the .h files rather than delegate, the + # script will delete all .h files it finds in --prev_output_dir. + # TODO(agrieve): --prev_output_dir used only to make incremental builds + # work. Convert to group() target at some point after 2022. + args += [ + "--stamp", + rebase_path(_stamp, root_build_dir), + ] + deps = [] + public_deps = [] + public_deps = [ ":$target_name($default_toolchain)" ] + public_configs = + [ "//build/config/android:jni_include_dir($default_toolchain)" ] + } else { + public_configs = [ "//build/config/android:jni_include_dir" ] + if (defined(visibility)) { + # Allow dependency on ourselves from secondary toolchain. + visibility += [ ":$target_name" ] + } + } + } + } + + # Declare a jni target + # + # This target generates the native jni bindings for a set of .java files. + # + # See base/android/jni_generator/jni_generator.py for more info about the + # format of generating JNI bindings. + # + # Variables + # sources: list of .java files to generate jni for + # namespace: Specify the namespace for the generated header file. + # deps, public_deps: As normal + # + # Example + # # Target located in base/BUILD.gn. 
+ # generate_jni("foo_jni") { + # # Generates gen/base/foo_jni/Foo_jni.h + # # To use: #include "base/foo_jni/Foo_jni.h" + # sources = [ + # "android/java/src/org/chromium/foo/Foo.java", + # ..., + # ] + # } + template("generate_jni") { + generate_jni_impl(target_name) { + forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + } + } + + # Declare a jni target for a prebuilt jar + # + # This target generates the native jni bindings for a set of classes in a .jar. + # + # See base/android/jni_generator/jni_generator.py for more info about the + # format of generating JNI bindings. + # + # Variables + # classes: list of .class files in the jar to generate jni for. These should + # include the full path to the .class file. + # jar_file: the path to the .jar. If not provided, will default to the sdk's + # android.jar + # always_mangle: Mangle all generated method names. By default, the script + # only mangles methods that cause ambiguity due to method overload. + # unchecked_exceptions: Don't CHECK() for exceptions in generated stubs. + # This behaves as if every method had @CalledByNativeUnchecked. + # deps, public_deps: As normal + # + # Example + # # Target located in base/BUILD.gn. + # generate_jar_jni("foo_jni") { + # # Generates gen/base/foo_jni/Runnable_jni.h + # # To use: #include "base/foo_jni/Runnable_jni.h" + # classes = [ + # "android/view/Foo.class", + # ] + # } + template("generate_jar_jni") { + generate_jni_impl(target_name) { + forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + } + } +} # enable_java_templates + +# non-robolectric things +if (enable_java_templates && is_android) { + # Declare a jni registration target. + # + # This target generates a srcjar containing a copy of GEN_JNI.java, which has + # the native methods of all dependent java files. It can also create a .h file + # for use with manual JNI registration. + # + # The script does not scan any generated sources (those within .srcjars, or + # within root_build_dir). This could be fixed by adding deps & logic to scan + # .srcjars, but isn't currently needed. + # + # See base/android/jni_generator/jni_registration_generator.py for more info + # about the format of the header file. + # + # Variables + # targets: List of .build_config.json supported targets to provide java sources. + # manual_jni_registration: Manually do JNI registration - required for feature + # splits which provide their own native library. (optional) + # file_exclusions: List of .java files that should be skipped. (optional) + # namespace: Registration functions will be wrapped into this. (optional) + # require_native_mocks: Enforce that any native calls using + # org.chromium.base.annotations.NativeMethods must have a mock set + # (optional). + # enable_native_mocks: Allow native calls using + # org.chromium.base.annotations.NativeMethods to be mocked in tests + # (optional). + # no_transitive_deps: Generate registration for only the Java source in the + # specified target(s). This is useful for generating registration for + # feature modules, without including base module dependencies. 
+ # + # Example + # generate_jni_registration("chrome_jni_registration") { + # targets = [ ":chrome_public_apk" ] + # manual_jni_registration = false + # file_exclusions = [ + # "//path/to/Exception.java", + # ] + # } + template("generate_jni_registration") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + + script = "//base/android/jni_generator/jni_registration_generator.py" + inputs = [] + deps = [] + _srcjar_output = "$target_gen_dir/$target_name.srcjar" + outputs = [ _srcjar_output ] + depfile = "$target_gen_dir/$target_name.d" + + args = [ + "--srcjar-path", + rebase_path(_srcjar_output, root_build_dir), + "--depfile", + rebase_path(depfile, root_build_dir), + ] + foreach(_target, invoker.targets) { + deps += [ "${_target}$build_config_target_suffix($default_toolchain)" ] + _build_config = + get_label_info("${_target}($default_toolchain)", "target_gen_dir") + + "/" + get_label_info("${_target}($default_toolchain)", "name") + + ".build_config.json" + _rebased_build_config = rebase_path(_build_config, root_build_dir) + inputs += [ _build_config ] + + if (defined(invoker.no_transitive_deps) && invoker.no_transitive_deps) { + args += [ "--sources-files=@FileArg($_rebased_build_config:deps_info:target_sources_file)" ] + } else { + args += [ + # This is a list of .sources files. + "--sources-files=@FileArg($_rebased_build_config:deps_info:jni_all_source)", + ] + } + } + if (defined(invoker.include_testonly)) { + _include_testonly = invoker.include_testonly + } else { + _include_testonly = defined(testonly) && testonly + } + if (_include_testonly) { + args += [ "--include-test-only" ] + } + + if (use_hashed_jni_names) { + args += [ "--use-proxy-hash" ] + } + + if (defined(invoker.enable_native_mocks) && invoker.enable_native_mocks) { + args += [ "--enable-proxy-mocks" ] + + if (defined(invoker.require_native_mocks) && + invoker.require_native_mocks) { + args += [ "--require-mocks" ] + } + } + + _manual_jni_registration = defined(invoker.manual_jni_registration) && + invoker.manual_jni_registration + _enable_jni_multiplexing = defined(invoker.enable_jni_multiplexing) && + invoker.enable_jni_multiplexing + if (_manual_jni_registration) { + args += [ "--manual-jni-registration" ] + } + if (_enable_jni_multiplexing) { + args += [ "--enable-jni-multiplexing" ] + } + + if ((!defined(invoker.prevent_header_output) || + !invoker.prevent_header_output) && + (_manual_jni_registration || _enable_jni_multiplexing)) { + assert(current_toolchain == default_toolchain, + "We do not need >1 toolchain copies of the same header.") + + _subdir = rebase_path(target_gen_dir, root_gen_dir) + _jni_header_output = + "$jni_headers_dir/$_subdir/${target_name}_generated.h" + outputs += [ _jni_header_output ] + args += [ + "--header-path", + rebase_path(_jni_header_output, root_build_dir), + ] + + # This gives targets depending on this registration access to our generated header. 
+ public_configs = [ "//build/config/android:jni_include_dir" ] + } + + if (defined(invoker.file_exclusions)) { + _rebase_file_exclusions = + rebase_path(invoker.file_exclusions, root_build_dir) + args += [ "--file-exclusions=$_rebase_file_exclusions" ] + } + + if (defined(invoker.namespace)) { + args += [ "--namespace=${invoker.namespace}" ] + } + + if (defined(invoker.module_name)) { + args += [ "--module-name=${invoker.module_name}" ] + } + } + } + + # Declare a target for c-preprocessor-generated java files + # + # NOTE: For generating Java counterparts to enums, prefer using the + # java_cpp_enum rule instead. + # + # This target generates java files using the host C pre-processor. Each file in + # sources will be compiled using the C pre-processor. If include_path is + # specified, it will be passed (with -I) to the pre-processor. + # + # This target will create a single .srcjar. Adding this target to an + # android_library target's srcjar_deps will make the generated java files be + # included in that library's final outputs. + # + # Variables + # sources: list of files to be processed by the C pre-processor. For each + # file in sources, there will be one .java file in the final .srcjar. For a + # file named FooBar.template, a java file will be created with name + # FooBar.java. + # inputs: additional compile-time dependencies. Any files + # `#include`-ed in the templates should be listed here. + # defines: List of -D arguments for the preprocessor. + # + # Example + # java_cpp_template("foo_generated_enum") { + # sources = [ + # "android/java/templates/Foo.template", + # ] + # inputs = [ + # "android/java/templates/native_foo_header.h", + # ] + # } + template("java_cpp_template") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, + [ + "data_deps", + "deps", + "inputs", + "public_deps", + "sources", + "testonly", + "visibility", + ]) + script = "//build/android/gyp/gcc_preprocess.py" + outputs = [ "$target_gen_dir/$target_name.srcjar" ] + + _include_dirs = [ + "//", + root_gen_dir, + ] + _rebased_include_dirs = rebase_path(_include_dirs, root_build_dir) + args = [ + "--include-dirs=$_rebased_include_dirs", + "--output", + rebase_path(outputs[0], root_build_dir), + ] + if (defined(invoker.defines)) { + foreach(_define, invoker.defines) { + args += [ + "--define", + _define, + ] + } + } + args += rebase_path(sources, root_build_dir) + } + } + + # Declare a target for generating Java classes from C++ enums. + # + # This target generates Java files from C++ enums using a script. + # + # This target will create a single .srcjar. Adding this target to an + # android_library target's srcjar_deps will make the generated java files be + # included in that library's final outputs. + # + # Variables + # sources: list of files to be processed by the script. For each annotated + # enum contained in the source files, the script will generate a .java + # file with the same name as the name of the enum. + # + # Example + # java_cpp_enum("foo_generated_enum") { + # sources = [ + # "src/native_foo_header.h", + # ] + # } + template("java_cpp_enum") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "sources" ]) + + # The sources aren't compiled so don't check their dependencies. 
+ check_includes = false + script = "//build/android/gyp/java_cpp_enum.py" + + _srcjar_path = "${target_gen_dir}/${target_name}.srcjar" + _rebased_srcjar_path = rebase_path(_srcjar_path, root_build_dir) + _rebased_sources = rebase_path(invoker.sources, root_build_dir) + + args = [ "--srcjar=$_rebased_srcjar_path" ] + _rebased_sources + outputs = [ _srcjar_path ] + } + } + + # Declare a target for generating Java classes with string constants matching + # those found in C++ files using a python script. + # + # This target will create a single .srcjar. Adding this target to an + # android_library target's srcjar_deps will make the generated java files be + # included in that library's final outputs. + # + # Variables + # sources: list of files to be processed by the script. For each string + # constant in the source files, the script will add a corresponding + # Java string to the specified template file. + # Example + # java_cpp_strings("foo_switches") { + # sources = [ + # "src/foo_switches.cc", + # ] + # template = "src/templates/FooSwitches.java.tmpl" + # } + # + # foo_switches.cc: + # + # // A switch. + # const char kASwitch[] = "a-switch"; + # + # FooSwitches.java.tmpl + # + # // Copyright {YEAR} The Chromium Authors. All rights reserved. + # // Use of this source code is governed by a BSD-style license that can be + # // found in the LICENSE file. + # + # // This file is autogenerated by + # // {SCRIPT_NAME} + # // From + # // {SOURCE_PATH}, and + # // {TEMPLATE_PATH} + # + # package my.java.package; + # + # public abstract class FooSwitches {{ + # // ...snip... + # {NATIVE_STRINGS} + # // ...snip... + # }} + # + # result: + # A FooSwitches.java file, defining a class named FooSwitches in the package + # my.java.package. + template("java_cpp_strings") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "sources" ]) + + # The sources aren't compiled so don't check their dependencies. + check_includes = false + script = "//build/android/gyp/java_cpp_strings.py" + + _srcjar_path = "${target_gen_dir}/${target_name}.srcjar" + _rebased_srcjar_path = rebase_path(_srcjar_path, root_build_dir) + _rebased_sources = rebase_path(invoker.sources, root_build_dir) + _rebased_template = rebase_path(invoker.template, root_build_dir) + + args = [ + "--srcjar=$_rebased_srcjar_path", + "--template=$_rebased_template", + ] + args += _rebased_sources + sources += [ invoker.template ] + + outputs = [ _srcjar_path ] + } + } + + # Declare a target for generating Java classes with string constants matching + # those found in C++ base::Feature declarations, using a python script. + # + # This target will create a single .srcjar. Adding this target to an + # android_library target's srcjar_deps will make the generated java files be + # included in that library's final outputs. + # + # Variables + # sources: list of files to be processed by the script. For each + # base::Feature in the source files, the script will add a + # corresponding Java string for that feature's name to the + # specified template file. + # Example + # java_cpp_features("foo_features") { + # sources = [ + # "src/foo_features.cc", + # ] + # template = "src/templates/FooFeatures.java.tmpl" + # } + # + # foo_features.cc: + # + # // A feature. + # BASE_FEATURE(kSomeFeature, "SomeFeature", + # base::FEATURE_DISABLED_BY_DEFAULT); + # + # FooFeatures.java.tmpl + # + # // Copyright $YEAR The Chromium Authors. All rights reserved. 
+ # // Use of this source code is governed by a BSD-style license that can be + # // found in the LICENSE file. + # + # package my.java.package; + # + # public final class FooFeatures {{ + # // ...snip... + # {NATIVE_STRINGS} + # // ...snip... + # // Do not instantiate this class. + # private FooFeatures() {{}} + # }} + # + # result: + # A FooFeatures.java file, defining a class named FooFeatures in the package + # my.java.package. + template("java_cpp_features") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, + TESTONLY_AND_VISIBILITY + [ + "deps", + "sources", + ]) + + # The sources aren't compiled so don't check their dependencies. + check_includes = false + script = "//build/android/gyp/java_cpp_features.py" + + _srcjar_path = "${target_gen_dir}/${target_name}.srcjar" + _rebased_srcjar_path = rebase_path(_srcjar_path, root_build_dir) + _rebased_sources = rebase_path(invoker.sources, root_build_dir) + _rebased_template = rebase_path(invoker.template, root_build_dir) + + args = [ + "--srcjar=$_rebased_srcjar_path", + "--template=$_rebased_template", + ] + args += _rebased_sources + sources += [ invoker.template ] + + outputs = [ _srcjar_path ] + } + } + + # Declare a target for processing a Jinja template. + # + # Variables + # input: The template file to be processed. + # includes: List of files {% include %}'ed by input. + # output: Where to save the result. + # variables: (Optional) A list of variables to make available to the template + # processing environment, e.g. ["name=foo", "color=red"]. + # + # Example + # jinja_template("chrome_public_manifest") { + # input = "java/AndroidManifest.xml" + # output = "$target_gen_dir/AndroidManifest.xml" + # } + template("jinja_template") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ]) + inputs = [ invoker.input ] + if (defined(invoker.includes)) { + inputs += invoker.includes + } + script = "//build/android/gyp/jinja_template.py" + + outputs = [ invoker.output ] + + args = [ + "--loader-base-dir", + rebase_path("//", root_build_dir), + "--inputs", + rebase_path(invoker.input, root_build_dir), + "--output", + rebase_path(invoker.output, root_build_dir), + "--check-includes", + ] + if (defined(invoker.includes)) { + _rebased_includes = rebase_path(invoker.includes, root_build_dir) + args += [ "--includes=$_rebased_includes" ] + } + if (defined(invoker.variables)) { + args += [ "--variables=${invoker.variables}" ] + } + } + } + + # Writes native libraries to a NativeLibraries.java file. + # + # This target will create a single .srcjar. Adding this target to an + # android_library target's srcjar_deps will make the generated java files be + # included in that library's final outputs. + # + # Variables: + # native_libraries_list_file: (Optional) Path to file listing all native + # libraries to write. + # version_number: (Optional) String of expected version of 'main' native + # library. + # enable_chromium_linker: (Optional) Whether to use the Chromium linker. + # use_final_fields: True to use final fields. When false, all other + # variables must not be set. 
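+  # Example (illustrative; the target name and list file path below are
+  # hypothetical):
+  #   write_native_libraries_java("native_libraries_java") {
+  #     use_final_fields = true
+  #     enable_chromium_linker = false
+  #     native_libraries_list_file = "$target_gen_dir/native_libraries.list"
+  #   }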
+ template("write_native_libraries_java") { + _native_libraries_file = "$target_gen_dir/$target_name.srcjar" + if (current_cpu == "arm" || current_cpu == "arm64") { + _cpu_family = "CPU_FAMILY_ARM" + } else if (current_cpu == "x86" || current_cpu == "x64") { + _cpu_family = "CPU_FAMILY_X86" + } else if (current_cpu == "mipsel" || current_cpu == "mips64el") { + _cpu_family = "CPU_FAMILY_MIPS" + } else { + assert(false, "Unsupported CPU family") + } + + action_with_pydeps(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ]) + script = "//build/android/gyp/write_native_libraries_java.py" + outputs = [ _native_libraries_file ] + args = [ + "--output", + rebase_path(_native_libraries_file, root_build_dir), + "--cpu-family", + _cpu_family, + ] + if (invoker.use_final_fields) { + # Write native_libraries_list_file via depfile rather than specifyin it + # as a dep in order allow R8 to run in parallel with native compilation. + args += [ "--final" ] + if (defined(invoker.native_libraries_list_file)) { + depfile = "$target_gen_dir/$target_name.d" + args += [ + "--native-libraries-list", + rebase_path(invoker.native_libraries_list_file, root_build_dir), + "--depfile", + rebase_path(depfile, root_build_dir), + ] + } + if (defined(invoker.main_component_library)) { + args += [ + "--main-component-library", + invoker.main_component_library, + ] + } + if (defined(invoker.enable_chromium_linker) && + invoker.enable_chromium_linker) { + args += [ "--enable-chromium-linker" ] + } + } + } + } + + # Declare a target for a set of Android resources generated at build + # time and stored in a single zip archive. The content of the archive + # should match the layout of a regular Android res/ folder (but the + # archive should not include a top-level res/ directory). + # + # Note that there is no associated .srcjar, R.txt or package name + # associated with this target. + # + # Variables: + # generated_resources_zip: Generated zip archive path. + # generating_target: Name of the target generating + # generated_resources_zip. This rule will check that it is part + # of its outputs. + # deps: Specifies the dependencies of this target. Any Android resources + # listed here will be also be included *after* this one when compiling + # all resources for a final apk or junit binary. This is useful to + # ensure that the resources of the current target override those of the + # dependency as well (and would not work if you have these deps to the + # generating target's dependencies). 
+ # + # Example + # _zip_archive = "$target_gen_dir/${target_name}.resources_zip" + # + # action("my_resources__create_zip") { + # _depfile = "$target_gen_dir/${target_name}.d" + # script = "//build/path/to/create_my_resources_zip.py" + # args = [ + # "--depfile", rebase_path(_depfile, root_build_dir), + # "--output-zip", rebase_path(_zip_archive, root_build_dir), + # ] + # inputs = [] + # outputs = [ _zip_archive ] + # depfile = _depfile + # } + # + # android_generated_resources("my_resources") { + # generated_resources_zip = _zip_archive + # generating_target = ":my_resources__create_zip" + # } + # + template("android_generated_resources") { + forward_variables_from(invoker, [ "testonly" ]) + _build_config = "$target_gen_dir/${target_name}.build_config.json" + _rtxt_out_path = "$target_gen_dir/${target_name}.R.txt" + write_build_config("$target_name$build_config_target_suffix") { + forward_variables_from(invoker, [ "resource_overlay" ]) + + build_config = _build_config + resources_zip = invoker.generated_resources_zip + type = "android_resources" + if (defined(invoker.deps)) { + possible_config_deps = invoker.deps + } + r_text = _rtxt_out_path + } + action_with_pydeps(target_name) { + forward_variables_from(invoker, [ "visibility" ]) + public_deps = [ + ":$target_name$build_config_target_suffix", + invoker.generating_target, + ] + inputs = [ invoker.generated_resources_zip ] + outputs = [ _rtxt_out_path ] + script = "//build/android/gyp/create_r_txt.py" + args = [ + "--resources-zip-path", + rebase_path(invoker.generated_resources_zip, root_build_dir), + "--rtxt-path", + rebase_path(_rtxt_out_path, root_build_dir), + ] + } + } + + # Declare a target for processing Android resources as Jinja templates. + # + # This takes an Android resource directory where each resource is a Jinja + # template, processes each template, then packages the results in a zip file + # which can be consumed by an android resources, library, or apk target. + # + # If this target is included in the deps of an android resources/library/apk, + # the resources will be included with that target. + # + # Variables + # resources: The list of resource files to process. + # res_dir: The resource directory containing the resources. + # variables: (Optional) A list of variables to make available to the template + # processing environment, e.g. ["name=foo", "color=red"]. 
+ # + # Example + # jinja_template_resources("chrome_public_template_resources") { + # res_dir = "res_template" + # resources = ["res_template/xml/syncable.xml"] + # variables = ["color=red"] + # } + template("jinja_template_resources") { + _resources_zip = "$target_out_dir/${target_name}.resources.zip" + _generating_target_name = "${target_name}__template" + + action_with_pydeps(_generating_target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ]) + inputs = invoker.resources + script = "//build/android/gyp/jinja_template.py" + + outputs = [ _resources_zip ] + + _rebased_resources = rebase_path(invoker.resources, root_build_dir) + args = [ + "--inputs=${_rebased_resources}", + "--inputs-base-dir", + rebase_path(invoker.res_dir, root_build_dir), + "--outputs-zip", + rebase_path(_resources_zip, root_build_dir), + "--check-includes", + ] + if (defined(invoker.variables)) { + variables = invoker.variables + args += [ "--variables=${variables}" ] + } + } + + android_generated_resources(target_name) { + forward_variables_from(invoker, + TESTONLY_AND_VISIBILITY + [ + "deps", + "resource_overlay", + ]) + generating_target = ":$_generating_target_name" + generated_resources_zip = _resources_zip + } + } + + # Declare a prebuilt android native library. + # + # This takes a base directory and library name and then looks for the library + # in <base_dir>/$android_app_abi/<library_name>. + # + # If you depend on this target, the library is stripped and output to the + # same locations as non-prebuilt libraries. + # + # Variables + # base_dir: Directory where all ABIs of the library live. + # lib_name: Name of the library .so file. + # + # Example + # android_native_prebuilt("elements_native") { + # base_dir = "//third_party/elements" + # lib_name = "elements.so" + # } + template("android_native_prebuilt") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, + [ + "deps", + "testonly", + ]) + script = "//build/android/gyp/process_native_prebuilt.py" + _lib_path = "${invoker.base_dir}/$android_app_abi/${invoker.lib_name}" + _stripped_output_path = "$root_out_dir/${invoker.lib_name}" + _unstripped_output_path = + "$root_out_dir/lib.unstripped/${invoker.lib_name}" + inputs = [ _lib_path ] + outputs = [ + _stripped_output_path, + _unstripped_output_path, + ] + + # Add unstripped output to runtime deps for use by bots during stacktrace + # symbolization. + data = [ _unstripped_output_path ] + + _rebased_lib_path = rebase_path(_lib_path, root_build_dir) + _rebased_stripped_output_path = + rebase_path(_stripped_output_path, root_build_dir) + _rebased_unstripped_output_path = + rebase_path(_unstripped_output_path, root_build_dir) + _strip_tool_path = + rebase_path("//buildtools/third_party/eu-strip/bin/eu-strip", + root_build_dir) + + args = [ + "--strip-path=$_strip_tool_path", + "--input-path=$_rebased_lib_path", + "--stripped-output-path=$_rebased_stripped_output_path", + "--unstripped-output-path=$_rebased_unstripped_output_path", + ] + } + } + + # Declare an Android resources target + # + # This creates a resources zip file that will be used when building an Android + # library or apk and included into a final apk. + # + # To include these resources in a library/apk, this target should be listed in + # the library's deps. A library/apk will also include any resources used by its + # own dependencies. + # + # Variables + # sources: List of resource files for this target. + # deps: Specifies the dependencies of this target. 
Any Android resources + # listed in deps will be included by libraries/apks that depend on this + # target. + # alternative_android_sdk_dep: Optional. Alternative Android system + # android java target to use. + # android_manifest: AndroidManifest.xml for this target (optional). Will be + # merged into apks that directly or indirectly depend on this target. + # android_manifest_dep: Target that generates AndroidManifest (if applicable) + # custom_package: java package for generated .java files. + # allow_missing_resources: Do not fail if a resource exists in a directory + # but is not listed in sources. + # shared_resources: If true make a resource package that can be loaded by a + # different application at runtime to access the package's resources. + # resource_overlay: Whether the resources in 'sources' should override + # resources with the same name. Does not affect the behaviour of any + # android_resources() deps of this target. If a target with + # resource_overlay=true depends on another target with + # resource_overlay=true the target with the dependency overrides the + # other. + # r_text_file: (optional) path to pre-generated R.txt to be used when + # generating R.java instead of resource-based aapt-generated one. + # recursive_resource_deps: (optional) whether deps should be walked + # recursively to find resource deps. + # + # Example: + # android_resources("foo_resources") { + # deps = [":foo_strings_grd"] + # sources = [ + # "res/drawable/foo1.xml", + # "res/drawable/foo2.xml", + # ] + # custom_package = "org.chromium.foo" + # } + # + # android_resources("foo_resources_overrides") { + # deps = [":foo_resources"] + # sources = [ + # "res_overrides/drawable/foo1.xml", + # "res_overrides/drawable/foo2.xml", + # ] + # } + template("android_resources") { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + + _base_path = "$target_gen_dir/$target_name" + if (defined(invoker.v14_skip)) { + not_needed(invoker, [ "v14_skip" ]) + } + + _res_sources_path = "$target_gen_dir/${invoker.target_name}.res.sources" + + _resources_zip = "$target_out_dir/$target_name.resources.zip" + _r_text_out_path = _base_path + "_R.txt" + _build_config = _base_path + ".build_config.json" + _build_config_target_name = "$target_name$build_config_target_suffix" + + _deps = [] + if (defined(invoker.deps)) { + _deps += invoker.deps + } + + if (defined(invoker.alternative_android_sdk_dep)) { + _android_sdk_dep = invoker.alternative_android_sdk_dep + } else { + _android_sdk_dep = default_android_sdk_dep + } + + _resource_files = [] + if (defined(invoker.sources)) { + _resource_files += invoker.sources + } + + _rebased_resource_files = rebase_path(_resource_files, root_build_dir) + write_file(_res_sources_path, _rebased_resource_files) + + # This is necessary so we only lint chromium resources. + if (defined(invoker.chromium_code)) { + _chromium_code = invoker.chromium_code + } else { + # Default based on whether target is in third_party. 
+     _chromium_code =
+         filter_exclude([ get_label_info(":$target_name", "dir") ],
+                        [ "*\bthird_party\b*" ]) != []
+   }
+
+   write_build_config(_build_config_target_name) {
+     type = "android_resources"
+     build_config = _build_config
+     resources_zip = _resources_zip
+     res_sources_path = _res_sources_path
+     chromium_code = _chromium_code
+
+     forward_variables_from(invoker,
+                            [
+                              "android_manifest",
+                              "android_manifest_dep",
+                              "custom_package",
+                              "mergeable_android_manifests",
+                              "resource_overlay",
+                              "recursive_resource_deps",
+                            ])
+
+     r_text = _r_text_out_path
+     possible_config_deps = _deps + [ _android_sdk_dep ]
+
+     # Always merge manifests from resources.
+     # * Might want to change this at some point for consistency and clarity,
+     #   but keeping for backwards-compatibility.
+     if (!defined(mergeable_android_manifests) && defined(android_manifest)) {
+       mergeable_android_manifests = [ android_manifest ]
+     }
+   }
+
+   prepare_resources(target_name) {
+     forward_variables_from(invoker,
+                            [
+                              "allow_missing_resources",
+                              "public_deps",
+                              "strip_drawables",
+                              "visibility",
+                            ])
+     _lib_deps = filter_exclude(filter_include(_deps, java_library_patterns),
+                                java_resource_patterns)
+     if (defined(public_deps)) {
+       # Since java library targets depend directly on sub-targets rather than
+       # top-level targets, public_deps are not properly propagated, at least
+       # in terms of the "did you depend on the target that generates your
+       # inputs" GN check.
+       assert(filter_include(public_deps, java_target_patterns) == [],
+              "Java targets should use deps, not public_deps. " +
+                  "target=${target_name}, public_deps=${public_deps}")
+     }
+
+     # Depend on non-library deps and on __assetres subtargets of library deps.
+     deps = filter_exclude(_deps, _lib_deps) + [ _android_sdk_dep ]
+     foreach(_lib_dep, _lib_deps) {
+       # Expand //foo/java -> //foo/java:java
+       _lib_dep = get_label_info(_lib_dep, "label_no_toolchain")
+       deps += [ "${_lib_dep}__assetres" ]
+     }
+
+     res_sources_path = _res_sources_path
+     sources = _resource_files
+
+     resources_zip = _resources_zip
+     r_text_out_path = _r_text_out_path
+
+     if (defined(invoker.r_text_file)) {
+       r_text_in_path = invoker.r_text_file
+     }
+   }
+ }
+
+ # Declare an Android assets target.
+ #
+ # Defines a set of files to include as assets in a dependent apk.
+ #
+ # To include these assets in an apk, this target should be listed in
+ # the apk's deps, or in the deps of a library target used by an apk.
+ #
+ # Variables
+ #   deps: Specifies the dependencies of this target. Any Android assets
+ #     listed in deps will be included by libraries/apks that depend on this
+ #     target.
+ #   sources: List of files to include as assets.
+ #   renaming_sources: List of files to include as assets and be renamed.
+ #   renaming_destinations: List of asset paths for files in renaming_sources.
+ #   disable_compression: Whether to disable compression for files that are
+ #     known to be compressible (default: false).
+ #   treat_as_locale_paks: Causes base's BuildConfig.java to consider these
+ #     assets to be locale paks.
+ #
+ # Example:
+ #   android_assets("content_shell_assets") {
+ #     deps = [
+ #       ":generates_foo",
+ #       ":other_assets",
+ #     ]
+ #     sources = [
+ #       "//path/asset1.png",
+ #       "//path/asset2.png",
+ #       "$target_gen_dir/foo.dat",
+ #     ]
+ #   }
+ #
+ #   android_assets("overriding_content_shell_assets") {
+ #     deps = [ ":content_shell_assets" ]
+ #     # Override foo.dat from content_shell_assets.
+ # sources = [ "//custom/foo.dat" ] + # renaming_sources = [ "//path/asset2.png" ] + # renaming_destinations = [ "renamed/asset2.png" ] + # } + template("android_assets") { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + + _build_config = "$target_gen_dir/$target_name.build_config.json" + _build_config_target_name = "$target_name$build_config_target_suffix" + + _sources = [] + if (defined(invoker.sources)) { + _sources = invoker.sources + } + _renaming_sources = [] + if (defined(invoker.renaming_sources)) { + _renaming_sources = invoker.renaming_sources + } + write_build_config(_build_config_target_name) { + type = "android_assets" + build_config = _build_config + + forward_variables_from(invoker, + [ + "disable_compression", + "treat_as_locale_paks", + ]) + + if (defined(invoker.deps)) { + possible_config_deps = invoker.deps + } + + if (_sources != []) { + asset_sources = _sources + } + if (_renaming_sources != []) { + assert(defined(invoker.renaming_destinations)) + _source_count = 0 + foreach(_, _renaming_sources) { + _source_count += 1 + } + _dest_count = 0 + foreach(_, invoker.renaming_destinations) { + _dest_count += 1 + } + assert( + _source_count == _dest_count, + "android_assets() renaming_sources.length != renaming_destinations.length") + asset_renaming_sources = _renaming_sources + asset_renaming_destinations = invoker.renaming_destinations + } + } + + # Use an action in order to mark sources as "inputs" to a GN target so that + # GN will fail if the appropriate deps do not exist, and so that "gn refs" + # will know about the sources. We do not add these inputs & deps to the + # __build_config target because we want building .build_config.json files + # to be fast (and because write_build_config.py does not need the files to + # exist). + _all_sources = _sources + _renaming_sources + if (_all_sources != []) { + action(target_name) { + forward_variables_from(invoker, [ "deps" ]) + public_deps = [ ":$_build_config_target_name" ] + + script = "//build/android/gyp/validate_inputs.py" + inputs = _all_sources + outputs = [ "$target_gen_dir/$target_name.stamp" ] + args = [ + "--stamp", + rebase_path(outputs[0], root_build_dir), + ] + rebase_path(_all_sources, root_build_dir) + } + } else { + group(target_name) { + forward_variables_from(invoker, [ "deps" ]) + public_deps = [ ":$_build_config_target_name" ] + } + } + } + + # Declare a group() that supports forwarding java dependency information. 
+ #
+ # Example
+ #   java_group("conditional_deps") {
+ #     if (enable_foo) {
+ #       deps = [":foo_java"]
+ #     }
+ #   }
+ template("java_group") {
+   forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+   _build_config_vars = [
+     "input_jars_paths",
+     "preferred_dep",
+     "mergeable_android_manifests",
+     "proguard_configs",
+     "requires_android",
+   ]
+   _invoker_deps = []
+   if (defined(invoker.deps)) {
+     _invoker_deps += invoker.deps
+   }
+   if (defined(invoker.public_deps)) {
+     _invoker_deps += invoker.public_deps
+   }
+   write_build_config("$target_name$build_config_target_suffix") {
+     forward_variables_from(invoker, _build_config_vars)
+     type = "group"
+     build_config = "$target_gen_dir/${invoker.target_name}.build_config.json"
+     supports_android = true
+     possible_config_deps = _invoker_deps
+   }
+
+   _assetres_deps = filter_include(_invoker_deps, java_resource_patterns)
+   _invoker_deps_minus_assetres = filter_exclude(_invoker_deps, _assetres_deps)
+   _lib_deps =
+       filter_include(_invoker_deps_minus_assetres, java_library_patterns)
+
+   _expanded_lib_deps = []
+   foreach(_lib_dep, _lib_deps) {
+     _expanded_lib_deps += [ get_label_info(_lib_dep, "label_no_toolchain") ]
+   }
+   foreach(_group_name,
+           [
+             "assetres",
+             "header",
+             "host",
+             "validate",
+           ]) {
+     group("${target_name}__$_group_name") {
+       deps = []
+       foreach(_lib_dep, _expanded_lib_deps) {
+         deps += [ "${_lib_dep}__${_group_name}" ]
+       }
+       if (_group_name == "assetres") {
+         deps += _assetres_deps
+       }
+     }
+   }
+
+   group(target_name) {
+     forward_variables_from(invoker,
+                            "*",
+                            _build_config_vars + TESTONLY_AND_VISIBILITY)
+     if (!defined(deps)) {
+       deps = []
+     }
+     deps += [ ":$target_name$build_config_target_suffix" ]
+   }
+ }
+
+ # Declare a Java executable target
+ #
+ # Same as java_library, but also creates a wrapper script within
+ # $root_out_dir/bin.
+ #
+ # Supports all variables of java_library(), plus:
+ #   main_class: When specified, a wrapper script is created within
+ #     $root_build_dir/bin to launch the binary with the given class as the
+ #     entrypoint.
+ #   wrapper_script_name: Filename for the wrapper script (default=target_name)
+ #   wrapper_script_args: List of additional arguments for the wrapper script.
+ #
+ # Example
+ #   java_binary("foo") {
+ #     sources = [ "org/chromium/foo/FooMain.java" ]
+ #     deps = [ ":bar_java" ]
+ #     main_class = "org.chromium.foo.FooMain"
+ #   }
+ #
+ #   java_binary("foo") {
+ #     jar_path = "lib/prebuilt.jar"
+ #     deps = [ ":bar_java" ]
+ #     main_class = "org.chromium.foo.FooMain"
+ #   }
+ template("java_binary") {
+   java_library_impl(target_name) {
+     forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
+     forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+     type = "java_binary"
+   }
+ }
+
+ # Declare a Java Annotation Processor.
+ #
+ # Supports all variables of java_library(), plus:
+ #   jar_path: Path to a prebuilt jar. Mutually exclusive with sources &
+ #     srcjar_deps.
+ #   main_class: The fully-qualified class name of the processor's entry
+ #     point.
+ # + # Example + # java_annotation_processor("foo_processor") { + # sources = [ "org/chromium/foo/FooProcessor.java" ] + # deps = [ ":bar_java" ] + # main_class = "org.chromium.foo.FooProcessor" + # } + # + # java_annotation_processor("foo_processor") { + # jar_path = "lib/prebuilt.jar" + # main_class = "org.chromium.foo.FooMain" + # } + # + # java_library("...") { + # annotation_processor_deps = [":foo_processor"] + # } + # + template("java_annotation_processor") { + java_library_impl(target_name) { + forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + type = "java_annotation_processor" + } + } + + # Declare a Robolectric host side test binary. + # + # This target creates an executable from java code for running as a + # Robolectric test suite. The executable will be in the output folder's /bin/ + # directory. + # + # Supports all variables of java_binary(). + # + # Example + # robolectric_binary("foo") { + # sources = [ "org/chromium/foo/FooTest.java" ] + # deps = [ ":bar_java" ] + # } + template("robolectric_binary") { + testonly = true + + _main_class = "org.chromium.testing.local.JunitTestMain" + _build_config = "$target_gen_dir/$target_name.build_config.json" + _build_config_target_name = "$target_name$build_config_target_suffix" + _java_binary_target_name = "${target_name}__java_binary" + + _invoker_deps = [ + "//testing/android/junit:junit_test_support", + "//third_party/android_deps:robolectric_all_java", + "//third_party/junit", + "//third_party/mockito:mockito_java", + ] + if (defined(invoker.deps)) { + _invoker_deps += invoker.deps + } + _non_java_deps = filter_exclude(_invoker_deps, java_target_patterns) + _java_assetres_deps = [ ":${_java_binary_target_name}__assetres" ] + + if (defined(invoker.alternative_android_sdk_dep)) { + _android_sdk_dep = invoker.alternative_android_sdk_dep + } else { + _android_sdk_dep = default_android_sdk_dep + } + + # A package name or a manifest is required to have resources. This is + # added so that junit tests that do not care about the package name can + # still use resources without having to explicitly set one. 
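+   # For example (illustrative): a robolectric_binary() that sets neither
+   # package_name nor android_manifest falls through to the placeholder
+   # package "no.manifest.configured" assigned below.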
+ if (defined(invoker.package_name)) { + _package_name = invoker.package_name + } else if (!defined(invoker.android_manifest)) { + _package_name = "no.manifest.configured" + } + + _merge_manifest_target_name = "${target_name}__merge_manifests" + _android_manifest = + "$target_gen_dir/$target_name.AndroidManifest.merged.xml" + + merge_manifests(_merge_manifest_target_name) { + if (defined(invoker.android_manifest)) { + input_manifest = invoker.android_manifest + } else { + input_manifest = "//build/android/AndroidManifest.xml" + } + + if (defined(_package_name)) { + manifest_package = _package_name + } + output_manifest = _android_manifest + build_config = _build_config + min_sdk_version = default_min_sdk_version + target_sdk_version = android_sdk_version + deps = _non_java_deps + _java_assetres_deps + + [ ":$_build_config_target_name" ] + if (defined(invoker.android_manifest_dep)) { + deps += [ invoker.android_manifest_dep ] + } + } + + _resource_arsc_output = "${target_out_dir}/${target_name}.ap_" + _compile_resources_target_name = "${target_name}__compile_resources" + compile_resources(_compile_resources_target_name) { + deps = _non_java_deps + _java_assetres_deps + + [ ":$_merge_manifest_target_name" ] + android_sdk_dep = _android_sdk_dep + build_config_dep = ":$_build_config_target_name" + build_config = _build_config + if (defined(_package_name)) { + rename_manifest_package = _package_name + } + android_manifest = _android_manifest + arsc_output = _resource_arsc_output + min_sdk_version = default_min_sdk_version + target_sdk_version = android_sdk_version + } + + # apkbuilder step needed only to add android assets to the .ap_ file. + _apkbuilder_output = "${target_out_dir}/${target_name}.robo.ap_" + _apkbuilder_target_name = "${target_name}__apkbuilder" + package_apk("$_apkbuilder_target_name") { + build_config = _build_config + min_sdk_version = default_min_sdk_version + deps = _java_assetres_deps + [ + ":$_build_config_target_name", + ":$_compile_resources_target_name", + ] + + is_robolectric_apk = true + packaged_resources_path = _resource_arsc_output + output_apk_path = _apkbuilder_output + } + + # Some may want to disable this to remove dependency on //base + # (JNI generator is in //base). + _generate_final_jni = + !defined(invoker.generate_final_jni) || invoker.generate_final_jni + if (_generate_final_jni) { + _jni_srcjar_target_name = "${target_name}__final_jni" + _outer_target_name = target_name + generate_jni_registration(_jni_srcjar_target_name) { + enable_native_mocks = true + require_native_mocks = !defined(invoker.shared_libraries) + targets = [ ":$_outer_target_name" ] + } + + if (defined(invoker.shared_libraries)) { + foreach(_dep, invoker.shared_libraries) { + assert( + string_replace(_dep, robolectric_toolchain, "") != _dep, + "$target_name has shared_libraries with incorrect toolchain. " + + "Should contain (\$robolectric_toolchain) suffix: $_dep") + } + + # Write shared library output files of all dependencies to a file. Those + # will be the shared libraries packaged into the APK. 
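+       # (generated_file() gathers the "shared_libraries" metadata entries
+       # from the transitive deps, stops walking at deps that define
+       # "shared_libraries_barrier", and writes the collected .so paths
+       # rebased against the build dir.)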
+       _shared_library_list_file = "$target_gen_dir/$target_name.native_libs"
+       generated_file("${target_name}__shared_library_list") {
+         deps = invoker.shared_libraries
+         outputs = [ _shared_library_list_file ]
+         data_keys = [ "shared_libraries" ]
+         walk_keys = [ "shared_libraries_barrier" ]
+         rebase = root_build_dir
+       }
+     }
+     _native_libraries_target_name = "${target_name}__native_libraries"
+     write_native_libraries_java(_native_libraries_target_name) {
+       enable_chromium_linker = false
+       use_final_fields = true
+       if (defined(_shared_library_list_file)) {
+         native_libraries_list_file = _shared_library_list_file
+       }
+     }
+   }
+
+   java_library_impl(_java_binary_target_name) {
+     forward_variables_from(invoker,
+                            "*",
+                            TESTONLY_AND_VISIBILITY + [
+                              "deps",
+                              "shared_libraries",
+                            ])
+     type = "robolectric_binary"
+     main_target_name = invoker.target_name
+
+     deps = _invoker_deps
+     testonly = true
+     main_class = _main_class
+     wrapper_script_name = "helper/$main_target_name"
+
+     # As of April 2021, adding -XX:TieredStopAtLevel=1 does not affect the
+     # wall time of a single robolectric shard, but does reduce the CPU time
+     # by 66%, which makes sharding more effective.
+     tiered_stop_at_level_one = true
+
+     is_robolectric = true
+     include_android_sdk = true
+     alternative_android_sdk_dep =
+         "//third_party/robolectric:robolectric_test_sdk_java"
+
+     if (!defined(srcjar_deps)) {
+       srcjar_deps = []
+     }
+     srcjar_deps += [
+       ":$_compile_resources_target_name",
+       "//build/android:build_config_for_testing_gen",
+     ]
+     if (_generate_final_jni) {
+       srcjar_deps += [
+         ":$_jni_srcjar_target_name",
+         ":$_native_libraries_target_name",
+       ]
+     }
+   }
+
+   test_runner_script(target_name) {
+     forward_variables_from(invoker,
+                            [
+                              "assert_no_deps",
+                              "visibility",
+                            ])
+     test_name = invoker.target_name
+     test_suite = invoker.target_name
+     test_type = "junit"
+     ignore_all_data_deps = true
+     resource_apk = _apkbuilder_output
+     deps = [
+       ":$_apkbuilder_target_name",
+       ":$_build_config_target_name",
+       ":${_java_binary_target_name}__host",
+       ":${_java_binary_target_name}__java_binary_script",
+       ":${_java_binary_target_name}__validate",
+       "//third_party/robolectric:robolectric_runtime_jars",
+     ]
+     if (defined(invoker.shared_libraries)) {
+       data_deps = invoker.shared_libraries
+     }
+
+     # Add non-library deps, since the __host target does not depend on them.
+     deps += filter_exclude(_invoker_deps, java_library_patterns)
+   }
+ }
+
+ # Declare a java library target
+ #
+ # Variables
+ #   deps: Specifies the dependencies of this target. Java targets in this list
+ #     will be added to the javac classpath.
+ #   public_deps: Dependencies that this target exposes as part of its public
+ #     API. Targets listed in public_deps do not need to be repeated in deps.
+ #   annotation_processor_deps: List of java_annotation_processor targets to
+ #     use when compiling.
+ #
+ #   jar_path: Path to a prebuilt jar. Mutually exclusive with sources &
+ #     srcjar_deps.
+ #   sources: List of .java files included in this library.
+ #   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+ #     will be added to sources and be included in this library.
+ #
+ #   input_jars_paths: A list of paths to the jars that should be included
+ #     in the compile-time classpath. These are in addition to library .jars
+ #     that appear in deps.
+ #
+ #   chromium_code: If true, extra analysis warnings/errors will be enabled.
+ #   enable_errorprone: If true, enables the errorprone compiler.
+ #   skip_build_server: If true, avoids sending tasks to the build server.
+ # + # jar_excluded_patterns: List of patterns of .class files to exclude. + # jar_included_patterns: List of patterns of .class files to include. + # When omitted, all classes not matched by jar_excluded_patterns are + # included. When specified, all non-matching .class files are stripped. + # + # low_classpath_priority: Indicates that the library should be placed at the + # end of the classpath. The default classpath order has libraries ordered + # before the libraries that they depend on. 'low_classpath_priority' is + # useful when one java_library() overrides another via + # 'jar_excluded_patterns' and the overriding library does not depend on + # the overridee. + # + # output_name: File name for the output .jar (not including extension). + # Defaults to the input .jar file name. + # + # proguard_configs: List of proguard configs to use in final apk step for + # any apk that depends on this library. + # + # supports_android: If true, Android targets (android_library, android_apk) + # may depend on this target. Note: if true, this target must only use the + # subset of Java available on Android. + # bypass_platform_checks: Disables checks about cross-platform (Java/Android) + # dependencies for this target. This will allow depending on an + # android_library target, for example. + # enable_desugar: If false, disables desugaring of lambdas, etc. Use this + # only when you are sure the library does not require desugaring. E.g. + # to hide warnings shown from desugaring. + # + # additional_jar_files: Use to package additional files (Java resources) + # into the output jar. Pass a list of length-2 lists with format: + # [ [ path_to_file, path_to_put_in_jar ] ] + # + # javac_args: Additional arguments to pass to javac. + # errorprone_args: Additional arguments to pass to errorprone. + # + # data_deps, testonly + # + # Example + # java_library("foo_java") { + # sources = [ + # "org/chromium/foo/Foo.java", + # "org/chromium/foo/FooInterface.java", + # "org/chromium/foo/FooService.java", + # ] + # deps = [ + # ":bar_java" + # ] + # srcjar_deps = [ + # ":foo_generated_enum" + # ] + # jar_excluded_patterns = [ + # "*/FooService.class", "org/chromium/FooService\$*.class" + # ] + # } + template("java_library") { + java_library_impl(target_name) { + forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + type = "java_library" + } + } + + # Declare a java library target for a prebuilt jar + # + # Supports all variables of java_library(). + # + # Example + # java_prebuilt("foo_java") { + # jar_path = "foo.jar" + # deps = [ + # ":foo_resources", + # ":bar_java" + # ] + # } + template("java_prebuilt") { + java_library_impl(target_name) { + forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + type = "java_library" + } + } + + # Combines all dependent .jar files into a single .jar file. + # + # Variables: + # output: Path to the output jar. + # use_interface_jars: Use all dependent interface .jars rather than + # implementation .jars. + # use_unprocessed_jars: Use unprocessed / undesugared .jars. + # direct_deps_only: Do not recurse on deps. + # jar_excluded_patterns (optional) + # List of globs for paths to exclude. + # + # Example + # dist_jar("lib_fatjar") { + # deps = [ ":my_java_lib" ] + # output = "$root_build_dir/MyLibrary.jar" + # } + template("dist_jar") { + # TODO(crbug.com/1042017): Remove. 
+ not_needed(invoker, [ "no_build_hooks" ]) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + _use_interface_jars = + defined(invoker.use_interface_jars) && invoker.use_interface_jars + _use_unprocessed_jars = + defined(invoker.use_unprocessed_jars) && invoker.use_unprocessed_jars + _direct_deps_only = + defined(invoker.direct_deps_only) && invoker.direct_deps_only + assert(!(_use_unprocessed_jars && _use_interface_jars), + "Cannot set both use_interface_jars and use_unprocessed_jars") + + _jar_target_name = target_name + + if (defined(invoker.build_config)) { + _build_config = invoker.build_config + _build_config_dep = invoker.build_config_dep + } else { + _build_config = "$target_gen_dir/$target_name.build_config.json" + _build_config_target_name = "$target_name$build_config_target_suffix" + _build_config_dep = ":$_build_config_target_name" + + write_build_config(_build_config_target_name) { + type = "dist_jar" + supports_android = + !defined(invoker.supports_android) || invoker.supports_android + requires_android = + defined(invoker.requires_android) && invoker.requires_android + possible_config_deps = invoker.deps + build_config = _build_config + } + } + + _rebased_build_config = rebase_path(_build_config, root_build_dir) + action_with_pydeps(_jar_target_name) { + forward_variables_from(invoker, [ "data" ]) + script = "//build/android/gyp/zip.py" + depfile = "$target_gen_dir/$target_name.d" + deps = [ _build_config_dep ] + + if (_use_interface_jars) { + _lib_deps = + filter_exclude(filter_include(invoker.deps, java_library_patterns), + java_resource_patterns) + _other_deps = filter_exclude(invoker.deps, _lib_deps) + foreach(_lib_dep, _lib_deps) { + # Expand //foo/java -> //foo/java:java + _lib_dep = get_label_info(_lib_dep, "label_no_toolchain") + deps += [ "${_lib_dep}__header" ] + } + deps += _other_deps + } else { + deps += invoker.deps + } + + inputs = [ _build_config ] + + outputs = [ invoker.output ] + + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--output", + rebase_path(invoker.output, root_build_dir), + "--no-compress", + ] + + if (_direct_deps_only) { + if (_use_interface_jars) { + args += [ "--input-zips=@FileArg($_rebased_build_config:javac:interface_classpath)" ] + } else if (_use_unprocessed_jars) { + args += [ + "--input-zips=@FileArg($_rebased_build_config:javac:classpath)", + ] + } else { + assert( + false, + "direct_deps_only does not work without use_interface_jars or use_unprocessed_jars") + } + } else { + if (_use_interface_jars) { + args += [ "--input-zips=@FileArg($_rebased_build_config:dist_jar:all_interface_jars)" ] + } else if (_use_unprocessed_jars) { + args += [ "--input-zips=@FileArg($_rebased_build_config:deps_info:javac_full_classpath)" ] + } else { + args += [ "--input-zips=@FileArg($_rebased_build_config:deps_info:device_classpath)" ] + } + } + + _excludes = [] + if (defined(invoker.jar_excluded_patterns)) { + _excludes += invoker.jar_excluded_patterns + } + if (_use_interface_jars) { + # Turbine adds files like: META-INF/TRANSITIVE/.../Foo.class + # These confuse proguard: https://crbug.com/1081443 + _excludes += [ "META-INF/*" ] + } else { + # Manifest files will never be correct when merging jars. + _excludes += [ "META-INF/*.MF" ] + } + if (_excludes != []) { + args += [ "--input-zips-excluded-globs=$_excludes" ] + } + } + } + + # Combines all dependent .jar files into a single proguarded .dex file. + # + # Variables: + # output: Path to the output dex. + # proguard_enabled: Whether to enable R8. 
+ #   proguard_configs: List of proguard configs.
+ #   proguard_enable_obfuscation: Whether to enable obfuscation (default=true).
+ #   package_name: Used in the Proguard map ID.
+ #   version_code: Used in the Proguard map ID.
+ #
+ # Example
+ #   dist_dex("lib_fatdex") {
+ #     deps = [ ":my_java_lib" ]
+ #     output = "$root_build_dir/MyLibrary.dex"
+ #   }
+ template("dist_dex") {
+   _deps = [ default_android_sdk_dep ]
+   if (defined(invoker.deps)) {
+     _deps += invoker.deps
+   }
+
+   _build_config = "$target_gen_dir/$target_name.build_config.json"
+   _build_config_target_name = "$target_name$build_config_target_suffix"
+
+   write_build_config(_build_config_target_name) {
+     type = "dist_jar"
+     forward_variables_from(invoker,
+                            [
+                              "proguard_configs",
+                              "proguard_enabled",
+                            ])
+     supports_android = true
+     requires_android = true
+     possible_config_deps = _deps
+     build_config = _build_config
+   }
+
+   dex(target_name) {
+     forward_variables_from(invoker,
+                            TESTONLY_AND_VISIBILITY + [
+                              "data",
+                              "data_deps",
+                              "package_name",
+                              "proguard_configs",
+                              "proguard_enabled",
+                              "proguard_enable_obfuscation",
+                              "min_sdk_version",
+                              "version_code",
+                            ])
+     deps = [ ":$_build_config_target_name" ] + _deps
+     build_config = _build_config
+     enable_multidex = false
+     output = invoker.output
+     if (defined(proguard_enabled) && proguard_enabled) {
+       # The individual dependencies would have caught real missing deps in
+       # their respective dex steps. False positives that were suppressed at
+       # per-target dex steps are emitted here since this is using jar files
+       # rather than dex files.
+       ignore_desugar_missing_deps = true
+     } else {
+       _rebased_build_config = rebase_path(_build_config, root_build_dir)
+       input_dex_filearg =
+           "@FileArg(${_rebased_build_config}:deps_info:all_dex_files)"
+     }
+   }
+ }
+
+ # Creates an Android .aar library.
+ #
+ # Currently supports:
+ #   * AndroidManifest.xml
+ #   * classes.jar
+ #   * jni/
+ #   * res/
+ #   * R.txt
+ #   * proguard.txt
+ # Does not yet support:
+ #   * public.txt
+ #   * annotations.zip
+ #   * assets/
+ # See: https://developer.android.com/studio/projects/android-library.html#aar-contents
+ #
+ # Variables:
+ #   output: Path to the output .aar.
+ #   proguard_configs: List of proguard configs (optional).
+ #   android_manifest: Path to AndroidManifest.xml (optional).
+ #   native_libraries: List of native libraries (optional).
+ #   direct_deps_only: Do not recurse on deps (optional, defaults false).
+ #   jar_excluded_patterns: List of globs for paths to exclude (optional).
+ #   jar_included_patterns: List of globs for paths to include (optional).
+ #   resource_included_patterns: List of globs for resource paths to include
+ #     (optional).
+ #   generate_final_jni: If defined and true, generate the final
+ #     `GEN_JNI.java` and include it in the output `.aar` (optional).
+ #
+ # Example
+ #   dist_aar("my_aar") {
+ #     deps = [ ":my_java_lib" ]
+ #     output = "$root_build_dir/MyLibrary.aar"
+ #   }
+ template("dist_aar") {
+   forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+
+   _direct_deps_only =
+       defined(invoker.direct_deps_only) && invoker.direct_deps_only
+
+   _deps = []
+
+   _generate_final_jni =
+       defined(invoker.generate_final_jni) && invoker.generate_final_jni
+   if (_generate_final_jni) {
+     _outer_target_name = target_name
+     _jni_srcjar_target = "${target_name}__final_jni"
+     generate_jni_registration(_jni_srcjar_target) {
+       targets = [ ":$_outer_target_name" ]
+     }
+     _jni_java_target = "${target_name}__final_jni_java"
+     java_library_impl(_jni_java_target) {
+       type = "java_library"
+       supports_android = true
+       requires_android = true
+       srcjar_deps = [ ":$_jni_srcjar_target" ]
+     }
+     _deps += [ ":$_jni_java_target" ]
+   }
+
+   if (defined(invoker.deps)) {
+     _deps += invoker.deps
+   }
+
+   _build_config = "$target_gen_dir/$target_name.build_config.json"
+   _build_config_target_name = "$target_name$build_config_target_suffix"
+
+   write_build_config(_build_config_target_name) {
+     type = "dist_aar"
+     forward_variables_from(invoker, [ "proguard_configs" ])
+     possible_config_deps = _deps
+     supports_android = true
+     requires_android = true
+     build_config = _build_config
+   }
+
+   _deps += [ ":$_build_config_target_name" ]
+
+   _rebased_build_config = rebase_path(_build_config, root_build_dir)
+
+   action_with_pydeps(target_name) {
+     forward_variables_from(invoker,
+                            [
+                              "data",
+                              "assert_no_deps",
+                            ])
+     depfile = "$target_gen_dir/$target_name.d"
+     deps = _deps
+     script = "//build/android/gyp/dist_aar.py"
+
+     inputs = [ _build_config ]
+
+     # Although these will be listed as deps in the depfile, they must also
+     # appear here so that "gn analyze" knows about them.
+ # https://crbug.com/827197 + if (defined(invoker.proguard_configs)) { + inputs += invoker.proguard_configs + } + + outputs = [ invoker.output ] + + args = [ + "--depfile", + rebase_path(depfile, root_build_dir), + "--output", + rebase_path(invoker.output, root_build_dir), + "--dependencies-res-zips=@FileArg($_rebased_build_config:deps_info:dependency_zips)", + "--r-text-files=@FileArg($_rebased_build_config:deps_info:dependency_r_txt_files)", + "--proguard-configs=@FileArg($_rebased_build_config:deps_info:proguard_all_configs)", + ] + if (_direct_deps_only) { + args += [ "--jars=@FileArg($_rebased_build_config:javac:classpath)" ] + } else { + args += [ + "--jars=@FileArg($_rebased_build_config:deps_info:device_classpath)", + ] + } + + if (defined(invoker.android_manifest)) { + args += [ + "--android-manifest", + rebase_path(invoker.android_manifest, root_build_dir), + ] + } + if (defined(invoker.native_libraries) && invoker.native_libraries != []) { + inputs += invoker.native_libraries + _rebased_native_libraries = + rebase_path(invoker.native_libraries, root_build_dir) + + args += [ + "--native-libraries=$_rebased_native_libraries", + "--abi=$android_app_abi", + ] + } + if (defined(invoker.jar_excluded_patterns)) { + args += [ "--jar-excluded-globs=${invoker.jar_excluded_patterns}" ] + } + if (defined(invoker.jar_included_patterns)) { + args += [ "--jar-included-globs=${invoker.jar_included_patterns}" ] + } + if (defined(invoker.resource_included_patterns)) { + args += [ + "--resource-included-globs=${invoker.resource_included_patterns}", + ] + } + } + } + + # Declare an Android library target + # + # This target creates an Android library containing java code and Android + # resources. + # + # Supports all variables of java_library(), plus: + # deps: In addition to defining java deps, this can also include + # android_assets() and android_resources() targets. + # alternative_android_sdk_dep: android_system_java_prebuilt target to use + # in place of the default android.jar. + # + # Example + # android_library("foo_java") { + # sources = [ + # "android/org/chromium/foo/Foo.java", + # "android/org/chromium/foo/FooInterface.java", + # "android/org/chromium/foo/FooService.java", + # ] + # deps = [ + # ":bar_java" + # ] + # srcjar_deps = [ + # ":foo_generated_enum" + # ] + # jar_excluded_patterns = [ + # "*/FooService.class", "org/chromium/FooService\$*.class" + # ] + # } + template("android_library") { + java_library(target_name) { + forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + + supports_android = true + requires_android = true + + if (!defined(jar_excluded_patterns)) { + jar_excluded_patterns = [] + } + jar_excluded_patterns += [ + "*/R.class", + "*/R\$*.class", + "*/Manifest.class", + "*/Manifest\$*.class", + "*/*GEN_JNI.class", + ] + } + } + + # Declare an Android robolectric library target + # + # This target creates an Android library containing java code and Android + # resources. + # + # Supports all variables of java_library(), plus: + # deps: In addition to defining java deps, this can also include + # android_assets() and android_resources() targets. 
+ #
+ # Example
+ #   robolectric_library("foo_junit") {
+ #     sources = [
+ #       "android/org/chromium/foo/FooTest.java",
+ #       "android/org/chromium/foo/FooTestUtils.java",
+ #       "android/org/chromium/foo/FooMock.java",
+ #     ]
+ #     deps = [
+ #       "//base:base_junit_test_support"
+ #     ]
+ #     srcjar_deps = [
+ #       ":foo_generated_enum"
+ #     ]
+ #     jar_excluded_patterns = [
+ #       "*/FooService.class", "org/chromium/FooService\$*.class"
+ #     ]
+ #   }
+ template("robolectric_library") {
+   java_library(target_name) {
+     forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
+     forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+
+     testonly = true
+
+     is_robolectric = true
+     include_android_sdk = true
+     alternative_android_sdk_dep =
+         "//third_party/robolectric:robolectric_test_sdk_java"
+
+     if (!defined(jar_excluded_patterns)) {
+       jar_excluded_patterns = []
+     }
+     jar_excluded_patterns += [
+       "*/R.class",
+       "*/R\$*.class",
+       "*/Manifest.class",
+       "*/Manifest\$*.class",
+       "*/*GEN_JNI.class",
+     ]
+
+     if (!defined(deps)) {
+       deps = []
+     }
+     deps += [ "//third_party/android_deps:robolectric_all_java" ]
+   }
+ }
+
+ # Declare an Android library target for a prebuilt jar
+ #
+ # This target creates an Android library containing java code and Android
+ # resources.
+ #
+ # Supports all variables of android_library().
+ #
+ # Example
+ #   android_java_prebuilt("foo_java") {
+ #     jar_path = "foo.jar"
+ #     deps = [
+ #       ":foo_resources",
+ #       ":bar_java"
+ #     ]
+ #   }
+ template("android_java_prebuilt") {
+   android_library(target_name) {
+     forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
+     forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+   }
+ }
+
+ # Declare a java library target for a prebuilt Android system jar, such as
+ # the android.jar passed via alternative_android_sdk_dep.
+ template("android_system_java_prebuilt") {
+   java_library_impl(target_name) {
+     forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
+     forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+     supports_android = true
+     type = "system_java_library"
+   }
+ }
+
+ # Creates org/chromium/build/BuildConfig.java
+ # This doesn't really belong in //build since it generates a file for //base.
+ # However, we don't currently have a better way to include this file in all
+ # apks that depend on //base:base_java.
+ #
+ # Variables:
+ #   use_final_fields: True to use final fields. When false, all other
+ #     variables must not be set.
+ #   enable_multidex: Value for ENABLE_MULTIDEX.
+ #   min_sdk_version: Value for MIN_SDK_VERSION.
+ #   bundles_supported: Whether or not this target can be treated as a bundle.
+ #   resources_version_variable:
+ #   is_incremental_install:
+ #   isolated_splits_enabled: Value for ISOLATED_SPLITS_ENABLED.
+ template("generate_build_config_srcjar") {
+   java_cpp_template(target_name) {
+     forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+     sources = [ "//build/android/java/templates/BuildConfig.template" ]
+     defines = []
+
+     # Set these even when !use_final_fields so that they have correct default
+     # values within robolectric_binary(), which ignores jar_excluded_patterns.
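+     # Each define below toggles a preprocessor block in BuildConfig.template
+     # (java_cpp_template() runs the C preprocessor over the template); e.g.
+     # defining _ENABLE_ASSERTS turns on the assertion-enabled code path in
+     # the generated BuildConfig.java.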
+     if ((defined(invoker.assertions_implicitly_enabled) &&
+          invoker.assertions_implicitly_enabled) || enable_java_asserts) {
+       defines += [ "_ENABLE_ASSERTS" ]
+     }
+     if (use_cfi_diag || is_ubsan || is_ubsan_security || is_ubsan_vptr) {
+       defines += [ "_IS_UBSAN" ]
+     }
+
+     if (is_chrome_branded) {
+       defines += [ "_IS_CHROME_BRANDED" ]
+     }
+
+     if (defined(invoker.bundles_supported) && invoker.bundles_supported) {
+       defines += [ "_BUNDLES_SUPPORTED" ]
+     }
+
+     if (defined(invoker.isolated_splits_enabled) &&
+         invoker.isolated_splits_enabled) {
+       defines += [ "_ISOLATED_SPLITS_ENABLED" ]
+     }
+
+     if (defined(invoker.is_incremental_install) &&
+         invoker.is_incremental_install) {
+       defines += [ "_IS_INCREMENTAL_INSTALL" ]
+     }
+
+     if (invoker.use_final_fields) {
+       forward_variables_from(invoker, [ "deps" ])
+       defines += [ "USE_FINAL" ]
+       if (invoker.enable_multidex) {
+         defines += [ "ENABLE_MULTIDEX" ]
+       }
+       if (defined(invoker.min_sdk_version)) {
+         defines += [ "_MIN_SDK_VERSION=${invoker.min_sdk_version}" ]
+       }
+       if (defined(invoker.resources_version_variable)) {
+         defines += [
+           "_RESOURCES_VERSION_VARIABLE=${invoker.resources_version_variable}",
+         ]
+       }
+     }
+
+     if (defined(testonly) && testonly) {
+       defines += [ "_IS_FOR_TEST" ]
+     }
+   }
+ }
+
+ # Creates ProductConfig.java, a file containing product-specific configuration.
+ #
+ # Currently, this includes the list of locales, both in their compressed and
+ # uncompressed format, as well as flags controlling library loading (whether
+ # to use the Chromium linker and whether this is a bundle build).
+ #
+ # Variables:
+ #   build_config: Path to build_config used for locale lists.
+ #   is_bundle_module: Whether or not this target is part of a bundle build.
+ #   java_package: Java package for the generated class.
+ #   use_chromium_linker:
+ template("generate_product_config_srcjar") {
+   java_cpp_template(target_name) {
+     defines = []
+     _use_final =
+         defined(invoker.build_config) ||
+         defined(invoker.use_chromium_linker) ||
+         defined(invoker.is_bundle_module)
+     if (_use_final) {
+       defines += [ "USE_FINAL" ]
+     }
+
+     sources = [ "//build/android/java/templates/ProductConfig.template" ]
+     defines += [ "PACKAGE=${invoker.java_package}" ]
+
+     _use_chromium_linker =
+         defined(invoker.use_chromium_linker) && invoker.use_chromium_linker
+     _is_bundle = defined(invoker.is_bundle_module) && invoker.is_bundle_module
+     defines += [
+       "USE_CHROMIUM_LINKER_VALUE=$_use_chromium_linker",
+       "IS_BUNDLE_VALUE=$_is_bundle",
+     ]
+     if (defined(invoker.build_config)) {
+       forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ])
+       _rebased_build_config =
+           rebase_path(invoker.build_config, root_build_dir)
+       defines += [ "LOCALE_LIST=@FileArg($_rebased_build_config:deps_info:locales_java_list)" ]
+     }
+   }
+ }
+
+ # Declare an Android app module target, which is used as the basis for an
+ # Android APK or an Android app bundle module.
+ #
+ # Supports all variables of android_library(), plus:
+ #   android_manifest: Path to AndroidManifest.xml. NOTE: This manifest must
+ #     not contain a <uses-sdk> element. Use [min|target|max]_sdk_version
+ #     instead.
+ #   android_manifest_dep: Target that generates AndroidManifest (if
+ #     applicable).
+ #   png_to_webp: If true, pngs (with the exception of 9-patch) are
+ #     converted to webp during resource packaging.
+ #   loadable_modules: List of paths to native libraries to include. Different
+ #     from |shared_libraries| in that:
+ #       * dependencies of this .so are not automatically included
+ #       * ".cr.so" is never added
+ #       * they are not side-loaded when incremental_install=true.
+ #       * use_chromium_linker and enable_relocation_packing do not apply
+ #     Use this instead of shared_libraries when you are going to load the
+ #     library conditionally, and only when shared_libraries doesn't work for
+ #     you.
+ #   secondary_abi_loadable_modules: This is the loadable_modules analog to
+ #     secondary_abi_shared_libraries.
+ #   shared_libraries: List of shared_library targets to bundle. If these
+ #     libraries depend on other shared_library targets, those dependencies
+ #     will also be included in the apk (e.g. for is_component_build).
+ #   secondary_abi_shared_libraries: secondary abi shared_library targets to
+ #     bundle. If these libraries depend on other shared_library targets, those
+ #     dependencies will also be included in the apk (e.g. for
+ #     is_component_build).
+ #   native_lib_placeholders: List of placeholder filenames to add to the apk
+ #     (optional).
+ #   secondary_native_lib_placeholders: List of placeholder filenames to add to
+ #     the apk for the secondary ABI (optional).
+ #   generate_buildconfig_java: If defined and false, skip generating the
+ #     BuildConfig java class describing the build configuration. The default
+ #     is true when building with Chromium for non-test APKs.
+ #   generate_final_jni: If defined and false, skip generating the
+ #     GEN_JNI srcjar.
+ #   generate_native_libraries_java: If defined, this flag solely controls
+ #     whether NativeLibraries.java is generated. Otherwise, by default,
+ #     NativeLibraries.java is generated only for the base module/apk and only
+ #     when its `shared_libraries` is not empty.
+ #   jni_file_exclusions: List of source paths to exclude from the
+ #     final_jni step.
+ #   aapt_locale_allowlist: If set, all locales not in this list will be
+ #     stripped from resources.arsc.
+ #   resource_exclusion_regex: Causes all drawable images matching the regex to
+ #     be excluded (mipmaps are still included).
+ #   resource_exclusion_exceptions: A list of globs used when
+ #     resource_exclusion_regex is set. Files that match this list will
+ #     still be included.
+ #   resource_values_filter_rules: List of "source_path:name_regex" used to
+ #     filter out unwanted 'values/' resources.
+ #   shared_resources: True if this is a runtime shared library APK, like
+ #     the system_webview_apk target. Ensures that its resources can be
+ #     used by the loading application process.
+ #   app_as_shared_lib: True if this is a regular application apk that can
+ #     also serve as a runtime shared library, like the monochrome_public_apk
+ #     target. Ensures that the resources are usable both by the APK running
+ #     as an application, or by another process that loads it at runtime.
+ #   shared_resources_allowlist_target: Optional name of a target specifying
+ #     an input R.txt file that lists the resources that can be exported
+ #     by the APK when shared_resources or app_as_shared_lib is defined.
+ #   uncompress_dex: Store final .dex files uncompressed in the apk.
+ #   omit_dex: If true, do not build or include classes.dex.
+ #   strip_resource_names: True if resource names should be stripped from the
+ #     resources.arsc file in the apk or module.
+ #   strip_unused_resources: True if unused resources should be stripped from
+ #     the apk or module.
+ #   short_resource_paths: True if resource paths should be shortened in the
+ #     apk or module.
+ #   resources_config_paths: List of paths to the aapt2 optimize config files
+ #     that tag resources with acceptable/non-acceptable optimizations.
+ #   expected_android_manifest: Enables verification of expected merged
+ #     manifest based on a golden file.
+ #   expected_android_manifest_base: Treat expected_android_manifest as a diff
+ #     with this file as the base.
+ #   resource_ids_provider_dep: If passed, this target will use the resource
+ #     IDs generated by {resource_ids_provider_dep}__compile_res during
+ #     resource compilation.
+ #   enforce_resource_overlays_in_tests: Enables the check, for testonly
+ #     targets, that dependent resource targets which override another
+ #     target's resources set resource_overlay=true. This check is always on
+ #     for non-test targets and cannot be disabled.
+ #   static_library_provider: Specifies a single target that this target will
+ #     use as a static library APK.
+ #   min_sdk_version: The minimum Android SDK version this target supports.
+ #     Optional, default $default_min_sdk_version.
+ #   target_sdk_version: The target Android SDK version for this target.
+ #     Optional, defaults to android_sdk_version.
+ #   max_sdk_version: The maximum Android SDK version this target supports.
+ #     Optional, default not set.
+ #   require_native_mocks: Enforce that any native calls using
+ #     org.chromium.base.annotations.NativeMethods must have a mock set
+ #     (optional).
+ #   enable_native_mocks: Allow native calls using
+ #     org.chromium.base.annotations.NativeMethods to be mocked in tests
+ #     (optional).
+ #   product_config_java_packages: Optional list of java packages. If given, a
+ #     ProductConfig.java file will be generated for each package.
+ #   enable_proguard_checks: Turns on -checkdiscard directives and missing
+ #     symbols check in the proguard step (default=true).
+ #   annotation_processor_deps: List of java_annotation_processor targets to
+ #     use when compiling the sources given to this target (optional).
+ #   processor_args_javac: List of args to pass to annotation processors when
+ #     compiling sources given to this target (optional).
+ #   bundles_supported: Enable Java code to treat this target as a bundle
+ #     (by default determined by the target type).
+ #   main_component_library: Specifies the name of the base component's library
+ #     in a component build. If given, the system will find dependent native
+ #     libraries at runtime by inspecting this library (optional).
+ #   expected_libs_and_assets: Verify the list of included native libraries
+ #     and assets is consistent with the given expectation file.
+ #   expected_libs_and_assets_base: Treat expected_libs_and_assets as a diff
+ #     with this file as the base.
+ #   expected_proguard_config: Checks that the merged set of proguard flags
+ #     matches the given config.
+ #   expected_proguard_config_base: Treat expected_proguard_config as a diff
+ #     with this file as the base.
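+ #
+ # Example (illustrative sketch; within Chromium this template is normally
+ # reached through wrappers such as android_apk() rather than being invoked
+ # directly):
+ #   android_apk_or_module("foo_apk") {
+ #     android_manifest = "AndroidManifest.xml"
+ #     final_apk_path = "$root_build_dir/apks/Foo.apk"
+ #     deps = [ ":foo_java" ]
+ #     shared_libraries = [ ":libfoo" ]
+ #   }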
+ template("android_apk_or_module") { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + _template_name = target_name + _base_path = "$target_out_dir/$target_name/$target_name" + _build_config = "$target_gen_dir/$target_name.build_config.json" + _build_config_target = "$target_name$build_config_target_suffix" + _java_target_name = "${_template_name}__java" + + _min_sdk_version = default_min_sdk_version + _target_sdk_version = android_sdk_version + if (defined(invoker.min_sdk_version)) { + _min_sdk_version = invoker.min_sdk_version + } + if (defined(invoker.target_sdk_version)) { + _target_sdk_version = invoker.target_sdk_version + } + + _is_bundle_module = + defined(invoker.is_bundle_module) && invoker.is_bundle_module + if (_is_bundle_module) { + _is_base_module = + defined(invoker.is_base_module) && invoker.is_base_module + } + + _omit_dex = defined(invoker.omit_dex) && invoker.omit_dex + _enable_multidex = + !defined(invoker.enable_multidex) || invoker.enable_multidex + + if (!_is_bundle_module) { + _final_apk_path = invoker.final_apk_path + _final_rtxt_path = "${_final_apk_path}.R.txt" + } + + _res_size_info_path = "$target_out_dir/$target_name.ap_.info" + if (!_is_bundle_module) { + _final_apk_path_no_ext_list = + process_file_template([ _final_apk_path ], + "{{source_dir}}/{{source_name_part}}") + _final_apk_path_no_ext = _final_apk_path_no_ext_list[0] + not_needed([ "_final_apk_path_no_ext" ]) + } + + # Non-base bundle modules create only proto resources. + if (!_is_bundle_module || _is_base_module) { + _arsc_resources_path = "$target_out_dir/$target_name.ap_" + } + if (_is_bundle_module) { + # Path to the intermediate proto-format resources zip file. + _proto_resources_path = "$target_out_dir/$target_name.proto.ap_" + } else { + # resource_sizes.py needs to be able to find the unpacked resources.arsc + # file based on apk name to compute normatlized size. + _resource_sizes_arsc_path = + "$root_out_dir/arsc/" + + rebase_path(_final_apk_path_no_ext, root_build_dir) + ".ap_" + } + + if (defined(invoker.version_code)) { + _version_code = invoker.version_code + } else { + _version_code = android_default_version_code + } + + if (android_override_version_code != "") { + _version_code = android_override_version_code + } + + if (defined(invoker.version_name)) { + _version_name = invoker.version_name + } else { + _version_name = android_default_version_name + } + + if (android_override_version_name != "") { + _version_name = android_override_version_name + } + + if (defined(invoker.deps)) { + _invoker_deps = invoker.deps + } else { + _invoker_deps = [] + } + _non_java_deps = filter_exclude(_invoker_deps, java_target_patterns) + _java_assetres_deps = [ ":${_java_target_name}__assetres" ] + + _srcjar_deps = [] + if (defined(invoker.srcjar_deps)) { + _srcjar_deps = invoker.srcjar_deps + } + + _use_chromium_linker = + defined(invoker.use_chromium_linker) && invoker.use_chromium_linker + + not_needed([ "_use_chromium_linker" ]) + + # The dependency that makes the chromium linker, if any is needed. + _native_libs_deps = [] + _shared_libraries_is_valid = + defined(invoker.shared_libraries) && invoker.shared_libraries != [] + + if (_shared_libraries_is_valid) { + _native_libs_deps += invoker.shared_libraries + + # Write shared library output files of all dependencies to a file. Those + # will be the shared libraries packaged into the APK. 
+ _shared_library_list_file = + "$target_gen_dir/${_template_name}.native_libs" + generated_file("${_template_name}__shared_library_list") { + deps = _native_libs_deps + outputs = [ _shared_library_list_file ] + data_keys = [ "shared_libraries" ] + walk_keys = [ "shared_libraries_barrier" ] + rebase = root_build_dir + } + } else { + # Must exist for instrumentation_test_apk() to depend on. + group("${_template_name}__shared_library_list") { + } + } + + _secondary_abi_native_libs_deps = [] + + if (defined(invoker.secondary_abi_shared_libraries) && + invoker.secondary_abi_shared_libraries != []) { + _secondary_abi_native_libs_deps = invoker.secondary_abi_shared_libraries + + # Write shared library output files of all dependencies to a file. Those + # will be the shared libraries packaged into the APK. + _secondary_abi_shared_library_list_file = + "$target_gen_dir/${_template_name}.secondary_abi_native_libs" + generated_file("${_template_name}__secondary_abi_shared_library_list") { + deps = _secondary_abi_native_libs_deps + outputs = [ _secondary_abi_shared_library_list_file ] + data_keys = [ "shared_libraries" ] + walk_keys = [ "shared_libraries_barrier" ] + rebase = root_build_dir + } + } else { + # Must exist for instrumentation_test_apk() to depend on. + group("${_template_name}__secondary_abi_shared_library_list") { + } + } + + _rebased_build_config = rebase_path(_build_config, root_build_dir) + assert(_rebased_build_config != "") # Mark as used. + + _generate_buildconfig_java = !defined(invoker.apk_under_test) && !_omit_dex + if (defined(invoker.generate_buildconfig_java)) { + _generate_buildconfig_java = invoker.generate_buildconfig_java + } + + _generate_productconfig_java = + defined(invoker.product_config_java_packages) && !_omit_dex + + # JNI generation usually goes hand-in-hand with buildconfig generation. + _generate_final_jni = _generate_buildconfig_java + if (defined(invoker.generate_final_jni)) { + _generate_final_jni = invoker.generate_final_jni + } + + _proguard_enabled = + defined(invoker.proguard_enabled) && invoker.proguard_enabled + + if (!_is_bundle_module && _proguard_enabled) { + _proguard_mapping_path = "$_final_apk_path.mapping" + } + + if (defined(invoker.resource_ids_provider_dep)) { + _resource_ids_provider_dep = invoker.resource_ids_provider_dep + } + + if (defined(invoker.shared_resources_allowlist_target)) { + _shared_resources_allowlist_target = + invoker.shared_resources_allowlist_target + } + + _uses_static_library = defined(invoker.static_library_provider) + + # TODO(crbug.com/864142): Allow incremental installs of bundle modules. + _incremental_apk = !_is_bundle_module && + !(defined(invoker.never_incremental) && + invoker.never_incremental) && incremental_install + if (_incremental_apk) { + _target_dir_name = get_label_info(target_name, "dir") + _incremental_install_json_path = "$root_out_dir/gen.runtime/$_target_dir_name/$target_name.incremental.json" + _incremental_apk_path = "${_final_apk_path_no_ext}_incremental.apk" + } + + if (!_incremental_apk && !_omit_dex) { + # Bundle modules don't build the dex here, but need to write this path + # to their .build_config.json file only when proguarding. 
+ if (_proguard_enabled) { + _final_dex_path = "$_base_path.r8dex.jar" + } else if (!_is_bundle_module) { + _final_dex_path = "$_base_path.mergeddex.jar" + } + } + + _android_manifest = + "$target_gen_dir/${_template_name}/AndroidManifest.merged.xml" + _merge_manifest_target = "${_template_name}__merge_manifests" + merge_manifests(_merge_manifest_target) { + forward_variables_from(invoker, + [ + "manifest_package", + "max_sdk_version", + ]) + input_manifest = invoker.android_manifest + output_manifest = _android_manifest + build_config = _build_config + min_sdk_version = _min_sdk_version + target_sdk_version = _target_sdk_version + + # Depend on android_resources() targets that use generated files + # in mergeable_android_manifests (such as android_aar_prebuilt). + deps = _java_assetres_deps + [ ":$_build_config_target" ] + if (defined(invoker.android_manifest_dep)) { + deps += [ invoker.android_manifest_dep ] + } + } + + _final_deps = [ ":$_java_target_name" ] + + _enable_main_dex_list = _enable_multidex && _min_sdk_version < 21 + if (_enable_main_dex_list) { + _generated_proguard_main_dex_config = + "$_base_path.resources.main-dex-proguard.txt" + } + _generated_proguard_config = "$_base_path.resources.proguard.txt" + + if (defined(invoker.alternative_android_sdk_dep)) { + _android_sdk_dep = invoker.alternative_android_sdk_dep + } else { + _android_sdk_dep = default_android_sdk_dep + } + + if (defined(_shared_resources_allowlist_target)) { + _allowlist_gen_dir = + get_label_info(_shared_resources_allowlist_target, "target_gen_dir") + _allowlist_target_name = + get_label_info(_shared_resources_allowlist_target, "name") + _allowlist_r_txt_path = + "${_allowlist_gen_dir}/${_allowlist_target_name}" + + "__compile_resources_R.txt" + _allowlist_deps = + "${_shared_resources_allowlist_target}__compile_resources" + } + + if (_incremental_apk) { + _incremental_android_manifest = + "$target_gen_dir/${_template_name}/AndroidManifest.incremental.xml" + _incremental_manifest_target_name = "${target_name}__incremental_manifest" + action_with_pydeps(_incremental_manifest_target_name) { + deps = [ ":$_merge_manifest_target" ] + script = + "//build/android/incremental_install/generate_android_manifest.py" + inputs = [ _android_manifest ] + outputs = [ _incremental_android_manifest ] + + args = [ + "--disable-isolated-processes", + "--src-manifest", + rebase_path(_android_manifest, root_build_dir), + "--dst-manifest", + rebase_path(_incremental_android_manifest, root_build_dir), + ] + } + } + + _compile_resources_target = "${_template_name}__compile_resources" + _compile_resources_rtxt_out = + "${target_gen_dir}/${_compile_resources_target}_R.txt" + _compile_resources_emit_ids_out = + "${target_gen_dir}/${_compile_resources_target}.resource_ids" + compile_resources(_compile_resources_target) { + forward_variables_from( + invoker, + [ + "aapt_locale_allowlist", + "app_as_shared_lib", + "enforce_resource_overlays_in_tests", + "expected_android_manifest", + "expected_android_manifest_base", + "expected_android_manifest_library_version_offset", + "expected_android_manifest_version_code_offset", + "manifest_package", + "max_sdk_version", + "package_id", + "png_to_webp", + "r_java_root_package_name", + "resource_exclusion_exceptions", + "resource_exclusion_regex", + "resource_values_filter_rules", + "shared_resources", + "shared_resources_allowlist_locales", + "uses_split", + ]) + android_manifest = _android_manifest + android_manifest_dep = ":$_merge_manifest_target" + version_code = _version_code + 
version_name = _version_name
+     min_sdk_version = _min_sdk_version
+     target_sdk_version = _target_sdk_version
+
+     if (defined(expected_android_manifest)) {
+       top_target_name = _template_name
+     }
+
+     if (defined(_resource_ids_provider_dep)) {
+       resource_ids_provider_dep = _resource_ids_provider_dep
+     }
+
+     if (defined(invoker.module_name)) {
+       package_name = invoker.module_name
+     }
+
+     if (defined(invoker.post_process_package_resources_script)) {
+       post_process_script = invoker.post_process_package_resources_script
+     }
+     r_text_out_path = _compile_resources_rtxt_out
+     emit_ids_out_path = _compile_resources_emit_ids_out
+     size_info_path = _res_size_info_path
+     proguard_file = _generated_proguard_config
+     if (_enable_main_dex_list) {
+       proguard_file_main_dex = _generated_proguard_main_dex_config
+     }
+
+     build_config = _build_config
+     build_config_dep = ":$_build_config_target"
+     android_sdk_dep = _android_sdk_dep
+     deps = _java_assetres_deps + _non_java_deps
+
+     if (_incremental_apk) {
+       android_manifest = _incremental_android_manifest
+       android_manifest_dep = ":$_incremental_manifest_target_name"
+     }
+
+     if (defined(invoker.apk_under_test)) {
+       # Set the arsc package name to match the apk_under_test package name
+       # so that test resources can reference under_test resources via
+       # @type/name syntax.
+       r_java_root_package_name = "test"
+       arsc_package_name =
+           "@FileArg($_rebased_build_config:deps_info:arsc_package_name)"
+
+       # Passing in the --emit-ids mapping will cause aapt2 to assign resource
+       # IDs that do not conflict with those from apk_under_test.
+       assert(!defined(resource_ids_provider_dep))
+       resource_ids_provider_dep = invoker.apk_under_test
+
+       _link_against = invoker.apk_under_test
+     }
+
+     if (_is_bundle_module) {
+       is_bundle_module = true
+       proto_output = _proto_resources_path
+
+       if (defined(invoker.base_module_target)) {
+         _link_against = invoker.base_module_target
+       }
+     }
+
+     if (defined(_link_against)) {
+       deps += [ "${_link_against}__compile_resources" ]
+       include_resource = get_label_info(_link_against, "target_out_dir") +
+           "/" + get_label_info(_link_against, "name") + ".ap_"
+     }
+
+     # Bundle modules have to reference resources from the base module.
+     if (!_is_bundle_module || _is_base_module) {
+       arsc_output = _arsc_resources_path
+     }
+
+     if (defined(_shared_resources_allowlist_target)) {
+       # Used to ensure that the WebView resources are properly shared
+       # (i.e. are non-final and with package ID 0).
+ shared_resources_allowlist = _allowlist_r_txt_path + deps += [ _allowlist_deps ] + } + } + _srcjar_deps += [ ":$_compile_resources_target" ] + + # We don't ship apks anymore, only optimize bundle builds + if (_is_bundle_module) { + _short_resource_paths = + defined(invoker.short_resource_paths) && + invoker.short_resource_paths && enable_arsc_obfuscation + _strip_resource_names = + defined(invoker.strip_resource_names) && + invoker.strip_resource_names && enable_arsc_obfuscation + _strip_unused_resources = + defined(invoker.strip_unused_resources) && + invoker.strip_unused_resources && enable_unused_resource_stripping + _optimize_resources = _strip_resource_names || _short_resource_paths || + _strip_unused_resources + } + + if (_is_bundle_module && _optimize_resources) { + _optimized_proto_resources_path = + "$target_out_dir/$target_name.optimized.proto.ap_" + if (_short_resource_paths) { + _resources_path_map_out_path = + "${target_gen_dir}/${_template_name}_resources_path_map.txt" + } + _optimize_resources_target = "${_template_name}__optimize_resources" + optimize_resources(_optimize_resources_target) { + deps = _non_java_deps + [ ":$_compile_resources_target" ] + short_resource_paths = _short_resource_paths + strip_resource_names = _strip_resource_names + if (_short_resource_paths) { + resources_path_map_out_path = _resources_path_map_out_path + } + r_text_path = _compile_resources_rtxt_out + proto_input_path = _proto_resources_path + optimized_proto_output = _optimized_proto_resources_path + if (_strip_unused_resources) { + # These need to be kept in sync with the target names + output paths + # in the android_app_bundle template. + _unused_resources_target = "${_template_name}__unused_resources" + _unused_resources_config_path = + "$target_gen_dir/${_template_name}_unused_resources.config" + resources_config_paths = [ _unused_resources_config_path ] + deps += [ ":$_unused_resources_target" ] + } else { + resources_config_paths = [] + } + if (defined(invoker.resources_config_paths)) { + resources_config_paths += invoker.resources_config_paths + } + } + + if (_strip_unused_resources) { + # Copy the unused resources config to the final bundle output dir. + _copy_unused_resources_target = + "${_template_name}__copy_unused_resources" + _final_deps += [ ":$_copy_unused_resources_target" ] + } + } else { + not_needed(invoker, [ "resources_config_paths" ]) + } + + if (!_is_bundle_module) { + # Output the R.txt file to a more easily discoverable location for + # archiving. This is necessary when stripping resource names so that we + # have an archive of resource names to ids for shipped apks (for + # debugging purposes). We copy the file rather than change the location + # of the original because other targets rely on the location of the R.txt + # file. + _copy_rtxt_target = "${_template_name}__copy_rtxt" + copy(_copy_rtxt_target) { + deps = [ ":$_compile_resources_target" ] + sources = [ _compile_resources_rtxt_out ] + outputs = [ _final_rtxt_path ] + } + _final_deps += [ ":$_copy_rtxt_target" ] + } + + if (defined(_resource_sizes_arsc_path)) { + _copy_arsc_target = "${_template_name}__copy_arsc" + copy(_copy_arsc_target) { + deps = [ ":$_compile_resources_target" ] + + # resource_sizes.py doesn't care if it gets the optimized .arsc. 
+ sources = [ _arsc_resources_path ] + outputs = [ _resource_sizes_arsc_path ] + } + _final_deps += [ ":$_copy_arsc_target" ] + } + + if (defined(invoker.generate_native_libraries_java)) { + _generate_native_libraries_java = invoker.generate_native_libraries_java + } else { + _generate_native_libraries_java = + (!_is_bundle_module || _is_base_module) && !_omit_dex && + !defined(invoker.apk_under_test) + } + if (_generate_native_libraries_java) { + write_native_libraries_java("${_template_name}__native_libraries") { + forward_variables_from(invoker, [ "main_component_library" ]) + + # Do not add a dep on the generated_file target in order to avoid having + # to build the native libraries before this target. The dependency is + # instead captured via a depfile. + if (_uses_static_library) { + _prefix = get_label_info(invoker.static_library_provider, + "target_gen_dir") + "/" + + get_label_info(invoker.static_library_provider, "name") + if (defined(invoker.static_library_provider_use_secondary_abi) && + invoker.static_library_provider_use_secondary_abi) { + native_libraries_list_file = "${_prefix}.secondary_abi_native_libs" + } else { + native_libraries_list_file = "${_prefix}.native_libs" + } + } else if (_native_libs_deps != []) { + native_libraries_list_file = _shared_library_list_file + } else if (_secondary_abi_native_libs_deps != []) { + native_libraries_list_file = _secondary_abi_shared_library_list_file + } + enable_chromium_linker = _use_chromium_linker + use_final_fields = true + } + _srcjar_deps += [ ":${_template_name}__native_libraries" ] + } + + _loadable_modules = [] + if (defined(invoker.loadable_modules)) { + _loadable_modules = invoker.loadable_modules + } + + if (_native_libs_deps != []) { + _loadable_modules += _sanitizer_runtimes + } + + _assertions_implicitly_enabled = defined(invoker.custom_assertion_handler) + + # Many possible paths where we wouldn't use this variable. 
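+  # (not_needed() marks a variable as intentionally unused so that "gn gen"
+  # does not error out on the code paths that never read it.)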
+ not_needed([ "_assertions_implicitly_enabled" ]) + + if (_generate_buildconfig_java) { + generate_build_config_srcjar("${_template_name}__build_config_srcjar") { + forward_variables_from(invoker, + [ + "min_sdk_version", + "isolated_splits_enabled", + ]) + _bundles_supported = _is_bundle_module + if (defined(invoker.bundles_supported)) { + _bundles_supported = invoker.bundles_supported + } + bundles_supported = _bundles_supported + use_final_fields = true + assertions_implicitly_enabled = _assertions_implicitly_enabled + enable_multidex = _enable_multidex + is_incremental_install = _incremental_apk + if (defined(invoker.build_config_include_product_version_resource) && + invoker.build_config_include_product_version_resource) { + resources_version_variable = + "org.chromium.base.R.string.product_version" + } + deps = [ ":$_build_config_target" ] + } + _srcjar_deps += [ ":${_template_name}__build_config_srcjar" ] + } + + if (_generate_productconfig_java) { + foreach(_package, invoker.product_config_java_packages) { + _locale_target_name = + "${_template_name}_${_package}__product_config_srcjar" + generate_product_config_srcjar("$_locale_target_name") { + forward_variables_from(invoker, [ "is_bundle_module" ]) + build_config = _build_config + java_package = _package + use_chromium_linker = _use_chromium_linker + deps = [ ":$_build_config_target" ] + } + _srcjar_deps += [ ":$_locale_target_name" ] + } + } + + if (_generate_final_jni) { + generate_jni_registration("${_template_name}__final_jni") { + forward_variables_from(invoker, + [ + "enable_jni_multiplexing", + "enable_native_mocks", + "require_native_mocks", + ]) + if (defined(invoker.bundle_target)) { + targets = [ invoker.bundle_target ] + } else { + targets = [ ":$_template_name" ] + } + if (defined(invoker.jni_file_exclusions)) { + file_exclusions = invoker.jni_file_exclusions + } + prevent_header_output = true + } + _srcjar_deps += [ ":${_template_name}__final_jni" ] + } else { + not_needed(invoker, [ "enable_native_mocks" ]) + } + + if (_is_bundle_module) { + _add_view_trace_events = + defined(invoker.add_view_trace_events) && + invoker.add_view_trace_events && enable_trace_event_bytecode_rewriting + } + + # We cannot skip this target when omit_dex = true because it writes the + # build_config.json. 
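+  # (java_library_impl() is the shared implementation behind the java and
+  # android library/apk templates; with type set to "android_apk" or
+  # "android_app_bundle_module" below, it is also what writes this target's
+  # .build_config.json.)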
+  java_library_impl(_java_target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "alternative_android_sdk_dep",
+                             "android_manifest",
+                             "android_manifest_dep",
+                             "annotation_processor_deps",
+                             "apk_under_test",
+                             "base_module_target",
+                             "chromium_code",
+                             "deps",
+                             "jacoco_never_instrument",
+                             "jar_excluded_patterns",
+                             "javac_args",
+                             "mergeable_android_manifests",
+                             "native_lib_placeholders",
+                             "parent_module_target",
+                             "processor_args_javac",
+                             "secondary_abi_loadable_modules",
+                             "secondary_native_lib_placeholders",
+                             "sources",
+                             "library_always_compress",
+                           ])
+    version_code = _version_code
+    version_name = _version_name
+    if (_is_bundle_module) {
+      type = "android_app_bundle_module"
+      res_size_info_path = _res_size_info_path
+      if (defined(invoker.module_name)) {
+        module_name = invoker.module_name
+      } else {
+        module_name = "base"
+      }
+      add_view_trace_events = _add_view_trace_events
+    } else {
+      type = "android_apk"
+    }
+    r_text_path = _compile_resources_rtxt_out
+    main_target_name = _template_name
+    supports_android = true
+    requires_android = true
+    srcjar_deps = _srcjar_deps
+    merged_android_manifest = _android_manifest
+    if (defined(_final_dex_path)) {
+      final_dex_path = _final_dex_path
+    }
+
+    if (_is_bundle_module) {
+      proto_resources_path = _proto_resources_path
+      if (_optimize_resources) {
+        proto_resources_path = _optimized_proto_resources_path
+        if (_short_resource_paths) {
+          module_pathmap_path = _resources_path_map_out_path
+        }
+      }
+    } else {
+      apk_path = _final_apk_path
+      if (_incremental_apk) {
+        incremental_apk_path = _incremental_apk_path
+        incremental_install_json_path = _incremental_install_json_path
+      }
+    }
+
+    proguard_enabled = _proguard_enabled
+    if (_proguard_enabled) {
+      proguard_configs = [ _generated_proguard_config ]
+      if (defined(invoker.proguard_configs)) {
+        proguard_configs += invoker.proguard_configs
+      }
+      if (!_assertions_implicitly_enabled && !enable_java_asserts &&
+          (!defined(testonly) || !testonly) &&
+          # Injected JaCoCo code causes -checkdiscards to fail.
+          !use_jacoco_coverage) {
+        proguard_configs += [ "//build/android/dcheck_is_off.flags" ]
+      }
+      if (!_is_bundle_module) {
+        proguard_mapping_path = _proguard_mapping_path
+      }
+    }
+
+    # Do not add a dep on the generated_file target in order to avoid having
+    # to build the native libraries before this target. The dependency is
+    # instead captured via a depfile.
+    if (_native_libs_deps != []) {
+      shared_libraries_runtime_deps_file = _shared_library_list_file
+    }
+    if (defined(_secondary_abi_shared_library_list_file)) {
+      secondary_abi_shared_libraries_runtime_deps_file =
+          _secondary_abi_shared_library_list_file
+    }
+
+    loadable_modules = _loadable_modules
+
+    if (defined(_allowlist_r_txt_path) && _is_bundle_module) {
+      # Used to write the file path to the target's .build_config.json only.
+      base_allowlist_rtxt_path = _allowlist_r_txt_path
+    }
+  }
+
+  if (_is_bundle_module || _omit_dex) {
+    # Dex generation for app bundle modules takes place in the
+    # android_app_bundle template.
+ not_needed(invoker, [ "custom_assertion_handler" ]) + } else if (_incremental_apk) { + not_needed(invoker, + [ + "enable_proguard_checks", + "custom_assertion_handler", + ]) + } else { + _final_dex_target_name = "${_template_name}__final_dex" + dex(_final_dex_target_name) { + forward_variables_from(invoker, + [ + "enable_proguard_checks", + "custom_assertion_handler", + "proguard_enable_obfuscation", + ]) + min_sdk_version = _min_sdk_version + proguard_enabled = _proguard_enabled + build_config = _build_config + output = _final_dex_path + enable_multidex = _enable_multidex + deps = [ + ":$_build_config_target", + ":$_java_target_name", + ] + if (_proguard_enabled) { + # Generates proguard configs + deps += [ ":$_compile_resources_target" ] + proguard_mapping_path = _proguard_mapping_path + has_apk_under_test = defined(invoker.apk_under_test) + } else { + if (_min_sdk_version >= default_min_sdk_version) { + # Enable dex merging only when min_sdk_version is >= what the library + # .dex files were created with. + input_dex_filearg = + "@FileArg(${_rebased_build_config}:deps_info:all_dex_files)" + + # Pure dex-merge. + enable_desugar = false + } else { + input_classes_filearg = + "@FileArg($_rebased_build_config:deps_info:device_classpath)" + } + } + + # The individual dependencies would have caught real missing deps in + # their respective dex steps. False positives that were suppressed at + # per-target dex steps are emitted here since this may use jar files + # rather than dex files. + if (!defined(enable_desugar)) { + ignore_desugar_missing_deps = true + } + + if (_enable_main_dex_list) { + # Generates main-dex config. + deps += [ ":$_compile_resources_target" ] + extra_main_dex_proguard_config = _generated_proguard_main_dex_config + } + } + + _final_dex_target_dep = ":$_final_dex_target_name" + + _use_baseline_profile = + _proguard_enabled && defined(invoker.baseline_profile_path) && + enable_baseline_profiles + if (_use_baseline_profile) { + _binary_profile_target = "${_template_name}__binary_baseline_profile" + _binary_baseline_profile_path = + "$target_out_dir/$_template_name.baseline.prof" + _binary_baseline_profile_metadata_path = + _binary_baseline_profile_path + "m" + create_binary_profile(_binary_profile_target) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + binary_baseline_profile_path = _binary_baseline_profile_path + binary_baseline_profile_metadata_path = + _binary_baseline_profile_metadata_path + proguard_mapping_path = _proguard_mapping_path + build_config = _build_config + input_profile_path = invoker.baseline_profile_path + deps = [ + ":$_build_config_target", + _final_dex_target_dep, + ] + } + } + } + + if (!defined(_use_baseline_profile) || !_use_baseline_profile) { + not_needed(invoker, [ "baseline_profile_path" ]) + } + + _all_native_libs_deps = _native_libs_deps + _secondary_abi_native_libs_deps + if (_all_native_libs_deps != []) { + _native_libs_filearg_dep = ":$_build_config_target" + _all_native_libs_deps += [ _native_libs_filearg_dep ] + + if (!_is_bundle_module) { + _native_libs_filearg = + "@FileArg($_rebased_build_config:native:libraries)" + } + } + + if (_is_bundle_module) { + _final_deps += [ + ":$_build_config_target", + ":$_compile_resources_target", + ":$_merge_manifest_target", + ] + _all_native_libs_deps + if (_optimize_resources) { + _final_deps += [ ":$_optimize_resources_target" ] + } + if (defined(_final_dex_target_dep)) { + not_needed([ "_final_dex_target_dep" ]) + } + } else { + # Generate size-info/*.jar.info files. 
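+    # (The size-info files record, among other things, which source file
+    # each class came from; the test runner relies on this to locate test
+    # sources, as noted in the android_test_apk template below.)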
+    if (defined(invoker.name)) {
+      # Create size info files for targets that care about size
+      # (have proguard enabled).
+      _include_size_info =
+          defined(invoker.include_size_info) && invoker.include_size_info
+      if (_include_size_info || _proguard_enabled) {
+        _size_info_target = "${target_name}__size_info"
+        create_size_info_files(_size_info_target) {
+          name = "${invoker.name}.apk"
+          build_config = _build_config
+          res_size_info_path = _res_size_info_path
+          deps = [
+            ":$_build_config_target",
+            ":$_compile_resources_target",
+            ":$_java_target_name",
+          ]
+        }
+        _final_deps += [ ":$_size_info_target" ]
+      } else {
+        not_needed(invoker, [ "name" ])
+      }
+    }
+
+    _create_apk_target = "${_template_name}__create"
+    _final_deps += [ ":$_create_apk_target" ]
+    package_apk("$_create_apk_target") {
+      forward_variables_from(invoker,
+                             [
+                               "expected_libs_and_assets",
+                               "expected_libs_and_assets_base",
+                               "keystore_name",
+                               "keystore_path",
+                               "keystore_password",
+                               "native_lib_placeholders",
+                               "secondary_abi_loadable_modules",
+                               "secondary_native_lib_placeholders",
+                               "uncompress_dex",
+                               "library_always_compress",
+                             ])
+
+      if (defined(expected_libs_and_assets)) {
+        build_config_dep = ":$_build_config_target"
+        top_target_name = _template_name
+      }
+
+      build_config = _build_config
+      min_sdk_version = _min_sdk_version
+      packaged_resources_path = _arsc_resources_path
+
+      # Need full deps rather than _non_java_deps, because loadable_modules
+      # may include .so files extracted by __unpack_aar targets.
+      deps = _invoker_deps + [ ":$_build_config_target" ]
+
+      if (_incremental_apk) {
+        _dex_target = "//build/android/incremental_install:apk_dex"
+
+        deps += [
+          ":$_compile_resources_target",
+          _dex_target,
+        ]
+
+        dex_path = get_label_info(_dex_target, "target_out_dir") + "/apk.dex"
+
+        # Incremental APKs cannot be installed via `adb install`; as such,
+        # they should be clearly named/labeled "incremental".
+        output_apk_path = _incremental_apk_path
+
+        # All native libraries are side-loaded, so use a placeholder to force
+        # the proper bitness for the app.
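+        # (Packaging at least one native library makes the APK ABI-specific,
+        # so the installer selects the correct bitness; presumably that is
+        # the reason for the "libfix" placeholder below.)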
+ _has_native_libs = + defined(_native_libs_filearg) || _loadable_modules != [] + if (_has_native_libs && !defined(native_lib_placeholders)) { + native_lib_placeholders = [ "libfix.crbug.384638.so" ] + } + } else { + loadable_modules = _loadable_modules + deps += _all_native_libs_deps + [ + ":$_compile_resources_target", + ":$_merge_manifest_target", + ] + + if (defined(_final_dex_path)) { + dex_path = _final_dex_path + deps += [ _final_dex_target_dep ] + if (_use_baseline_profile) { + # extra_assets is a list of ["{src_path}:{dst_path}"] + extra_assets = [ + rebase_path(_binary_baseline_profile_path, root_build_dir) + + ":dexopt/baseline.prof", + rebase_path(_binary_baseline_profile_metadata_path, + root_build_dir) + ":dexopt/baseline.profm", + ] + deps += [ ":$_binary_profile_target" ] + } + } + + output_apk_path = _final_apk_path + + if (defined(_native_libs_filearg)) { + native_libs_filearg = _native_libs_filearg + secondary_abi_native_libs_filearg = "@FileArg($_rebased_build_config:native:secondary_abi_libraries)" + } + } + } + } + + if (_incremental_apk) { + _write_installer_json_rule_name = "${_template_name}__incremental_json" + action_with_pydeps(_write_installer_json_rule_name) { + script = "//build/android/incremental_install/write_installer_json.py" + deps = [ ":$_build_config_target" ] + _all_native_libs_deps + + data = [ _incremental_install_json_path ] + inputs = [ _build_config ] + outputs = [ _incremental_install_json_path ] + + _rebased_incremental_apk_path = + rebase_path(_incremental_apk_path, root_build_dir) + _rebased_incremental_install_json_path = + rebase_path(_incremental_install_json_path, root_build_dir) + args = [ + "--apk-path=$_rebased_incremental_apk_path", + "--output-path=$_rebased_incremental_install_json_path", + "--dex-file=@FileArg($_rebased_build_config:deps_info:all_dex_files)", + ] + if (_proguard_enabled) { + args += [ "--show-proguard-warning" ] + } + if (defined(_native_libs_filearg)) { + args += [ "--native-libs=$_native_libs_filearg" ] + deps += [ _native_libs_filearg_dep ] + } + if (_loadable_modules != []) { + _rebased_loadable_modules = + rebase_path(_loadable_modules, root_build_dir) + args += [ "--native-libs=$_rebased_loadable_modules" ] + } + } + _final_deps += [ ":$_write_installer_json_rule_name" ] + } + + # Generate apk operation related script. 
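+  # The generated bin/<target_name> wrapper drives apk_operations.py; the
+  # data_deps below pull in the tooling it needs (e.g. deobfuscation support
+  # for the logcat command when proguard is enabled). A rough usage sketch,
+  # assuming an output directory out/Default and a target named foo_apk:
+  #   out/Default/bin/foo_apk install
+  #   out/Default/bin/foo_apk logcat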
+ if (!_is_bundle_module && + (!defined(invoker.create_apk_script) || invoker.create_apk_script)) { + if (_uses_static_library) { + _install_artifacts_target = "${target_name}__install_artifacts" + _install_artifacts_json = + "${target_gen_dir}/${target_name}.install_artifacts" + generated_file(_install_artifacts_target) { + output_conversion = "json" + deps = [ invoker.static_library_provider ] + outputs = [ _install_artifacts_json ] + data_keys = [ "install_artifacts" ] + rebase = root_build_dir + } + } + _apk_operations_target_name = "${target_name}__apk_operations" + action_with_pydeps(_apk_operations_target_name) { + _generated_script = "$root_build_dir/bin/${invoker.target_name}" + script = "//build/android/gyp/create_apk_operations_script.py" + outputs = [ _generated_script ] + args = [ + "--script-output-path", + rebase_path(_generated_script, root_build_dir), + "--target-cpu=$target_cpu", + ] + if (defined(invoker.command_line_flags_file)) { + args += [ + "--command-line-flags-file", + invoker.command_line_flags_file, + ] + } + if (_incremental_apk) { + args += [ + "--incremental-install-json-path", + rebase_path(_incremental_install_json_path, root_build_dir), + ] + } else { + args += [ + "--apk-path", + rebase_path(_final_apk_path, root_build_dir), + ] + } + if (_uses_static_library) { + deps = [ ":$_install_artifacts_target" ] + _rebased_install_artifacts_json = + rebase_path(_install_artifacts_json, root_build_dir) + _static_library_apk_path = + "@FileArg($_rebased_install_artifacts_json[])" + args += [ + "--additional-apk", + _static_library_apk_path, + ] + } + data = [] + data_deps = [ + "//build/android:apk_operations_py", + "//build/android:stack_tools", + ] + + if (_proguard_enabled && !_incremental_apk) { + # Required by logcat command. + data_deps += [ "//build/android/stacktrace:java_deobfuscate" ] + data += [ "$_final_apk_path.mapping" ] + args += [ + "--proguard-mapping-path", + rebase_path("$_final_apk_path.mapping", root_build_dir), + ] + } + } + _final_deps += [ ":$_apk_operations_target_name" ] + } + + _enable_lint = defined(invoker.enable_lint) && invoker.enable_lint && + !disable_android_lint + if (_enable_lint) { + android_lint("${target_name}__lint") { + forward_variables_from(invoker, + [ + "lint_baseline_file", + "lint_suppressions_file", + "min_sdk_version", + ]) + build_config = _build_config + build_config_dep = ":$_build_config_target" + + # This will use library subtargets under-the-hood + deps = [ ":$_java_target_name" ] + if (defined(invoker.lint_suppressions_dep)) { + deps += [ invoker.lint_suppressions_dep ] + } + if (defined(invoker.lint_min_sdk_version)) { + min_sdk_version = invoker.lint_min_sdk_version + } + } + } else { + not_needed(invoker, + [ + "lint_baseline_file", + "lint_jar_path", + "lint_min_sdk_version", + "lint_suppressions_dep", + "lint_suppressions_file", + ]) + } + + group(target_name) { + forward_variables_from(invoker, + [ + "assert_no_deps", + "data", + "data_deps", + "metadata", + ]) + + # Generate apk related operations at runtime. + public_deps = _final_deps + + if (!defined(data_deps)) { + data_deps = [] + } + + # Include unstripped native libraries so tests can symbolize stacks. 
+    data_deps +=
+        _all_native_libs_deps + [ ":${_java_target_name}__validate" ]
+    if (_enable_lint) {
+      data_deps += [ ":${target_name}__lint" ]
+    }
+
+    if (_uses_static_library) {
+      data_deps += [ invoker.static_library_provider ]
+    }
+  }
+}
+
+# Declare an Android APK target
+#
+# This target creates an Android APK containing java code, resources, assets,
+# and (possibly) native libraries.
+#
+# Supports all variables of android_apk_or_module(), plus:
+#   apk_name: Name for final apk.
+#   final_apk_path: (Optional) path to output APK.
+#
+# Example
+#   android_apk("foo_apk") {
+#     android_manifest = "AndroidManifest.xml"
+#     sources = [
+#       "android/org/chromium/foo/FooApplication.java",
+#       "android/org/chromium/foo/FooActivity.java",
+#     ]
+#     deps = [
+#       ":foo_support_java",
+#       ":foo_resources",
+#     ]
+#     srcjar_deps = [
+#       ":foo_generated_enum"
+#     ]
+#     shared_libraries = [
+#       ":my_shared_lib",
+#     ]
+#   }
+template("android_apk") {
+  # TODO(crbug.com/1042017): Remove.
+  not_needed(invoker, [ "no_build_hooks" ])
+  android_apk_or_module(target_name) {
+    forward_variables_from(
+        invoker,
+        [
+          "aapt_locale_allowlist",
+          "additional_jar_files",
+          "alternative_android_sdk_dep",
+          "android_manifest",
+          "android_manifest_dep",
+          "annotation_processor_deps",
+          "apk_under_test",
+          "app_as_shared_lib",
+          "assert_no_deps",
+          "baseline_profile_path",
+          "build_config_include_product_version_resource",
+          "bundles_supported",
+          "chromium_code",
+          "command_line_flags_file",
+          "create_apk_script",
+          "custom_assertion_handler",
+          "data",
+          "data_deps",
+          "deps",
+          "enable_lint",
+          "enable_jni_multiplexing",
+          "enable_multidex",
+          "enable_native_mocks",
+          "enable_proguard_checks",
+          "enforce_resource_overlays_in_tests",
+          "expected_android_manifest",
+          "expected_android_manifest_base",
+          "expected_android_manifest_library_version_offset",
+          "expected_android_manifest_version_code_offset",
+          "expected_libs_and_assets",
+          "expected_libs_and_assets_base",
+          "generate_buildconfig_java",
+          "generate_final_jni",
+          "generate_native_libraries_java",
+          "include_size_info",
+          "input_jars_paths",
+          "jacoco_never_instrument",
+          "javac_args",
+          "jni_file_exclusions",
+          "keystore_name",
+          "keystore_password",
+          "keystore_path",
+          "lint_baseline_file",
+          "lint_min_sdk_version",
+          "lint_suppressions_dep",
+          "lint_suppressions_file",
+          "loadable_modules",
+          "manifest_package",
+          "max_sdk_version",
+          "mergeable_android_manifests",
+          "product_config_java_packages",
+          "main_component_library",
+          "min_sdk_version",
+          "native_lib_placeholders",
+          "never_incremental",
+          "omit_dex",
+          "png_to_webp",
+          "post_process_package_resources_script",
+          "processor_args_javac",
+          "proguard_configs",
+          "proguard_enabled",
+          "proguard_enable_obfuscation",
+          "r_java_root_package_name",
+          "resource_exclusion_exceptions",
+          "resource_exclusion_regex",
+          "resource_ids_provider_dep",
+          "resource_values_filter_rules",
+          "require_native_mocks",
+          "secondary_abi_loadable_modules",
+          "secondary_abi_shared_libraries",
+          "secondary_native_lib_placeholders",
+          "shared_libraries",
+          "shared_resources",
+          "shared_resources_allowlist_locales",
+          "shared_resources_allowlist_target",
+          "sources",
+          "srcjar_deps",
+          "static_library_provider",
+          "static_library_provider_use_secondary_abi",
+          "target_sdk_version",
+          "testonly",
+          "uncompress_dex",
+          "library_always_compress",
+          "use_chromium_linker",
+          "version_code",
+          "version_name",
+          "visibility",
+        ])
+    is_bundle_module = false
+    name = invoker.apk_name
+    if (defined(invoker.final_apk_path)) {
+      final_apk_path = invoker.final_apk_path
+    } else {
+      final_apk_path = "$root_build_dir/apks/${invoker.apk_name}.apk"
+    }
+    metadata = {
+      install_artifacts = [ final_apk_path ]
+    }
+    if (defined(invoker.static_library_provider)) {
+      metadata.install_artifacts_barrier = []
+    }
+  }
+}
+
+# Declare an Android app bundle module target.
+#
+# The module can be used for an android_apk_or_module().
+#
+# Supports all variables of android_library(), plus:
+#   module_name: Name of the module.
+#   is_base_module: If defined and true, indicates that this is the bundle's
+#     base module (optional).
+#   base_module_target: Base module target of the bundle this module will be
+#     added to (optional). Can only be specified for non-base modules.
+#   bundle_target: Bundle target that this module belongs to (optional).
+#     Can only be specified for base modules.
+template("android_app_bundle_module") {
+  _is_base_module = defined(invoker.is_base_module) && invoker.is_base_module
+
+  if (_is_base_module) {
+    assert(!defined(invoker.base_module_target))
+  } else {
+    assert(!defined(invoker.app_as_shared_lib))
+    assert(!defined(invoker.shared_resources))
+    assert(!defined(invoker.shared_resources_allowlist_target))
+    assert(!defined(invoker.shared_resources_allowlist_locales))
+    assert(defined(invoker.base_module_target))
+    assert(!defined(invoker.bundle_target))
+  }
+
+  # android_app_bundle's write_build_config expects module targets to be
+  # named according to java_target_patterns, otherwise it ignores them when
+  # listed in possible_config_deps. See https://crbug.com/1418398.
+  if (filter_exclude([ target_name ], [ "*_bundle_module" ]) != []) {
+    assert(false,
+           "Invalid android_app_bundle_module target name ($target_name), " +
+               "must end in _bundle_module.")
+  }
+
+  # TODO(tiborg): We have several flags that are necessary for workarounds
+  # that come from the fact that the resources get compiled in the bundle
+  # module target, but bundle modules have to have certain flags in
+  # common or bundle modules have to know information about the base module.
+  # Those flags include version_code, version_name, and base_module_target.
+  # It would be better to move the resource compile target into the bundle
+  # target. Doing so would keep the bundle modules independent from the
+  # bundle and potentially allow reusing the same bundle modules for
+  # multiple bundles.
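+
+  # For reference, a minimal base-module sketch (all names here are
+  # illustrative, not from the original source):
+  #   android_app_bundle_module("foo_base_bundle_module") {
+  #     android_manifest = "AndroidManifest.xml"
+  #     is_base_module = true
+  #     deps = [ ":foo_java" ]
+  #   }
+  # Non-base modules set base_module_target instead of is_base_module.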
+ android_apk_or_module(target_name) { + forward_variables_from( + invoker, + [ + "add_view_trace_events", + "aapt_locale_allowlist", + "additional_jar_files", + "alternative_android_sdk_dep", + "android_manifest", + "android_manifest_dep", + "annotation_processor_deps", + "app_as_shared_lib", + "assert_no_deps", + "base_module_target", + "build_config_include_product_version_resource", + "bundle_target", + "chromium_code", + "custom_assertion_handler", + "data", + "data_deps", + "deps", + "enable_jni_multiplexing", + "enable_multidex", + "expected_android_manifest", + "expected_android_manifest_base", + "expected_android_manifest_library_version_offset", + "expected_android_manifest_version_code_offset", + "generate_buildconfig_java", + "generate_final_jni", + "generate_native_libraries_java", + "input_jars_paths", + "isolated_splits_enabled", + "is_base_module", + "jacoco_never_instrument", + "jar_excluded_patterns", + "javac_args", + "jni_file_exclusions", + "loadable_modules", + "product_config_java_packages", + "main_component_library", + "manifest_package", + "max_sdk_version", + "min_sdk_version", + "mergeable_android_manifests", + "module_name", + "native_lib_placeholders", + "package_id", + "parent_module_target", + "png_to_webp", + "processor_args_javac", + "proguard_configs", + "proguard_enabled", + "proguard_enable_obfuscation", + "resource_exclusion_exceptions", + "resource_exclusion_regex", + "resource_ids_provider_dep", + "resource_values_filter_rules", + "resources_config_paths", + "secondary_abi_loadable_modules", + "secondary_abi_shared_libraries", + "secondary_native_lib_placeholders", + "shared_libraries", + "shared_resources", + "shared_resources_allowlist_locales", + "shared_resources_allowlist_target", + "short_resource_paths", + "srcjar_deps", + "static_library_provider", + "static_library_provider_use_secondary_abi", + "strip_resource_names", + "strip_unused_resources", + "target_sdk_version", + "testonly", + "library_always_compress", + "use_chromium_linker", + "uses_split", + "version_code", + "version_name", + "visibility", + ]) + is_bundle_module = true + generate_buildconfig_java = _is_base_module + if (defined(uses_split)) { + assert(defined(parent_module_target), + "Must set parent_module_target when uses_split is set") + } + } + } + + # Declare an Android instrumentation test runner. + # + # This target creates a wrapper script to run Android instrumentation tests. + # + # Arguments: + # android_test_apk: The target containing the tests. + # + # The following args are optional: + # apk_under_test: The target being tested. + # additional_apks: Additional targets to install on device. + # data: List of runtime data file dependencies. + # data_deps: List of non-linked dependencies. + # deps: List of private dependencies. + # extra_args: Extra arguments set for test runner. + # ignore_all_data_deps: Don't build data_deps and additional_apks. + # modules: Extra dynamic feature modules to install for test target. Can + # only be used if |apk_under_test| is an Android app bundle. + # fake_modules: Similar to |modules| but fake installed instead. + # never_incremental: Disable incremental builds. 
+ # proguard_enabled: Enable proguard + # public_deps: List of public dependencies + # + # Example + # instrumentation_test_runner("foo_test_for_bar") { + # android_test_apk: ":foo" + # apk_under_test: ":bar" + # } + template("instrumentation_test_runner") { + if (use_rts) { + action("${invoker.target_name}__rts_filters") { + script = "//build/add_rts_filters.py" + rts_file = "${root_build_dir}/gen/rts/${invoker.target_name}.filter" + inverted_rts_file = + "${root_build_dir}/gen/rts/${invoker.target_name}_inverted.filter" + args = [ + rebase_path(rts_file, root_build_dir), + rebase_path(inverted_rts_file, root_build_dir), + ] + outputs = [ + rts_file, + inverted_rts_file, + ] + } + } + _incremental_apk = !(defined(invoker.never_incremental) && + invoker.never_incremental) && incremental_install + _apk_operations_target_name = "${target_name}__apk_operations" + _apk_target = invoker.android_test_apk + if (defined(invoker.apk_under_test) && !_incremental_apk) { + # The actual target is defined in the test_runner_script template. + _install_artifacts_json = + "${target_gen_dir}/${target_name}.install_artifacts" + _install_artifacts_target_name = "${target_name}__install_artifacts" + } + + action_with_pydeps(_apk_operations_target_name) { + testonly = true + script = "//build/android/gyp/create_test_apk_wrapper_script.py" + deps = [] + _generated_script = "$root_build_dir/bin/${invoker.target_name}" + outputs = [ _generated_script ] + _apk_build_config = + get_label_info(_apk_target, "target_gen_dir") + "/" + + get_label_info(_apk_target, "name") + ".build_config.json" + _rebased_apk_build_config = rebase_path(_apk_build_config, root_build_dir) + args = [ + "--script-output-path", + rebase_path(_generated_script, root_build_dir), + "--package-name", + "@FileArg($_rebased_apk_build_config:deps_info:package_name)", + ] + deps += [ "${_apk_target}$build_config_target_suffix" ] + if (_incremental_apk) { + args += [ + "--test-apk-incremental-install-json", + "@FileArg($_rebased_apk_build_config:deps_info:incremental_install_json_path)", + ] + } else { + args += [ + "--test-apk", + "@FileArg($_rebased_apk_build_config:deps_info:apk_path)", + ] + } + if (defined(invoker.proguard_mapping_path) && !_incremental_apk) { + args += [ + "--proguard-mapping-path", + rebase_path(invoker.proguard_mapping_path, root_build_dir), + ] + } + if (defined(invoker.apk_under_test)) { + if (_incremental_apk) { + deps += [ "${invoker.apk_under_test}$build_config_target_suffix" ] + _apk_under_test_build_config = + get_label_info(invoker.apk_under_test, "target_gen_dir") + "/" + + get_label_info(invoker.apk_under_test, "name") + + ".build_config.json" + _rebased_apk_under_test_build_config = + rebase_path(_apk_under_test_build_config, root_build_dir) + _apk_under_test = "@FileArg($_rebased_apk_under_test_build_config:deps_info:incremental_apk_path)" + } else { + deps += [ ":${_install_artifacts_target_name}" ] + _rebased_install_artifacts_json = + rebase_path(_install_artifacts_json, root_build_dir) + _apk_under_test = "@FileArg($_rebased_install_artifacts_json[])" + } + args += [ + "--additional-apk", + _apk_under_test, + ] + } + if (defined(invoker.additional_apks)) { + foreach(additional_apk, invoker.additional_apks) { + deps += [ "$additional_apk$build_config_target_suffix" ] + _build_config = + get_label_info(additional_apk, "target_gen_dir") + "/" + + get_label_info(additional_apk, "name") + ".build_config.json" + _rebased_build_config = rebase_path(_build_config, root_build_dir) + args += [ + "--additional-apk", + 
"@FileArg($_rebased_build_config:deps_info:apk_path)", + ] + } + deps += invoker.additional_apks + } + } + test_runner_script(target_name) { + forward_variables_from(invoker, + [ + "additional_apks", + "additional_locales", + "apk_under_test", + "data", + "data_deps", + "deps", + "extra_args", + "fake_modules", + "ignore_all_data_deps", + "is_unit_test", + "modules", + "proguard_mapping_path", + "use_webview_provider", + ]) + test_name = invoker.target_name + test_type = "instrumentation" + apk_target = invoker.android_test_apk + incremental_apk = _incremental_apk + + public_deps = [ + ":$_apk_operations_target_name", + apk_target, + ] + if (defined(invoker.apk_under_test)) { + public_deps += [ invoker.apk_under_test ] + } + if (defined(invoker.additional_apks)) { + public_deps += invoker.additional_apks + } + if (use_rts) { + if (!defined(data_deps)) { + data_deps = [] + } + data_deps += [ ":${invoker.target_name}__rts_filters" ] + } + } + } + + # Declare an Android instrumentation test apk + # + # This target creates an Android instrumentation test apk. + # + # Supports all variables of android_apk(), plus: + # apk_under_test: The apk being tested (optional). + # + # Example + # android_test_apk("foo_test_apk") { + # android_manifest = "AndroidManifest.xml" + # apk_name = "FooTest" + # apk_under_test = "Foo" + # sources = [ + # "android/org/chromium/foo/FooTestCase.java", + # "android/org/chromium/foo/FooExampleTest.java", + # ] + # deps = [ + # ":foo_test_support_java" + # ] + # } + template("android_test_apk") { + android_apk(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + testonly = true + + # The size info enables the test_runner to find the source file location + # of a test after it is ran. + include_size_info = true + data = [ "$root_build_dir/size-info/${invoker.apk_name}.apk.jar.info" ] + if (defined(invoker.data)) { + data += invoker.data + } + + deps = [ "//testing/android/broker:broker_java" ] + if (defined(invoker.deps)) { + deps += invoker.deps + } + data_deps = [ + # Ensure unstripped libraries are included in runtime deps so that + # symbolization can be done. + ":${target_name}__secondary_abi_shared_library_list", + ":${target_name}__shared_library_list", + ] + if (defined(invoker.data_deps)) { + data_deps += invoker.data_deps + } + if (defined(invoker.apk_under_test)) { + data_deps += [ invoker.apk_under_test ] + } else { + enable_native_mocks = true + } + + if (defined(invoker.apk_under_test)) { + _under_test_label = + get_label_info(invoker.apk_under_test, "label_no_toolchain") + data_deps += [ + "${_under_test_label}__secondary_abi_shared_library_list", + "${_under_test_label}__shared_library_list", + ] + } + + if (defined(invoker.additional_apks)) { + data_deps += invoker.additional_apks + } + if (defined(invoker.use_webview_provider)) { + data_deps += [ invoker.use_webview_provider ] + } + + if (defined(invoker.proguard_enabled) && invoker.proguard_enabled && + !incremental_install) { + # When ProGuard is on, we use ProGuard to combine the under test java + # code and the test java code. This is to allow us to apply all ProGuard + # optimizations that we ship with, but not have them break tests. The + # apk under test will still have the same resources, assets, and + # manifest, all of which are the ones used in the tests. 
+ proguard_configs = [ "//testing/android/proguard_for_test.flags" ] + if (defined(invoker.proguard_configs)) { + proguard_configs += invoker.proguard_configs + } + enable_proguard_checks = false + if (defined(invoker.final_apk_path)) { + _final_apk_path = final_apk_path + } else { + _final_apk_path = "$root_build_dir/apks/${invoker.apk_name}.apk" + } + data += [ "$_final_apk_path.mapping" ] + } + + create_apk_script = false + + forward_variables_from(invoker, + "*", + TESTONLY_AND_VISIBILITY + [ + "data", + "data_deps", + "deps", + "extra_args", + "is_unit_test", + "proguard_configs", + ]) + } + } + + # Declare an Android instrumentation test apk with wrapper script. + # + # This target creates an Android instrumentation test apk with wrapper script + # to run the test. + # + # Supports all variables of android_test_apk. + template("instrumentation_test_apk") { + assert(defined(invoker.apk_name)) + _apk_target_name = "${target_name}__test_apk" + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + android_test_apk(_apk_target_name) { + forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY) + } + instrumentation_test_runner(target_name) { + forward_variables_from(invoker, + [ + "additional_apks", + "apk_under_test", + "data", + "data_deps", + "deps", + "extra_args", + "ignore_all_data_deps", + "is_unit_test", + "modules", + "never_incremental", + "public_deps", + "use_webview_provider", + ]) + android_test_apk = ":${_apk_target_name}" + if (defined(invoker.proguard_enabled) && invoker.proguard_enabled) { + proguard_mapping_path = + "$root_build_dir/apks/${invoker.apk_name}.apk.mapping" + } + } + } + + # Declare an Android gtest apk + # + # This target creates an Android apk for running gtest-based unittests. + # + # Variables + # deps: Specifies the dependencies of this target. These will be passed to + # the underlying android_apk invocation and should include the java and + # resource dependencies of the apk. + # shared_library: shared_library target that contains the unit tests. + # apk_name: The name of the produced apk. If unspecified, it uses the name + # of the shared_library target suffixed with "_apk". + # use_default_launcher: Whether the default activity (NativeUnitTestActivity) + # should be used for launching tests. + # allow_cleartext_traffic: (Optional) Whether to allow cleartext network + # requests during the test. + # use_native_activity: Test implements ANativeActivity_onCreate(). + # + # Example + # unittest_apk("foo_unittests_apk") { + # deps = [ ":foo_java", ":foo_resources" ] + # shared_library = ":foo_unittests" + # } + template("unittest_apk") { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + _use_native_activity = + defined(invoker.use_native_activity) && invoker.use_native_activity + _android_manifest = "$target_gen_dir/$target_name/AndroidManifest.xml" + assert(invoker.shared_library != "") + + # This trivial assert is needed in case android_manifest is defined, + # as otherwise _use_native_activity and _android_manifest would not be used. 
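+  # (Referencing both variables in the trivially-true assert below counts as
+  # a use, which keeps "gn gen" from flagging them as unused in that case.)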
+ assert(_use_native_activity != "" && _android_manifest != "") + + if (!defined(invoker.android_manifest)) { + _allow_cleartext_traffic = defined(invoker.allow_cleartext_traffic) && + invoker.allow_cleartext_traffic + jinja_template("${target_name}_manifest") { + _native_library_name = get_label_info(invoker.shared_library, "name") + if (defined(invoker.android_manifest_template)) { + input = invoker.android_manifest_template + } else { + input = + "//testing/android/native_test/java/AndroidManifest.xml.jinja2" + } + output = _android_manifest + variables = [ + "is_component_build=${is_component_build}", + "native_library_name=${_native_library_name}", + "use_native_activity=${_use_native_activity}", + "allow_cleartext_traffic=${_allow_cleartext_traffic}", + ] + } + } + + android_apk(target_name) { + data_deps = [] + forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY) + testonly = true + create_apk_script = false + enable_native_mocks = true + + # TODO(crbug.com/1099849): Figure out why angle tests fail to launch + # with newer target_sdk_version. + if (!defined(invoker.target_sdk_version) && _use_native_activity) { + target_sdk_version = 24 + } + + assert(!defined(invoker.proguard_enabled) || !invoker.proguard_enabled || + invoker.proguard_configs != []) + + if (!defined(apk_name)) { + apk_name = get_label_info(invoker.shared_library, "name") + } + + if (!defined(android_manifest)) { + android_manifest_dep = ":${target_name}_manifest" + android_manifest = _android_manifest + } + + final_apk_path = "$root_build_dir/${apk_name}_apk/${apk_name}-debug.apk" + + if (!defined(use_default_launcher) || use_default_launcher) { + deps += [ + "//build/android/gtest_apk:native_test_instrumentation_test_runner_java", + "//testing/android/native_test:native_test_java", + ] + } + shared_libraries = [ invoker.shared_library ] + deps += [ + ":${target_name}__secondary_abi_shared_library_list", + ":${target_name}__shared_library_list", + ] + } + } + + # Generate .java files from .aidl files. + # + # This target will store the .java files in a srcjar and should be included in + # an android_library or android_apk's srcjar_deps. + # + # Variables + # sources: Paths to .aidl files to compile. + # import_include: Path to directory containing .java files imported by the + # .aidl files. + # interface_file: Preprocessed aidl file to import. 
+ # + # Example + # android_aidl("foo_aidl") { + # import_include = "java/src" + # sources = [ + # "java/src/com/foo/bar/FooBarService.aidl", + # "java/src/com/foo/bar/FooBarServiceCallback.aidl", + # ] + # } + template("android_aidl") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + + script = "//build/android/gyp/aidl.py" + depfile = "$target_gen_dir/$target_name.d" + sources = invoker.sources + + _srcjar_path = "${target_gen_dir}/${target_name}.srcjar" + _aidl_path = "${android_sdk_build_tools}/aidl" + _framework_aidl = "$android_sdk/framework.aidl" + _imports = [ _framework_aidl ] + if (defined(invoker.interface_file)) { + assert(invoker.interface_file != "") + _imports += [ invoker.interface_file ] + } + + inputs = [ _aidl_path ] + _imports + + outputs = [ _srcjar_path ] + _rebased_imports = rebase_path(_imports, root_build_dir) + args = [ + "--aidl-path", + rebase_path(_aidl_path, root_build_dir), + "--imports=$_rebased_imports", + "--srcjar", + rebase_path(_srcjar_path, root_build_dir), + "--depfile", + rebase_path(depfile, root_build_dir), + ] + if (defined(invoker.import_include) && invoker.import_include != []) { + _rebased_import_paths = [] + foreach(_import_path, invoker.import_include) { + _rebased_import_path = [] + _rebased_import_path = [ rebase_path(_import_path, root_build_dir) ] + _rebased_import_paths += _rebased_import_path + } + args += [ "--includes=$_rebased_import_paths" ] + } + args += rebase_path(sources, root_build_dir) + } + } + + # Compile a protocol buffer to java. + # + # This generates java files from protocol buffers and creates an Android library + # containing the classes. + # + # Variables + # sources (required) + # Paths to .proto files to compile. + # + # proto_path (required) + # Root directory of .proto files. + # + # deps (optional) + # Additional dependencies. Passed through to both the action and the + # android_library targets. + # + # import_dirs (optional) + # A list of extra import directories to be passed to protoc compiler. + # WARNING: This circumvents proto checkdeps, and should only be used + # when needed, typically when proto files cannot cleanly import through + # absolute paths, such as for third_party or generated .proto files. + # http://crbug.com/691451 tracks fixing this. + # + # generator_plugin_label (optional) + # GN label for plugin executable which generates custom cc stubs. + # Don't specify a toolchain, host toolchain is assumed. + # + # Example: + # proto_java_library("foo_proto_java") { + # proto_path = "src/foo" + # sources = [ "$proto_path/foo.proto" ] + # } + template("proto_java_library") { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + + _template_name = target_name + + action_with_pydeps("${_template_name}__protoc_java") { + # The suffix "__protoc_java.srcjar" is used by SuperSize to identify + # protobuf symbols. + _srcjar_path = "$target_gen_dir/$target_name.srcjar" + script = "//build/protoc_java.py" + + if (defined(invoker.deps)) { + # Need to care only about targets that might generate .proto files. + # No need to depend on java_library or android_resource targets. 
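+      # (filter_exclude() keeps only the elements of invoker.deps that do
+      # not match any pattern in java_target_patterns, i.e. the non-java
+      # deps.)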
+      deps = filter_exclude(invoker.deps, java_target_patterns)
+    }
+
+    sources = invoker.sources
+    depfile = "$target_gen_dir/$target_name.d"
+    outputs = [ _srcjar_path ]
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--protoc",
+      rebase_path(android_protoc_bin, root_build_dir),
+      "--proto-path",
+      rebase_path(invoker.proto_path, root_build_dir),
+      "--srcjar",
+      rebase_path(_srcjar_path, root_build_dir),
+    ]
+
+    if (defined(invoker.generator_plugin_label)) {
+      if (host_os == "win") {
+        _host_executable_suffix = ".exe"
+      } else {
+        _host_executable_suffix = ""
+      }
+
+      _plugin_host_label =
+          invoker.generator_plugin_label + "($host_toolchain)"
+      _plugin_path =
+          get_label_info(_plugin_host_label, "root_out_dir") + "/" +
+          get_label_info(_plugin_host_label, "name") + _host_executable_suffix
+      args += [
+        "--plugin",
+        rebase_path(_plugin_path, root_build_dir),
+      ]
+      deps += [ _plugin_host_label ]
+      inputs = [ _plugin_path ]
+    }
+
+    args += rebase_path(sources, root_build_dir)
+
+    if (defined(invoker.import_dirs)) {
+      foreach(_import_dir, invoker.import_dirs) {
+        args += [
+          "--import-dir",
+          rebase_path(_import_dir, root_build_dir),
+        ]
+      }
+    }
+  }
+
+  android_library(target_name) {
+    chromium_code = false
+    sources = []
+    srcjar_deps = [ ":${_template_name}__protoc_java" ]
+    deps = [ "//third_party/android_deps:protobuf_lite_runtime_java" ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+  }
+}
+
+# Compile a flatbuffer to java.
+#
+# This generates java files from flat buffers and creates an Android library
+# containing the classes.
+#
+# Variables
+#   sources (required)
+#       Paths to .fbs files to compile.
+#
+#   root_dir (required)
+#       Root directory of .fbs files.
+#
+#   deps (optional)
+#       Additional dependencies. Passed through to both the action and the
+#       android_library targets.
+#
+#   flatc_include_dirs (optional)
+#       A list of extra import directories to be passed to flatc compiler.
+#
+# Example:
+#   flatbuffer_java_library("foo_flatbuffer_java") {
+#     root_dir = "src/foo"
+#     sources = [ "$root_dir/foo.fbs" ]
+#   }
+template("flatbuffer_java_library") {
+  forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+
+  _template_name = target_name
+  _flatc_dep = "//third_party/flatbuffers:flatc($host_toolchain)"
+  _flatc_out_dir = get_label_info(_flatc_dep, "root_out_dir")
+  _flatc_bin = "$_flatc_out_dir/flatc"
+
+  action_with_pydeps("${_template_name}__flatc_java") {
+    _srcjar_path = "$target_gen_dir/$target_name.srcjar"
+    script = "//build/android/gyp/flatc_java.py"
+
+    deps = [ _flatc_dep ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    inputs = [ _flatc_bin ]
+
+    sources = invoker.sources
+    outputs = [ _srcjar_path ]
+    args = [
+             "--flatc",
+             rebase_path(_flatc_bin, root_build_dir),
+             "--import-dir",
+             rebase_path(invoker.root_dir, root_build_dir),
+             "--srcjar",
+             rebase_path(_srcjar_path, root_build_dir),
+           ] + rebase_path(sources, root_build_dir)
+
+    if (defined(invoker.flatc_include_dirs)) {
+      foreach(_include_dir, invoker.flatc_include_dirs) {
+        args += [
+          "--import-dir",
+          rebase_path(_include_dir, root_build_dir),
+        ]
+      }
+    }
+  }
+
+  android_library(target_name) {
+    chromium_code = false
+    sources = []
+    srcjar_deps = [ ":${_template_name}__flatc_java" ]
+    deps = [ "//third_party/flatbuffers:flatbuffers_java" ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+  }
+}
+
+# Declare an Android library target for a prebuilt AAR.
+#
+# This target creates an Android library containing java code and Android
+# resources. For libraries without resources, it will not generate
+# corresponding android_resources targets.
+#
+# To avoid slowing down "gn gen", an associated .info file must be committed
+# along with the .aar file. In order to create this file, define the target
+# and then run once with the gn arg "update_android_aar_prebuilts = true".
+#
+# Variables
+#   aar_path: Path to the AAR.
+#   info_path: Path to the .aar.info file (generated via
+#     update_android_aar_prebuilts GN arg).
+#   proguard_configs: List of proguard configs to use in final apk step for
+#     any apk that depends on this library.
+#   ignore_aidl: Whether to ignore .aidl files found with the .aar.
+#   ignore_assets: Whether to ignore assets found in the .aar.
+#   ignore_manifest: Whether to ignore creating the manifest.
+#   ignore_native_libraries: Whether to ignore .so files found in the .aar.
+#     See also extract_native_libraries.
+#   ignore_proguard_configs: Whether to ignore proguard configs.
+#   strip_resources: Whether to ignore android resources found in the .aar.
+#   custom_package: Java package for generated R.java files.
+#   extract_native_libraries: Whether to extract .so files found in the .aar.
+#     If the file contains .so, either extract_native_libraries or
+#     ignore_native_libraries must be set.
+#     TODO(jbudorick@): remove these arguments after crbug.com/522043 is
+#     fixed.
+#   requires_android: Whether this target can only be used for compiling
+#     Android related targets.
+#
+# Example
+#   android_aar_prebuilt("foo_java") {
+#     aar_path = "foo.aar"
+#   }
+template("android_aar_prebuilt") {
+  _info_path = "$target_name.info"
+  if (defined(invoker.info_path)) {
+    _info_path = invoker.info_path
+  }
+  _output_path = "${target_out_dir}/${target_name}"
+
+  # Some targets differ from other targets only by a _java suffix, so _java
+  # and _junit need to be replaced by non-empty strings to avoid duplicate
+  # targets (e.g. androidx_window_window_java vs
+  # androidx_window_window_java_java).
+  _target_name_without_java_or_junit =
+      string_replace(string_replace(target_name, "_java", "_J"),
+                     "_junit",
+                     "_U")
+
+  # This unpack target is a python action, not a valid java target. Since the
+  # java targets below depend on it, its name must not match the java patterns
+  # in internal_rules.gni.
+  _unpack_target_name = "${_target_name_without_java_or_junit}__unpack_aar"
+  _ignore_aidl = defined(invoker.ignore_aidl) && invoker.ignore_aidl
+  _ignore_assets = defined(invoker.ignore_assets) && invoker.ignore_assets
+  _ignore_manifest =
+      defined(invoker.ignore_manifest) && invoker.ignore_manifest
+  _ignore_native_libraries = defined(invoker.ignore_native_libraries) &&
+                             invoker.ignore_native_libraries
+  _ignore_proguard_configs = defined(invoker.ignore_proguard_configs) &&
+                             invoker.ignore_proguard_configs
+  _extract_native_libraries = defined(invoker.extract_native_libraries) &&
+                              invoker.extract_native_libraries
+  _strip_resources =
+      defined(invoker.strip_resources) && invoker.strip_resources
+
+  # Allow the 'resource_overlay' parameter even if there are no resources, in
+  # order to keep the logic for generated 'android_aar_prebuilt' rules simple.
+  not_needed(invoker, [ "resource_overlay" ])
+
+  _aar_common_args = [ rebase_path(invoker.aar_path, root_build_dir) ]
+  if (_strip_resources) {
+    _aar_common_args += [ "--ignore-resources" ]
+  }
+  if (defined(invoker.resource_exclusion_globs)) {
+    _aar_common_args +=
+        [ "--resource-exclusion-globs=${invoker.resource_exclusion_globs}" ]
+  }
+
+  # Scan the AAR file and determine the resources and jar files.
+  # Some libraries might not have resources; others might have two jars.
+  if (update_android_aar_prebuilts) {
+    print("Writing " + rebase_path(_info_path, "//"))
+    exec_script("//build/android/gyp/aar.py",
+                [
+                  "list",
+                  "--output",
+                  rebase_path(_info_path, root_build_dir),
+                ] + _aar_common_args)
+  }
+
+  # If "gn gen" is failing on the following line, you need to generate an
+  # .info file for your new target by running:
+  # gn gen --args='target_os="android" update_android_aar_prebuilts=true' out/tmp
+  # rm -r out/tmp
+  _scanned_files = read_file(_info_path, "scope")
+
+  _use_scanned_assets = !_ignore_assets && _scanned_files.assets != []
+  _has_resources = _scanned_files.resources != []
+
+  assert(_ignore_aidl || _scanned_files.aidl == [],
+         "android_aar_prebuilt() aidl not yet supported." +
+             " Implement or use ignore_aidl = true." +
+             " http://crbug.com/644439")
+  assert(
+      !_scanned_files.has_native_libraries ||
+          (_ignore_native_libraries || _extract_native_libraries),
+      "android_aar_prebuilt() contains .so files." +
+          " Please set ignore_native_libraries or extract_native_libraries.")
+  assert(
+      !(_ignore_native_libraries && _extract_native_libraries),
+      "ignore_native_libraries and extract_native_libraries cannot both be set.")
+  assert(!_scanned_files.has_native_libraries ||
+         _scanned_files.native_libraries != [])
+  assert(_scanned_files.has_classes_jar || _scanned_files.subjars == [])
+
+  action_with_pydeps(_unpack_target_name) {
+    script = "//build/android/gyp/aar.py"  # Unzips the AAR
+    args = [
+             "extract",
+             "--output-dir",
+             rebase_path(_output_path, root_build_dir),
+             "--assert-info-file",
+             rebase_path(_info_path, root_build_dir),
+           ] + _aar_common_args
+    inputs = [ invoker.aar_path ]
+    outputs = [ "${_output_path}/AndroidManifest.xml" ]
+    outputs +=
+        get_path_info(rebase_path(_scanned_files.resources, "", _output_path),
+                      "abspath")
+    if (_scanned_files.has_r_text_file) {
+      # Certain packages, in particular Play Services, have no R.txt even
+      # though its presence is mandated by the AAR spec. Such packages cause
+      # spurious rebuilds if this output is specified unconditionally.
+      outputs += [ "${_output_path}/R.txt" ]
+    }
+
+    if (_scanned_files.has_classes_jar) {
+      outputs += [ "${_output_path}/classes.jar" ]
+    }
+    outputs +=
+        get_path_info(rebase_path(_scanned_files.subjars, "", _output_path),
+                      "abspath")
+    if (!_ignore_proguard_configs) {
+      if (_scanned_files.has_proguard_flags) {
+        outputs += [ "${_output_path}/proguard.txt" ]
+      }
+    }
+
+    if (_extract_native_libraries && _scanned_files.has_native_libraries) {
+      outputs += get_path_info(
+              rebase_path(_scanned_files.native_libraries, "", _output_path),
+              "abspath")
+    }
+    if (_use_scanned_assets) {
+      outputs +=
+          get_path_info(rebase_path(_scanned_files.assets, "", _output_path),
+                        "abspath")
+    }
+  }
+
+  _should_process_manifest =
+      !_ignore_manifest && !_scanned_files.is_manifest_empty
+
+  # Create the android_resources target for resources.
+ if (_has_resources || _should_process_manifest) { + _res_target_name = "${target_name}__resources" + android_resources(_res_target_name) { + forward_variables_from(invoker, + [ + "custom_package", + "resource_overlay", + "testonly", + "strip_drawables", + ]) + public_deps = [ ":$_unpack_target_name" ] + if (_should_process_manifest) { + android_manifest_dep = ":$_unpack_target_name" + android_manifest = "${_output_path}/AndroidManifest.xml" + } else if (defined(_scanned_files.manifest_package) && + !defined(custom_package)) { + custom_package = _scanned_files.manifest_package + } + + sources = rebase_path(_scanned_files.resources, "", _output_path) + if (_scanned_files.has_r_text_file) { + r_text_file = "${_output_path}/R.txt" + } + } + } else if (defined(invoker.strip_drawables)) { + not_needed(invoker, [ "strip_drawables" ]) + } + + if (_ignore_manifest) { + # Having this available can be useful for DFMs that depend on AARs. It + # provides a way to have manifest entries go into the base split while + # the code goes into a DFM. + java_group("${target_name}__ignored_manifest") { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + deps = [ ":$_unpack_target_name" ] + mergeable_android_manifests = [ "${_output_path}/AndroidManifest.xml" ] + } + } + + # Create the android_assets target for assets + if (_use_scanned_assets) { + _assets_target_name = "${target_name}__assets" + android_assets(_assets_target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + deps = [ ":$_unpack_target_name" ] + renaming_sources = [] + renaming_destinations = [] + foreach(_asset_file, _scanned_files.assets) { + _original_path = + get_path_info(rebase_path(_asset_file, "", _output_path), + "abspath") + _updated_path = string_replace(_asset_file, "assets/", "", 1) + renaming_sources += [ _original_path ] + renaming_destinations += [ _updated_path ] + } + } + } + + _target_label = get_label_info(":$target_name", "label_no_toolchain") + + # Create android_java_prebuilt target for classes.jar. + if (_scanned_files.has_classes_jar) { + _java_library_vars = [ + "alternative_android_sdk_dep", + "bytecode_rewriter_target", + "enable_bytecode_checks", + "jar_excluded_patterns", + "jar_included_patterns", + "missing_classes_allowlist", + "requires_android", + "testonly", + ] + + # Create android_java_prebuilt target for extra jars within jars/. 
+ _subjar_targets = [] + foreach(_tuple, _scanned_files.subjar_tuples) { + _current_target = "${target_name}__subjar_${_tuple[0]}" + _subjar_targets += [ ":$_current_target" ] + java_prebuilt(_current_target) { + forward_variables_from(invoker, _java_library_vars) + deps = [ ":$_unpack_target_name" ] + if (!defined(requires_android)) { + requires_android = true + } + supports_android = true + jar_path = "$_output_path/${_tuple[1]}" + _base_output_name = get_path_info(jar_path, "name") + output_name = "${invoker.target_name}-$_base_output_name" + public_target_label = _target_label + } + } + + _jar_target_name = "${target_name}__classes" + java_prebuilt(_jar_target_name) { + forward_variables_from(invoker, _java_library_vars) + forward_variables_from(invoker, + [ + "deps", + "input_jars_paths", + "mergeable_android_manifests", + "proguard_configs", + ]) + if (!defined(deps)) { + deps = [] + } + deps += _subjar_targets + [ ":$_unpack_target_name" ] + if (defined(_res_target_name)) { + deps += [ ":$_res_target_name" ] + } + if (!defined(requires_android)) { + requires_android = true + } + supports_android = true + jar_path = "$_output_path/classes.jar" + aar_path = invoker.aar_path + output_name = invoker.target_name + + if (!_ignore_proguard_configs) { + if (!defined(proguard_configs)) { + proguard_configs = [] + } + if (_scanned_files.has_proguard_flags) { + proguard_configs += [ "$_output_path/proguard.txt" ] + } + } + public_target_label = _target_label + } + } + + java_group(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + public_deps = [ ":$_unpack_target_name" ] + if (defined(invoker.public_deps)) { + public_deps += invoker.public_deps + } + deps = [] + if (defined(_jar_target_name)) { + deps += [ ":$_jar_target_name" ] + + # Although subjars are meant to be private, we add them as deps here + # because in practice they seem to contain classes required to be in the + # classpath. + deps += _subjar_targets + } + if (defined(_res_target_name)) { + deps += [ ":$_res_target_name" ] + } + if (defined(_assets_target_name)) { + deps += [ ":$_assets_target_name" ] + } + } + } + + # Create an Android application bundle from one base android_apk target, + # and zero or more associated android_apk targets. + # + # Variables: + # base_module_target: Name of the android_app_bundle_module target + # corresponding to the base module for this application bundle. The + # bundle file will include the same content in its base module, though in + # a slightly different format. + # + # bundle_base_path: Optional. If set, the bundle will be output to this + # directory. Defaults to "$root_build_dir/apks". + # + # bundle_name: Optional. If set, the bundle will be output to the + # filename "${bundle_name}.aab". + # + # extra_modules: Optional list of scopes, one per extra module used by + # this bundle. Each scope must have a 'name' field that specifies the + # module name (which cannot be 'base', since this is reserved for the + # base module), and a 'module_target' field that specifies the + # corresponding android_app_bundle_module target the module is modeled on. + # + # enable_language_splits: Optional. If true, enable APK splits based + # on languages. + # + # keystore_path: optional keystore path, used only when generating APKs. + # keystore_name: optional keystore name, used only when generating APKs. + # keystore_password: optional keystore password, used only when + # generating APKs. + # rotation_config: optional .textproto to enable key rotation. + # + # command_line_flags_file: Optional.
If provided, name of the on-device + # file that will be used to store command-line arguments. The default + # is 'command_line_flags_file', but this is typically redefined to + # something more specific for certain bundles (e.g. the Chromium-based + # APKs use 'chrome-command-line', the WebView one uses + # 'webview-command-line'). + # + # proguard_enabled: Optional. True if proguarding is enabled for this + # bundle. Default is to enable this only for release builds. Note that + # this will always perform synchronized proguarding. + # + # proguard_enable_obfuscation: Whether to enable obfuscation (default=true) + # + # enable_multidex: Optional. Enable multidexing of optimized modules jars + # when using synchronized proguarding. Only applies to base module. + # + # proguard_android_sdk_dep: Optional. android_system_java_prebuilt() target + # used as a library jar for synchronized proguarding. + # + # compress_shared_libraries: Optional. Whether to compress shared libraries + # such that they are extracted upon install. + # + # system_image_locale_allowlist: List of locales that should be included + # on system APKs generated from this bundle. + # + # static_library_provider: Specifies a single target that this target will + # use as a static library APK. + # Additionally, when allotting libraries to be packaged into modules, the + # libraries packaged into the static library will be accounted for to + # avoid library duplication. Effectively, the static library will be + # treated as the parent of the base module. + # + # expected_libs_and_assets: Verify the list of included native libraries + # and assets is consistent with the given expectation file. + # expected_libs_and_assets_base: Treat expected_libs_and_assets as a diff + # with this file as the base. + # expected_proguard_config: Checks that the merged set of proguard flags + # matches the given config. + # expected_proguard_config_base: Treat expected_proguard_config as a diff + # with this file as the base. + # + # version_code: Optional. Version code of the target. + # + # is_multi_abi: If true, will add a library placeholder for the missing ABI + # if either the primary or the secondary ABI has no native libraries set. + # + # default_modules_for_testing: (optional): A list of DFMs that the wrapper + # script should install. This is for local testing only, and does not + # affect the actual DFMs in production. + # + # add_view_trace_events: (optional): If true, will add an additional step to + # add trace events to all Android views contained in the bundle. It also + # requires build argument enable_trace_event_bytecode_rewriting = true. + # + # Example: + # android_app_bundle("chrome_public_bundle") { + # base_module_target = "//chrome/android:chrome_public_apk" + # extra_modules = [ + # { # NOTE: Scopes require one field per line, and no comma separators.
+ # name = "my_module" + # module_target = ":my_module" + # }, + # ] + # } + # + template("android_app_bundle") { + _target_name = target_name + _uses_static_library = defined(invoker.static_library_provider) + _proguard_enabled = + defined(invoker.proguard_enabled) && invoker.proguard_enabled + + _min_sdk_version = default_min_sdk_version + if (defined(invoker.min_sdk_version)) { + _min_sdk_version = invoker.min_sdk_version + } + + _bundle_base_path = "$root_build_dir/apks" + if (defined(invoker.bundle_base_path)) { + _bundle_base_path = invoker.bundle_base_path + } + + _bundle_name = _target_name + if (defined(invoker.bundle_name)) { + _bundle_name = invoker.bundle_name + } + _bundle_path = "$_bundle_base_path/${_bundle_name}.aab" + _rebased_bundle_path = rebase_path(_bundle_path, root_build_dir) + + _base_target_name = get_label_info(invoker.base_module_target, "name") + _base_target_gen_dir = + get_label_info(invoker.base_module_target, "target_gen_dir") + _base_module_build_config = + "$_base_target_gen_dir/${_base_target_name}.build_config.json" + _base_module_build_config_target = + "${invoker.base_module_target}$build_config_target_suffix" + _rebased_base_module_build_config = + rebase_path(_base_module_build_config, root_build_dir) + + _modules = [ + { + name = "base" + module_target = invoker.base_module_target + build_config = _base_module_build_config + build_config_target = _base_module_build_config_target + if (_uses_static_library) { + parent = "lib" + } + }, + ] + + if (_proguard_enabled) { + _dex_target = "${_target_name}__dex" + _proguard_mapping_path = "${_bundle_path}.mapping" + } + + assert(_proguard_enabled || !defined(invoker.enable_multidex), + "Bundle only adds dexing step if proguarding is enabled.") + + if (defined(invoker.extra_modules)) { + _module_count = 0 + not_needed([ "_module_count" ]) + + foreach(_module, invoker.extra_modules) { + _module_count += 1 + assert(defined(_module.name), + "Missing 'name' field for extra module #${_module_count}.") + assert(_module.name != "base", + "Module name 'base' is reserved for the main bundle module") + assert( + defined(_module.module_target), + "Missing 'module_target' field for extra module ${_module.name}.") + _module_target = _module.module_target + _module_target_name = get_label_info(_module_target, "name") + _module_target_gen_dir = + get_label_info(_module_target, "target_gen_dir") + _module.build_config = + "$_module_target_gen_dir/${_module_target_name}.build_config.json" + _module.build_config_target = + "$_module_target$build_config_target_suffix" + _module.parent = "base" + _modules += [ _module ] + } + } + + # Make build config, which is required for synchronized proguarding. + _module_java_targets = [] + _module_build_configs = [] + _module_targets = [] + foreach(_module, _modules) { + _module_targets += [ _module.module_target ] + _module_java_targets += [ "${_module.module_target}__java" ] + _module_build_configs += [ _module.build_config ] + } + + if (_uses_static_library) { + _lib_proxy_module = { + name = "lib" + } + _static_library_target_name = + get_label_info(invoker.static_library_provider, "name") + _static_library_gen_dir = + get_label_info(invoker.static_library_provider, "target_gen_dir") + _lib_proxy_module.build_config = "$_static_library_gen_dir/$_static_library_target_name.build_config.json" + _lib_proxy_module.build_config_target = + "${invoker.static_library_provider}$build_config_target_suffix" + } + + # Allot native libraries to modules they should be packaged into. 
This is + # necessary since all libraries that are depended on by multiple modules + # have to go into base or the static shared library if it exists. + # TODO(crbug.com/1021565): It would be nice if this lived outside the + # android_app_bundle template and the static shared library would pull in + # the libs as allotted by this step. + _native_libraries_config = + "$target_gen_dir/$_target_name.native_libraries_config" + _native_libraries_config_target = "${_target_name}__allot_native_libraries" + allot_native_libraries(_native_libraries_config_target) { + modules = _modules + native_libraries_filearg_keys = [ + "native:libraries", + "native:loadable_modules", + ] + output = _native_libraries_config + if (_uses_static_library) { + modules += [ _lib_proxy_module ] + } + } + if (defined(android_app_secondary_abi)) { + _secondary_abi_native_libraries_config = + "$target_gen_dir/$_target_name.secondary_abi_native_libraries_config" + _secondary_abi_native_libraries_config_target = + "${_target_name}__allot_secondary_abi_native_libraries" + allot_native_libraries(_secondary_abi_native_libraries_config_target) { + modules = _modules + native_libraries_filearg_keys = [ + "native:secondary_abi_libraries", + "native:secondary_abi_loadable_modules", + ] + output = _secondary_abi_native_libraries_config + if (_uses_static_library) { + modules += [ _lib_proxy_module ] + } + } + } + + # Used to expose the module Java targets of the bundle. + group("${_target_name}__java") { + deps = _module_java_targets + } + group("${_target_name}__compile_resources") { + deps = [ "${invoker.base_module_target}__compile_resources" ] + } + + _build_config = "$target_gen_dir/${_target_name}.build_config.json" + _rebased_build_config = rebase_path(_build_config, root_build_dir) + _build_config_target = "$_target_name$build_config_target_suffix" + if (defined(invoker.proguard_android_sdk_dep)) { + _android_sdk_dep = invoker.proguard_android_sdk_dep + } else { + _android_sdk_dep = default_android_sdk_dep + } + + if (_proguard_enabled) { + _proguard_mapping_path = "${_bundle_path}.mapping" + _add_view_trace_events = + defined(invoker.add_view_trace_events) && + invoker.add_view_trace_events && enable_trace_event_bytecode_rewriting + } else { + not_needed(invoker, [ "add_view_trace_events" ]) + } + + write_build_config(_build_config_target) { + type = "android_app_bundle" + possible_config_deps = _module_targets + [ _android_sdk_dep ] + build_config = _build_config + proguard_enabled = _proguard_enabled + module_build_configs = _module_build_configs + modules = _modules + + if (_proguard_enabled) { + add_view_trace_events = _add_view_trace_events + proguard_mapping_path = _proguard_mapping_path + } + } + + if (_proguard_enabled) { + if (_add_view_trace_events) { + _trace_event_rewriter_target = + "//build/android/bytecode:trace_event_adder" + _rewritten_jar_target_name = "${target_name}__trace_event_rewritten" + _rewriter_path = root_build_dir + "/bin/helper/trace_event_adder" + _stamp = "${target_out_dir}/${target_name}.trace_event_rewrite.stamp" + action_with_pydeps(_rewritten_jar_target_name) { + script = "//build/android/gyp/trace_event_bytecode_rewriter.py" + inputs = [ + _rewriter_path, + _build_config, + ] + outputs = [ _stamp ] + depfile = "$target_gen_dir/$_rewritten_jar_target_name.d" + args = [ + "--stamp", + rebase_path(_stamp, root_build_dir), + "--depfile", + rebase_path(depfile, root_build_dir), + "--script", + rebase_path(_rewriter_path, root_build_dir), + "--classpath", + 
"@FileArg($_rebased_build_config:deps_info:javac_full_classpath)", + "--classpath", + "@FileArg($_rebased_build_config:android:sdk_jars)", + "--input-jars", + "@FileArg($_rebased_build_config:deps_info:device_classpath)", + "--output-jars", + "@FileArg($_rebased_build_config:deps_info:trace_event_rewritten_device_classpath)", + ] + deps = [ + ":$_build_config_target", + _trace_event_rewriter_target, + ] + _module_java_targets + } + } + + dex(_dex_target) { + forward_variables_from(invoker, + [ + "custom_assertion_handler", + "expected_proguard_config", + "expected_proguard_config_base", + "proguard_enable_obfuscation", + ]) + if (defined(expected_proguard_config)) { + top_target_name = _target_name + } + min_sdk_version = _min_sdk_version + add_view_trace_events = _add_view_trace_events + proguard_enabled = true + proguard_mapping_path = _proguard_mapping_path + build_config = _build_config + + deps = _module_java_targets + [ ":$_build_config_target" ] + if (_add_view_trace_events) { + deps += [ ":${_rewritten_jar_target_name}" ] + } + modules = _modules + } + } + + _all_create_module_targets = [] + _all_module_zip_paths = [] + _all_module_build_configs = [] + _all_module_unused_resources_deps = [] + foreach(_module, _modules) { + _module_target = _module.module_target + _module_build_config = _module.build_config + _module_build_config_target = _module.build_config_target + _module_target_name = get_label_info(_module_target, "name") + + if (!_proguard_enabled) { + _dex_target = "${_module_target_name}__final_dex" + _dex_path = "$target_out_dir/$_module_target_name/$_module_target_name.mergeddex.jar" + dex(_dex_target) { + forward_variables_from(invoker, [ "custom_assertion_handler" ]) + min_sdk_version = _min_sdk_version + output = _dex_path + build_config = _build_config + + # This will be a pure dex-merge. + input_dex_filearg = "@FileArg($_rebased_build_config:modules:${_module.name}:all_dex_files)" + enable_desugar = false + + deps = [ + ":$_build_config_target", + ":${_module_target_name}__java", + ] + } + } + _dex_target_for_module = ":$_dex_target" + + _use_baseline_profile = + _proguard_enabled && defined(invoker.baseline_profile_path) && + enable_baseline_profiles + if (_use_baseline_profile) { + _binary_profile_target = + "${_module_target_name}__binary_baseline_profile" + _binary_baseline_profile_path = "$target_out_dir/$_module_target_name/$_module_target_name.baseline.prof" + _binary_baseline_profile_metadata_path = + _binary_baseline_profile_path + "m" + create_binary_profile(_binary_profile_target) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + binary_baseline_profile_path = _binary_baseline_profile_path + binary_baseline_profile_metadata_path = + _binary_baseline_profile_metadata_path + proguard_mapping_path = _proguard_mapping_path + build_config = _module_build_config + input_profile_path = invoker.baseline_profile_path + deps = [ + _dex_target_for_module, + _module_build_config_target, + ] + } + } else { + not_needed(invoker, [ "baseline_profile_path" ]) + } + + # Generate one module .zip file per bundle module. 
+ # + # Important: the bundle tool uses the module's zip filename as + # the internal module name inside the final bundle; in other words, + # this file *must* be named ${_module.name}.zip + _create_module_target = "${_target_name}__${_module.name}__create" + _module_zip_path = "$target_out_dir/$target_name/${_module.name}.zip" + create_android_app_bundle_module(_create_module_target) { + forward_variables_from(invoker, + [ + "is_multi_abi", + "uncompress_dex", + ]) + module_name = _module.name + min_sdk_version = _min_sdk_version + build_config = _module_build_config + module_zip_path = _module_zip_path + native_libraries_config = _native_libraries_config + if (!_proguard_enabled) { + dex_path = _dex_path + # dex_path is read from the build_config in the proguard case. + } + + if (module_name == "base" && + defined(invoker.expected_libs_and_assets)) { + forward_variables_from(invoker, + [ + "expected_libs_and_assets", + "expected_libs_and_assets_base", + ]) + top_target_name = _target_name + build_config_target = _module_build_config_target + native_libraries_config_target = ":$_native_libraries_config_target" + if (defined(android_app_secondary_abi)) { + secondary_abi_native_libraries_config_target = + ":$_secondary_abi_native_libraries_config_target" + } + } + + deps = [ + ":$_native_libraries_config_target", + _dex_target_for_module, + _module_build_config_target, + _module_target, + ] + + if (defined(android_app_secondary_abi)) { + secondary_abi_native_libraries_config = + _secondary_abi_native_libraries_config + deps += [ ":$_secondary_abi_native_libraries_config_target" ] + } + + if (_use_baseline_profile) { + # extra_assets is a list of ["{src_path}:{dst_path}"] + extra_assets = [ + rebase_path(_binary_baseline_profile_path, root_build_dir) + + ":dexopt/baseline.prof", + rebase_path(_binary_baseline_profile_metadata_path, + root_build_dir) + ":dexopt/baseline.profm", + ] + deps += [ ":$_binary_profile_target" ] + } + } + + _all_create_module_targets += [ + ":$_create_module_target", + _module_build_config_target, + "${_module_target}__compile_resources", + ] + _all_module_zip_paths += [ _module_zip_path ] + _all_module_build_configs += [ _module_build_config ] + _all_module_unused_resources_deps += [ + "${_module_target}__compile_resources", + _dex_target_for_module, + _module_build_config_target, + ] + } + _strip_unused_resources = defined(invoker.strip_unused_resources) && + invoker.strip_unused_resources + if (_strip_unused_resources) { + # Resources only live in the base module, so we define the unused resources + # target only on the base module target.
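+ # The resulting config is copied next to the bundle as + # ${_bundle_path}.unused_resources, and the stripped R.txt produced here + # feeds the language-splits handling further below.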
+ _unused_resources_target = "${_base_target_name}__unused_resources" + _unused_resources_config = + "${_base_target_gen_dir}/${_base_target_name}_unused_resources.config" + _unused_resources_r_txt_out = + "${_base_target_gen_dir}/${_base_target_name}_unused_resources.R.txt" + unused_resources(_unused_resources_target) { + deps = _all_module_unused_resources_deps + all_module_build_configs = _all_module_build_configs + build_config = _base_module_build_config + if (_proguard_enabled) { + proguard_mapping_path = _proguard_mapping_path + } + output_config = _unused_resources_config + output_r_txt = _unused_resources_r_txt_out + } + _unused_resources_final_path = "${_bundle_path}.unused_resources" + _copy_unused_resources_target = + "${_base_target_name}__copy_unused_resources" + copy(_copy_unused_resources_target) { + deps = [ ":$_unused_resources_target" ] + sources = [ _unused_resources_config ] + outputs = [ _unused_resources_final_path ] + } + } + + _all_rebased_module_zip_paths = + rebase_path(_all_module_zip_paths, root_build_dir) + + _enable_language_splits = defined(invoker.enable_language_splits) && + invoker.enable_language_splits + + _split_dimensions = [] + if (_enable_language_splits) { + _split_dimensions += [ "language" ] + } + + _keystore_path = android_keystore_path + _keystore_password = android_keystore_password + _keystore_name = android_keystore_name + + if (defined(invoker.keystore_path)) { + _keystore_path = invoker.keystore_path + _keystore_password = invoker.keystore_password + _keystore_name = invoker.keystore_name + } + + _rebased_keystore_path = rebase_path(_keystore_path, root_build_dir) + + _bundle_target_name = "${_target_name}__bundle" + action_with_pydeps(_bundle_target_name) { + script = "//build/android/gyp/create_app_bundle.py" + inputs = _all_module_zip_paths + _all_module_build_configs + + [ _BUNDLETOOL_JAR_PATH ] + outputs = [ _bundle_path ] + deps = _all_create_module_targets + [ ":$_build_config_target" ] + args = [ + "--out-bundle=$_rebased_bundle_path", + "--rtxt-out-path=$_rebased_bundle_path.R.txt", + "--pathmap-out-path=$_rebased_bundle_path.pathmap.txt", + "--module-zips=$_all_rebased_module_zip_paths", + ] + if (_split_dimensions != []) { + args += [ "--split-dimensions=$_split_dimensions" ] + } + if (defined(invoker.compress_shared_libraries) && + invoker.compress_shared_libraries) { + args += [ "--compress-shared-libraries" ] + } + + # Android P+ supports loading from stored dex. + if (_min_sdk_version < 27) { + args += [ "--compress-dex" ] + } + + if (defined(invoker.rotation_config)) { + args += [ + "--rotation-config", + rebase_path(invoker.rotation_config, root_build_dir), + ] + } + + if (treat_warnings_as_errors) { + args += [ "--warnings-as-errors" ] + } + + if (_enable_language_splits) { + args += [ "--base-allowlist-rtxt-path=@FileArg($_rebased_base_module_build_config:deps_info:base_allowlist_rtxt_path)" ] + if (_strip_unused_resources) { + # Use the stripped-out R.txt file to set resources that are pinned to + # the default language split.
+ _rebased_unused_resources_r_txt_out = + rebase_path(_unused_resources_r_txt_out, root_build_dir) + inputs += [ _unused_resources_r_txt_out ] + deps += [ ":$_unused_resources_target" ] + args += + [ "--base-module-rtxt-path=$_rebased_unused_resources_r_txt_out" ] + } else { + args += [ "--base-module-rtxt-path=@FileArg($_rebased_base_module_build_config:deps_info:r_text_path)" ] + } + } + if (defined(invoker.validate_services) && invoker.validate_services) { + args += [ "--validate-services" ] + } + + foreach(_module, _modules) { + _rebased_build_config = + rebase_path(_module.build_config, root_build_dir) + args += [ + "--uncompressed-assets=@FileArg(" + + "$_rebased_build_config:uncompressed_assets)", + "--rtxt-in-paths=@FileArg(" + + "$_rebased_build_config:deps_info:r_text_path)", + "--pathmap-in-paths=@FileArg(" + + "$_rebased_build_config:deps_info:module_pathmap_path)", + "--module-name=" + _module.name, + ] + } + + # http://crbug.com/725224. Fix for bots running out of memory. + if (defined(java_cmd_pool_size)) { + pool = "//build/config/android:java_cmd_pool($default_toolchain)" + } else { + pool = "//build/toolchain:link_pool($default_toolchain)" + } + } + + # Create size info files for targets that care about size + # (have proguard enabled). + if (_proguard_enabled) { + # Merge all module targets to obtain size info files for all targets. + _all_module_targets = _module_targets + + _size_info_target = "${_target_name}__size_info" + create_size_info_files(_size_info_target) { + name = "$_bundle_name.aab" + deps = _all_module_targets + [ ":$_build_config_target" ] + module_build_configs = _all_module_build_configs + } + } + + if (_uses_static_library) { + _install_artifacts_target = "${target_name}__install_artifacts" + _install_artifacts_json = + "${target_gen_dir}/${target_name}.install_artifacts" + generated_file(_install_artifacts_target) { + output_conversion = "json" + deps = [ invoker.static_library_provider ] + outputs = [ _install_artifacts_json ] + data_keys = [ "install_artifacts" ] + rebase = root_build_dir + } + } + + # Generate a wrapper script for the bundle. + _android_aapt2_path = android_sdk_tools_bundle_aapt2 + + _bundle_apks_path = "$_bundle_base_path/$_bundle_name.apks" + _bundle_wrapper_script_dir = "$root_build_dir/bin" + _bundle_wrapper_script_path = "$_bundle_wrapper_script_dir/$_target_name" + + action_with_pydeps("${_target_name}__wrapper_script") { + script = "//build/android/gyp/create_bundle_wrapper_script.py" + inputs = [ _base_module_build_config ] + outputs = [ _bundle_wrapper_script_path ] + + # Telemetry for bundles uses the wrapper script for installation. 
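+ # That is why the wrapper script, aapt2, the keystore, and the bundle + # itself are all listed in |data| below.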
+ data = [ + _bundle_wrapper_script_path, + _android_aapt2_path, + _keystore_path, + _bundle_path, + ] + data_deps = [ + "//build/android:apk_operations_py", + "//build/android:stack_tools", + ] + + deps = [ _base_module_build_config_target ] + args = [ + "--script-output-path", + rebase_path(_bundle_wrapper_script_path, root_build_dir), + "--package-name=@FileArg($_rebased_base_module_build_config:deps_info:package_name)", + "--aapt2", + rebase_path(_android_aapt2_path, root_build_dir), + "--bundle-path", + _rebased_bundle_path, + "--bundle-apks-path", + rebase_path(_bundle_apks_path, root_build_dir), + "--target-cpu=$target_cpu", + "--keystore-path", + _rebased_keystore_path, + "--keystore-password", + _keystore_password, + "--key-name", + _keystore_name, + ] + if (defined(invoker.default_modules_for_testing)) { + args += [ "--default-modules" ] + invoker.default_modules_for_testing + } + if (defined(invoker.system_image_locale_allowlist)) { + args += [ + "--system-image-locales=${invoker.system_image_locale_allowlist}", + ] + } + if (defined(invoker.command_line_flags_file)) { + args += [ + "--command-line-flags-file", + invoker.command_line_flags_file, + ] + } + if (_uses_static_library) { + deps += [ ":$_install_artifacts_target" ] + _rebased_install_artifacts_json = + rebase_path(_install_artifacts_json, root_build_dir) + _static_library_apk_path = + "@FileArg($_rebased_install_artifacts_json[])" + args += [ + "--additional-apk", + _static_library_apk_path, + ] + } + + if (_proguard_enabled) { + args += [ + "--proguard-mapping-path", + rebase_path(_proguard_mapping_path, root_build_dir), + ] + + # Required by logcat command. + data_deps += [ "//build/android/stacktrace:java_deobfuscate" ] + data += [ _proguard_mapping_path ] + } + } + + _enable_lint = defined(invoker.enable_lint) && invoker.enable_lint && + !disable_android_lint + if (_enable_lint) { + android_lint("${target_name}__lint") { + forward_variables_from(invoker, + [ + "lint_baseline_file", + "lint_jar_path", + "lint_suppressions_file", + ]) + build_config = _build_config + build_config_dep = ":$_build_config_target" + deps = _module_java_targets + if (defined(invoker.lint_suppressions_dep)) { + deps += [ invoker.lint_suppressions_dep ] + } + if (defined(invoker.lint_min_sdk_version)) { + min_sdk_version = invoker.lint_min_sdk_version + } else { + min_sdk_version = _min_sdk_version + } + } + } else { + not_needed(invoker, + [ + "lint_baseline_file", + "lint_jar_path", + "lint_min_sdk_version", + "lint_suppressions_dep", + "lint_suppressions_file", + ]) + } + + group(_target_name) { + public_deps = [ + ":$_bundle_target_name", + ":${_target_name}__wrapper_script", + ] + if (defined(_size_info_target)) { + public_deps += [ ":$_size_info_target" ] + } + if (_enable_lint) { + if (!defined(data_deps)) { + data_deps = [] + } + data_deps += [ ":${target_name}__lint" ] + } + } + + _apks_path = "$root_build_dir/apks/$_bundle_name.apks" + action_with_pydeps("${_target_name}_apks") { + script = "//build/android/gyp/create_app_bundle_apks.py" + inputs = [ + _bundle_path, + _BUNDLETOOL_JAR_PATH, + ] + outputs = [ _apks_path ] + data = [ _apks_path ] + args = [ + "--bundle", + _rebased_bundle_path, + "--output", + rebase_path(_apks_path, root_build_dir), + "--aapt2-path", + rebase_path(android_sdk_tools_bundle_aapt2, root_build_dir), + "--keystore-path", + rebase_path(android_keystore_path, root_build_dir), + "--keystore-name", + android_keystore_name, + "--keystore-password", + android_keystore_password, + ] + if (debuggable_apks) { + 
args += [ "--local-testing" ] + } + deps = [ ":$_bundle_target_name" ] + metadata = { + install_artifacts = [ _apks_path ] + } + if (defined(invoker.static_library_provider)) { + metadata.install_artifacts_barrier = [] + } + + # http://crbug.com/725224. Fix for bots running out of memory. + if (defined(java_cmd_pool_size)) { + pool = "//build/config/android:java_cmd_pool($default_toolchain)" + } else { + pool = "//build/toolchain:link_pool($default_toolchain)" + } + } + } + + # Create an .apks file from an .aab file. The .apks file will contain the + # minimal set of .apk files needed for tracking binary size. + # The file will be created at "$bundle_path_without_extension.minimal.apks". + # + # Variables: + # bundle_path: Path to the input .aab file. + # + # Example: + # create_app_bundle_minimal_apks("minimal_apks") { + # deps = [ + # ":bundle_target", + # ] + # bundle_path = "$root_build_dir/apks/Bundle.aab" + # } + template("create_app_bundle_minimal_apks") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY + [ "deps" ]) + script = "//build/android/gyp/create_app_bundle_apks.py" + _dir = get_path_info(invoker.bundle_path, "dir") + _name = get_path_info(invoker.bundle_path, "name") + _output_path = "$_dir/$_name.minimal.apks" + outputs = [ _output_path ] + inputs = [ invoker.bundle_path ] + args = [ + "--bundle", + rebase_path(invoker.bundle_path, root_build_dir), + "--output", + rebase_path(_output_path, root_build_dir), + "--aapt2-path", + rebase_path(android_sdk_tools_bundle_aapt2, root_build_dir), + "--keystore-path", + rebase_path(android_keystore_path, root_build_dir), + "--keystore-name", + android_keystore_name, + "--keystore-password", + android_keystore_password, + "--minimal", + ] + } + } +} + +# Generate an Android resources target that contains localized strings +# describing the current locale used by the Android framework to display +# UI strings. These are used by +# org.chromium.chrome.browser.ChromeLocalizationUtils. +# +# Variables: +# ui_locales: List of Chromium locale names to generate resources for. +# +template("generate_ui_locale_resources") { + _generating_target_name = "${target_name}__generate" + _rebased_output_zip_path = rebase_path(target_gen_dir, root_gen_dir) + _output_zip = "${root_out_dir}/resource_zips/${_rebased_output_zip_path}/" + + "${target_name}.zip" + + action_with_pydeps(_generating_target_name) { + script = "//build/android/gyp/create_ui_locale_resources.py" + outputs = [ _output_zip ] + args = [ + "--locale-list=${invoker.ui_locales}", + "--output-zip", + rebase_path(_output_zip, root_build_dir), + ] + } + + android_generated_resources(target_name) { + generating_target = ":$_generating_target_name" + generated_resources_zip = _output_zip + } +} diff --git a/config/android/sdk.gni b/config/android/sdk.gni new file mode 100644 index 000000000000..fb39315c4640 --- /dev/null +++ b/config/android/sdk.gni @@ -0,0 +1,13 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# The default SDK release used by public builds. Value may differ in +# internal builds. +default_android_sdk_release = "t" + +# SDK releases against which public builds are supported. 
+public_sdk_releases = [ + "t", + "tprivacysandbox", +] diff --git a/config/android/system_image.gni b/config/android/system_image.gni new file mode 100644 index 000000000000..79f856099023 --- /dev/null +++ b/config/android/system_image.gni @@ -0,0 +1,174 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/rules.gni") + +# Creates a stub .apk suitable for use with compressed system APKs. +# +# Variables: +# package_name: Package name to use for the stub. +# version_code: Version code for the stub. +# version_name: Version name for the stub. +# package_info_from_target: Use the package name and version_code from this +# apk/bundle target. +# static_library_name: For static library apks, name for the <static-library> tag. +# static_library_version: For static library apks, version for the +# <static-library> tag (for TrichromeLibrary, we set this to be the same +# as the package's version_code) +# stub_output: Path to output stub apk (default: do not create a stub). +# +# package_name and package_info_from_target are mutually exclusive. +template("system_image_stub_apk") { + # Android requires stubs end with -Stub.apk. + assert(filter_exclude([ invoker.stub_output ], [ "*-Stub.apk" ]) == [], + "stub_output \"${invoker.stub_output}\" must end with \"-Stub.apk\"") + + _resource_apk_path = "${target_out_dir}/$target_name.ap_" + _resource_apk_target_name = "${target_name}__compile_resources" + + _manifest_target_name = "${target_name}__manifest" + _manifest_path = "$target_gen_dir/$_manifest_target_name.xml" + action("$_manifest_target_name") { + outputs = [ _manifest_path ] + script = "//build/android/gyp/create_stub_manifest.py" + args = [ + "--output", + rebase_path(_manifest_path, root_build_dir), + ] + if (defined(invoker.static_library_name)) { + args += [ + "--static-library-name", + invoker.static_library_name, + ] + + # TODO(crbug.com/1408164): Make static_library_version mandatory.
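+ # Until then, fall back to version 1 when it is not set.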
+ if (defined(invoker.static_library_version)) { + args += [ + "--static-library-version", + invoker.static_library_version, + ] + } else { + args += [ "--static-library-version=1" ] + } + } + } + + action_with_pydeps(_resource_apk_target_name) { + script = "//build/android/gyp/compile_resources.py" + inputs = [ + _manifest_path, + android_sdk_jar, + ] + outputs = [ _resource_apk_path ] + args = [ + "--aapt2-path", + rebase_path(android_sdk_tools_bundle_aapt2, root_build_dir), + "--min-sdk-version=$default_min_sdk_version", + "--target-sdk-version=$default_android_sdk_version", + "--android-manifest", + rebase_path(_manifest_path, root_build_dir), + "--arsc-path", + rebase_path(_resource_apk_path, root_build_dir), + ] + deps = [ ":$_manifest_target_name" ] + if (defined(invoker.package_name)) { + _package_name = invoker.package_name + _version_code = invoker.version_code + _version_name = invoker.version_name + } else { + _target = invoker.package_info_from_target + deps += [ "${_target}$build_config_target_suffix" ] + _build_config = get_label_info(_target, "target_gen_dir") + "/" + + get_label_info(_target, "name") + ".build_config.json" + inputs += [ _build_config ] + _rebased_build_config = rebase_path(_build_config, root_build_dir) + _package_name = "@FileArg($_rebased_build_config:deps_info:package_name)" + _version_code = "@FileArg($_rebased_build_config:deps_info:version_code)" + _version_name = "@FileArg($_rebased_build_config:deps_info:version_name)" + } + args += [ + "--rename-manifest-package=$_package_name", + "--arsc-package-name=$_package_name", + "--version-code=$_version_code", + "--version-name=$_version_name", + "--include-resources", + rebase_path(android_sdk_jar, root_build_dir), + ] + } + + package_apk(target_name) { + forward_variables_from(invoker, + [ + "keystore_name", + "keystore_path", + "keystore_password", + ]) + min_sdk_version = default_min_sdk_version + deps = [ ":$_resource_apk_target_name" ] + + packaged_resources_path = _resource_apk_path + output_apk_path = invoker.stub_output + } +} + +# Generates artifacts for system APKs. +# +# Variables: +# apk_or_bundle_target: Target that creates input bundle or apk. +# input_apk_or_bundle: Path to input .apk or .aab. +# static_library_name: For static library apks, name for the <static-library> tag. +# static_library_version: For static library apks, version for the +# <static-library> tag (for TrichromeLibrary, we set this to be the same +# as the package's version_code) +# output: Path to the output system .apk or .zip. +# fuse_apk: Fuse all apk splits into a single .apk (default: false). +# stub_output: Path to output stub apk (default: do not create a stub).
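+# +# Example (a minimal sketch; the target and paths below are hypothetical): +# system_image_apks("chrome_system_apks") { +#   apk_or_bundle_target = "//chrome/android:chrome_bundle" +#   input_apk_or_bundle = "$root_build_dir/apks/Chrome.aab" +#   output = "$root_build_dir/apks/ChromeSystem.zip" +#   fuse_apk = true +#   stub_output = "$root_build_dir/apks/Chrome-Stub.apk" +# }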
+# +template("system_image_apks") { + if (defined(invoker.stub_output)) { + _stub_apk_target_name = "${target_name}__stub" + system_image_stub_apk(_stub_apk_target_name) { + forward_variables_from(invoker, + [ + "static_library_name", + "static_library_version", + ]) + package_info_from_target = invoker.apk_or_bundle_target + stub_output = invoker.stub_output + } + } + + action_with_pydeps(target_name) { + script = "//build/android/gyp/system_image_apks.py" + deps = [ invoker.apk_or_bundle_target ] + inputs = [ invoker.input_apk_or_bundle ] + if (defined(invoker.stub_output)) { + public_deps = [ ":$_stub_apk_target_name" ] + } + outputs = [ invoker.output ] + args = [ + "--input", + rebase_path(invoker.input_apk_or_bundle, root_out_dir), + "--output", + rebase_path(invoker.output, root_out_dir), + ] + + _is_bundle = + filter_exclude([ invoker.input_apk_or_bundle ], [ "*.aab" ]) == [] + + if (_is_bundle) { + _wrapper_path = "$root_out_dir/bin/" + + get_label_info(invoker.apk_or_bundle_target, "name") + args += [ + "--bundle-wrapper", + rebase_path(_wrapper_path, root_out_dir), + ] + inputs += [ _wrapper_path ] + deps += [ "//build/android:apk_operations_py" ] + if (defined(invoker.fuse_apk) && invoker.fuse_apk) { + args += [ "--fuse-apk" ] + } + } + } +} diff --git a/config/android/test/proto/BUILD.gn b/config/android/test/proto/BUILD.gn new file mode 100644 index 000000000000..1d0f37a33951 --- /dev/null +++ b/config/android/test/proto/BUILD.gn @@ -0,0 +1,103 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/rules.gni") +import("//third_party/protobuf/proto_library.gni") + +# The purpose of these targets is test that |deps| satisfies java compilation +# dependencies, and that |import_dirs| allows us to deal with various relative +# imports to other proto dependencies. Although we should strive to avoid using +# |import_dirs| and relative import paths, preferring to use absolute imports +# whenever possible. See https://crbug.com/691451. While this target is +# primarily to test that the Java proto targets build correctly, also build the +# C++ versions of the protos as well. There are currently some configurations of +# Java protos that can be built but will not work for C++, see +# https://crbug.com/1039014, so make sure we don't create any tests that would +# violate that. 
+group("test_build_protos") { + deps = [ + ":absolute_root_proto", + ":absolute_root_proto_java", + ":relative_root_proto", + ":relative_root_proto_java", + ] +} + +proto_java_library("absolute_root_proto_java") { + proto_path = "//" + import_dirs = [ "relative_dep/" ] + sources = [ + "root/absolute_child.proto", + "root/absolute_root.proto", + ] + deps = [ + ":absolute_dep_proto_java", + ":relative_dep_proto_java", + ] +} + +proto_java_library("relative_root_proto_java") { + proto_path = "root/" + import_dirs = [ + "relative_dep/", + "//", + ] + sources = [ + "root/relative_child.proto", + "root/relative_root.proto", + ] + deps = [ + ":absolute_dep_proto_java", + ":relative_dep_proto_java", + ] +} + +proto_java_library("absolute_dep_proto_java") { + proto_path = "//" + sources = [ "absolute_dep/absolute_dep.proto" ] +} + +proto_java_library("relative_dep_proto_java") { + proto_path = "relative_dep/" + sources = [ "relative_dep/relative_dep.proto" ] +} + +proto_library("absolute_root_proto") { + proto_in_dir = "//" + import_dirs = [ "relative_dep/" ] + sources = [ + "root/absolute_child.proto", + "root/absolute_root.proto", + ] + link_deps = [ + ":absolute_dep_proto", + ":relative_dep_proto", + ] +} + +proto_library("relative_root_proto") { + proto_in_dir = "root/" + import_dirs = [ + "relative_dep/", + "//", + ] + sources = [ + "root/relative_child.proto", + "root/relative_root.proto", + ] + link_deps = [ + ":absolute_dep_proto", + ":relative_dep_proto", + ] +} + +proto_library("absolute_dep_proto") { + proto_in_dir = "//" + sources = [ "absolute_dep/absolute_dep.proto" ] +} + +proto_library("relative_dep_proto") { + proto_in_dir = "relative_dep/" + sources = [ "relative_dep/relative_dep.proto" ] +} diff --git a/config/android/test/proto/absolute_dep/absolute_dep.proto b/config/android/test/proto/absolute_dep/absolute_dep.proto new file mode 100644 index 000000000000..f4aa92a9c59b --- /dev/null +++ b/config/android/test/proto/absolute_dep/absolute_dep.proto @@ -0,0 +1,10 @@ +// Copyright 2020 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +syntax = "proto2"; + +package build.config.android.test; +option java_package = "build.config.android.test"; + +message AbsoluteDep {} diff --git a/config/android/test/proto/relative_dep/relative_dep.proto b/config/android/test/proto/relative_dep/relative_dep.proto new file mode 100644 index 000000000000..917d2c3e5445 --- /dev/null +++ b/config/android/test/proto/relative_dep/relative_dep.proto @@ -0,0 +1,10 @@ +// Copyright 2020 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +syntax = "proto2"; + +package build.config.android.test; +option java_package = "build.config.android.test"; + +message RelativeDep {} diff --git a/config/android/test/proto/root/absolute_child.proto b/config/android/test/proto/root/absolute_child.proto new file mode 100644 index 000000000000..389538c9b04e --- /dev/null +++ b/config/android/test/proto/root/absolute_child.proto @@ -0,0 +1,10 @@ +// Copyright 2020 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +syntax = "proto2"; + +package build.config.android.test; +option java_package = "build.config.android.test"; + +message AbsoluteChild {} diff --git a/config/android/test/proto/root/absolute_root.proto b/config/android/test/proto/root/absolute_root.proto new file mode 100644 index 000000000000..ad138abe06c9 --- /dev/null +++ b/config/android/test/proto/root/absolute_root.proto @@ -0,0 +1,18 @@ +// Copyright 2020 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +syntax = "proto2"; + +package build.config.android.test; +option java_package = "build.config.android.test"; + +import "build/config/android/test/proto/root/absolute_child.proto"; +import "build/config/android/test/proto/absolute_dep/absolute_dep.proto"; +import "relative_dep.proto"; + +message AbsoluteRoot { + optional AbsoluteChild absolute_child = 1; + optional AbsoluteDep absolute_dep = 2; + optional RelativeDep relative_dep = 3; +} diff --git a/config/android/test/proto/root/relative_child.proto b/config/android/test/proto/root/relative_child.proto new file mode 100644 index 000000000000..225758cbee49 --- /dev/null +++ b/config/android/test/proto/root/relative_child.proto @@ -0,0 +1,10 @@ +// Copyright 2020 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +syntax = "proto2"; + +package build.config.android.test; +option java_package = "build.config.android.test"; + +message RelativeChild {} diff --git a/config/android/test/proto/root/relative_root.proto b/config/android/test/proto/root/relative_root.proto new file mode 100644 index 000000000000..9644fa113412 --- /dev/null +++ b/config/android/test/proto/root/relative_root.proto @@ -0,0 +1,18 @@ +// Copyright 2020 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +syntax = "proto2"; + +package build.config.android.test; +option java_package = "build.config.android.test"; + +import "relative_child.proto"; +import "build/config/android/test/proto/absolute_dep/absolute_dep.proto"; +import "relative_dep.proto"; + +message RelativeRoot { + optional RelativeChild relative_child = 1; + optional AbsoluteDep absolute_dep = 2; + optional RelativeDep relative_dep = 3; +} diff --git a/config/android/test/resource_overlay/BUILD.gn b/config/android/test/resource_overlay/BUILD.gn new file mode 100644 index 000000000000..3b7936384c61 --- /dev/null +++ b/config/android/test/resource_overlay/BUILD.gn @@ -0,0 +1,60 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/rules.gni") + +# Tests for 'resource_overlay' parameter in android_resources() template. 
+ +template("test_resources") { + jinja_template_resources(target_name) { + forward_variables_from(invoker, "*") + testonly = true + variables = [ + "resource_name=$resource_name", + "resource_value=$resource_value", + ] + res_dir = "java/res_template" + resources = [ "java/res_template/values/values.xml" ] + } +} + +test_resources("dependency_tagged_dependency_resources") { + resource_overlay = true + resource_name = "resource_overlay_dependency_tagged_secret" + resource_value = 41 +} + +test_resources("dependency_tagged_root_resources") { + resource_name = "resource_overlay_dependency_tagged_secret" + resource_value = 42 + deps = [ ":dependency_tagged_dependency_resources" ] +} + +test_resources("root_tagged_dependency_resources") { + resource_name = "resource_overlay_root_tagged_secret" + resource_value = 41 +} + +test_resources("root_tagged_root_resources") { + resource_overlay = true + resource_name = "resource_overlay_root_tagged_secret" + resource_value = 42 + deps = [ ":root_tagged_dependency_resources" ] +} + +android_library("unit_device_javatests") { + testonly = true + sources = [ + "java/src/org/chromium/build/resource_overlay/ResourceOverlayTest.java", + ] + resources_package = "org.chromium.build.resource_overlay" + deps = [ + ":dependency_tagged_root_resources", + ":root_tagged_root_resources", + "//base:base_java_test_support", + "//third_party/androidx:androidx_test_monitor_java", + "//third_party/androidx:androidx_test_runner_java", + "//third_party/junit", + ] +} diff --git a/config/android/test/resource_overlay/java/res_template/values/values.xml b/config/android/test/resource_overlay/java/res_template/values/values.xml new file mode 100644 index 000000000000..13ff516535a6 --- /dev/null +++ b/config/android/test/resource_overlay/java/res_template/values/values.xml @@ -0,0 +1,10 @@ + + + + + {{resource_value}} + \ No newline at end of file diff --git a/config/android/test/resource_overlay/java/src/org/chromium/build/resource_overlay/ResourceOverlayTest.java b/config/android/test/resource_overlay/java/src/org/chromium/build/resource_overlay/ResourceOverlayTest.java new file mode 100644 index 000000000000..d42450e5ec35 --- /dev/null +++ b/config/android/test/resource_overlay/java/src/org/chromium/build/resource_overlay/ResourceOverlayTest.java @@ -0,0 +1,49 @@ +// Copyright 2020 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.build.resource_overlay; + +import static org.junit.Assert.assertEquals; + +import android.content.res.Resources; + +import androidx.test.InstrumentationRegistry; +import androidx.test.filters.SmallTest; + +import org.junit.Test; +import org.junit.runner.RunWith; + +import org.chromium.base.test.BaseJUnit4ClassRunner; +import org.chromium.base.test.util.Batch; + +/** + * Test for resource_overlay parameter in android_resources() build rule. + */ +@RunWith(BaseJUnit4ClassRunner.class) +@Batch(Batch.UNIT_TESTS) +public class ResourceOverlayTest { + /** + * Test that when an android_resources() target with resource_overlay=false has a resource with + * the same name but a different value as a dependency with resource_overlay=true that the value + * of the resource in the dependency is used. 
+ */ + @Test + @SmallTest + public void testDependencyTagged() { + Resources resources = InstrumentationRegistry.getTargetContext().getResources(); + assertEquals(41, resources.getInteger(R.integer.resource_overlay_dependency_tagged_secret)); + } + + /** + * Tests that when an android_resources() target with resource_overlay=true has a resource with + * the same name as, but a different value than, one of its dependencies, the value of the + * resource in the target with resource_overlay=true is used. + */ + @Test + @SmallTest + public void testRootTagged() { + Resources resources = InstrumentationRegistry.getTargetContext().getResources(); + assertEquals(42, resources.getInteger(R.integer.resource_overlay_root_tagged_secret)); + } +} diff --git a/config/apple/BUILD.gn b/config/apple/BUILD.gn new file mode 100644 index 000000000000..add2395d6bc5 --- /dev/null +++ b/config/apple/BUILD.gn @@ -0,0 +1,17 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/apple/symbols.gni") + +# The ldflags referenced below are handled by +# //build/toolchain/apple/linker_driver.py. +# Remove this config if a target wishes to change the arguments passed to the +# strip command during linking. This config by default strips all symbols +# from a binary, but some targets may wish to specify an exports file to +# preserve specific symbols. +config("strip_all") { + if (enable_stripping) { + ldflags = [ "-Wcrl,strip,-x,-S" ] + } +} diff --git a/config/apple/OWNERS b/config/apple/OWNERS new file mode 100644 index 000000000000..6f3324f07c8f --- /dev/null +++ b/config/apple/OWNERS @@ -0,0 +1 @@ +file://build/apple/OWNERS diff --git a/config/apple/sdk_info.py b/config/apple/sdk_info.py new file mode 100755 index 000000000000..81b06d438df1 --- /dev/null +++ b/config/apple/sdk_info.py @@ -0,0 +1,160 @@ +#!/usr/bin/env python3 +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import doctest +import itertools +import os +import plistlib +import re +import subprocess +import sys + + +# This script prints information about the build system, the operating +# system, and the iOS or Mac SDK (depending on the platform: generally +# "iphonesimulator", "iphoneos" or "macosx"). + + +def SplitVersion(version): + """Splits the Xcode version into 3 values.
+ + >>> list(SplitVersion('8.2.1.1')) + ['8', '2', '1'] + >>> list(SplitVersion('9.3')) + ['9', '3', '0'] + >>> list(SplitVersion('10.0')) + ['10', '0', '0'] + """ + version = version.split('.') + return itertools.islice(itertools.chain(version, itertools.repeat('0')), 0, 3) + + +def FormatVersion(version): + """Converts Xcode version to a format required for DTXcode in Info.plist + + >>> FormatVersion('8.2.1') + '0821' + >>> FormatVersion('9.3') + '0930' + >>> FormatVersion('10.0') + '1000' + """ + major, minor, patch = SplitVersion(version) + return ('%2s%s%s' % (major, minor, patch)).replace(' ', '0') + + +def FillXcodeVersion(settings, developer_dir): + """Fills the Xcode version and build number into |settings|.""" + if developer_dir: + xcode_version_plist_path = os.path.join(developer_dir, + 'Contents/version.plist') + with open(xcode_version_plist_path, 'rb') as f: + version_plist = plistlib.load(f) + settings['xcode_version'] = FormatVersion( + version_plist['CFBundleShortVersionString']) + settings['xcode_version_int'] = int(settings['xcode_version'], 10) + settings['xcode_build'] = version_plist['ProductBuildVersion'] + return + + lines = subprocess.check_output(['xcodebuild', + '-version']).decode('UTF-8').splitlines() + settings['xcode_version'] = FormatVersion(lines[0].split()[-1]) + settings['xcode_version_int'] = int(settings['xcode_version'], 10) + settings['xcode_build'] = lines[-1].split()[-1] + + +def FillMachineOSBuild(settings): + """Fills OS build number into |settings|.""" + machine_os_build = subprocess.check_output(['sw_vers', '-buildVersion' + ]).decode('UTF-8').strip() + settings['machine_os_build'] = machine_os_build + + +def FillSDKPathAndVersion(settings, platform, xcode_version): + """Fills the SDK path and version for |platform| into |settings|.""" + settings['sdk_path'] = subprocess.check_output( + ['xcrun', '-sdk', platform, '--show-sdk-path']).decode('UTF-8').strip() + settings['sdk_version'] = subprocess.check_output( + ['xcrun', '-sdk', platform, + '--show-sdk-version']).decode('UTF-8').strip() + settings['sdk_platform_path'] = subprocess.check_output( + ['xcrun', '-sdk', platform, + '--show-sdk-platform-path']).decode('UTF-8').strip() + settings['sdk_build'] = subprocess.check_output( + ['xcrun', '-sdk', platform, + '--show-sdk-build-version']).decode('UTF-8').strip() + settings['toolchains_path'] = os.path.join( + subprocess.check_output(['xcode-select', + '-print-path']).decode('UTF-8').strip(), + 'Toolchains/XcodeDefault.xctoolchain') + + +def CreateXcodeSymlinkAt(src, dst, root_build_dir): + """Create symlink to Xcode directory at target location.""" + + if not os.path.isdir(dst): + os.makedirs(dst) + + dst = os.path.join(dst, os.path.basename(src)) + updated_value = os.path.join(root_build_dir, dst) + + # Update the symlink only if it is different from the current destination. + if os.path.islink(dst): + current_src = os.readlink(dst) + if current_src == src: + return updated_value + os.unlink(dst) + sys.stderr.write('existing symlink %s points %s; want %s. Removed.' 
% + (dst, current_src, src)) + os.symlink(src, dst) + return updated_value + + +def main(): + doctest.testmod() + + parser = argparse.ArgumentParser() + parser.add_argument('--developer_dir') + parser.add_argument('--get_sdk_info', + action='store_true', + default=False, + help='Returns SDK info in addition to xcode info.') + parser.add_argument('--get_machine_info', + action='store_true', + default=False, + help='Returns machine info in addition to xcode info.') + parser.add_argument('--create_symlink_at', + help='Create symlink of SDK at given location and ' + 'returns the symlinked paths as SDK info instead ' + 'of the original location.') + parser.add_argument('--root_build_dir', + default='.', + help='Value of gn $root_build_dir') + parser.add_argument('platform', + choices=['iphoneos', 'iphonesimulator', 'macosx']) + args = parser.parse_args() + if args.developer_dir: + os.environ['DEVELOPER_DIR'] = args.developer_dir + + settings = {} + if args.get_machine_info: + FillMachineOSBuild(settings) + FillXcodeVersion(settings, args.developer_dir) + if args.get_sdk_info: + FillSDKPathAndVersion(settings, args.platform, settings['xcode_version']) + + for key in sorted(settings): + value = settings[key] + if args.create_symlink_at and '_path' in key: + value = CreateXcodeSymlinkAt(value, args.create_symlink_at, + args.root_build_dir) + if isinstance(value, str): + value = '"%s"' % value + print('%s=%s' % (key, value)) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/config/apple/symbols.gni b/config/apple/symbols.gni new file mode 100644 index 000000000000..3b4dee447c25 --- /dev/null +++ b/config/apple/symbols.gni @@ -0,0 +1,30 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/chrome_build.gni") +import("//build/config/sanitizers/sanitizers.gni") + +# This file declares arguments and configs that control whether dSYM debug +# info is produced and whether build products are stripped. + +declare_args() { + # Produce dSYM files for targets that are configured to do so. dSYM + # generation is controlled globally as it is a linker output (produced via + # //build/toolchain/apple/linker_driver.py). Enabling this will result in + # all shared library, loadable module, and executable targets having a dSYM + # generated. + enable_dsyms = is_official_build || using_sanitizer + + # Strip symbols from linked targets by default. If this is enabled, the + # //build/config/apple:strip_all config will be applied to all linked targets. + # If custom stripping parameters are required, remove that config from a + # linked target and apply custom -Wcrl,strip flags. See + # //build/toolchain/apple/linker_driver.py for more information. + enable_stripping = is_official_build +} + +# Save unstripped copies of targets with a ".unstripped" suffix. This is +# useful to preserve the original output when enable_stripping=true but +# we're not actually generating real dSYMs. +save_unstripped_output = enable_stripping && !enable_dsyms diff --git a/config/arm.gni b/config/arm.gni new file mode 100644 index 000000000000..cc82ed5ba922 --- /dev/null +++ b/config/arm.gni @@ -0,0 +1,150 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file.
+ +import("//build/config/chromeos/ui_mode.gni") +import("//build/config/v8_target_cpu.gni") + +# These are primarily relevant in current_cpu == "arm" contexts, where +# ARM code is being compiled. But they can also be relevant in the +# other contexts when the code will change its behavior based on the +# cpu it wants to generate code for. +if (current_cpu == "arm" || v8_current_cpu == "arm") { + declare_args() { + # Version of the ARM processor when compiling on ARM. Ignored on non-ARM + # platforms. + arm_version = 7 + + # The ARM architecture. This will be a string like "armv6" or "armv7-a". + # An empty string means to use the default for the arm_version. + arm_arch = "" + + # The ARM floating point hardware. This will be a string like "neon" or + # "vfpv3". An empty string means to use the default for the arm_version. + arm_fpu = "" + + # The ARM variant-specific tuning mode. This will be a string like "armv6" + # or "cortex-a15". An empty string means to use the default for the + # arm_version. + arm_tune = "" + + # Whether to use the neon FPU instruction set or not. + arm_use_neon = "" + + # Whether to enable optional NEON code paths. + arm_optionally_use_neon = false + + # Thumb is a reduced instruction set available on some ARM processors that + # has increased code density. + arm_use_thumb = true + } + + # For lacros build, we use ARM v8 by default. + if (is_chromeos_lacros && arm_arch == "") { + arm_version = 8 + arm_arch = "armv8-a+crc" + } + + if (current_os == "android" || target_os == "android") { + arm_float_abi = "softfp" + } else { + declare_args() { + # The ARM floating point mode. This is either the string "hard", "soft", + # or "softfp". An empty string means to use the default one for the + # arm_version. + arm_float_abi = "" + } + } + assert(arm_float_abi == "" || arm_float_abi == "hard" || + arm_float_abi == "soft" || arm_float_abi == "softfp") + + if (arm_use_neon == "") { + if (current_os == "linux" && target_cpu != v8_target_cpu) { + # Don't use neon on V8 simulator builds as a default. + arm_use_neon = false + } else { + arm_use_neon = true + } + } + + if (arm_version == 6) { + if (arm_arch == "") { + # v8 can still with version 6 but only with the armv6k extension. + arm_arch = "armv6k" + } + if (arm_tune != "") { + arm_tune = "" + } + if (arm_float_abi == "") { + arm_float_abi = "softfp" + } + if (arm_fpu == "") { + arm_fpu = "vfp" + } + arm_use_thumb = false + arm_use_neon = false + } else if (arm_version == 7) { + if (arm_arch == "") { + arm_arch = "armv7-a" + } + + if (arm_float_abi == "") { + if (current_os == "linux" && target_cpu != v8_target_cpu) { + # Default to the same as Android for V8 simulator builds. + arm_float_abi = "softfp" + } else { + arm_float_abi = "hard" + } + } + + if (arm_fpu == "") { + if (arm_use_neon) { + arm_fpu = "neon" + } else { + arm_fpu = "vfpv3-d16" + } + } + } else if (arm_version == 8) { + if (arm_arch == "") { + arm_arch = "armv8-a" + } + if (arm_tune == "") { + arm_tune = "generic-armv8-a" + } + + if (arm_float_abi == "") { + arm_float_abi = "hard" + } + + if (arm_fpu == "") { + if (arm_use_neon) { + arm_fpu = "neon" + } else { + arm_fpu = "vfpv3-d16" + } + } + } +} else if (current_cpu == "arm64" || v8_current_cpu == "arm64") { + # arm64 supports only "hard". + arm_float_abi = "hard" + arm_use_neon = true + declare_args() { + # Enables the new Armv8 branch protection features. 
Valid strings are: + # - "pac": Enables Pointer Authentication Code (PAC, featured in Armv8.3) + # - "standard": Enables both PAC and Branch Target Identification (Armv8.5). + # - "none": No branch protection. + arm_control_flow_integrity = "none" + + if ((is_android || is_linux) && target_cpu == "arm64") { + # Enable PAC and BTI on AArch64 Linux/Android systems. + # target_cpu == "arm64" filters out some cases (e.g. the ChromeOS x64 + # MSAN build) where the target platform is x64, but V8 is configured to + # use the arm64 simulator. + arm_control_flow_integrity = "standard" + } + } + assert(arm_control_flow_integrity == "none" || + arm_control_flow_integrity == "standard" || + arm_control_flow_integrity == "pac", + "Invalid branch protection option") +} diff --git a/config/buildflags_paint_preview.gni b/config/buildflags_paint_preview.gni new file mode 100644 index 000000000000..951b6608ecdd --- /dev/null +++ b/config/buildflags_paint_preview.gni @@ -0,0 +1,16 @@ +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/chromecast_build.gni") +import("//build/config/features.gni") + +declare_args() { + # Enable basic paint preview support. Does not work on iOS. Should + # not be included with Chromecast hardware devices. + # Used by //components/paint_preview and //third_party/harfbuzz-ng. + # TODO(crbug.com/webrtc/11223) Move back this file in + # //components/paint_preview/ once WebRTC doesn't roll harfbuzz-ng anymore, + # for consistency sake. + enable_paint_preview = !is_castos && !is_cast_android && !is_ios +} diff --git a/config/c++/BUILD.gn b/config/c++/BUILD.gn new file mode 100644 index 000000000000..c00dcef4cd1e --- /dev/null +++ b/config/c++/BUILD.gn @@ -0,0 +1,100 @@ +import("//build/config/c++/c++.gni") +import("//build/config/chrome_build.gni") +import("//build/config/chromeos/ui_mode.gni") +import("//build/config/compiler/compiler.gni") +import("//build/config/dcheck_always_on.gni") +import("//buildtools/deps_revisions.gni") + +assert(use_custom_libcxx, "should only be used if use_custom_libcxx is set") + +# This is included by reference in the //build/config/compiler:runtime_library +# config that is applied to all targets. It is here to separate out the logic +# that is specific to libc++. Please see that target for advice on what should +# go in :runtime_library vs. :compiler. +config("runtime_library") { + cflags = [] + cflags_cc = [] + defines = [] + include_dirs = [] + ldflags = [] + libs = [] + + # Fixed libc++ configuration macros are in + # buildtools/third_party/libc++/__config_site. This config only has defines + # that vary depending on gn args, and non-define flags. + + if (!libcxx_is_shared) { + # Don't leak any symbols on a static build. + defines += [ "_LIBCPP_DISABLE_VISIBILITY_ANNOTATIONS" ] + if (!export_libcxxabi_from_executables && !is_win) { + defines += [ "_LIBCXXABI_DISABLE_VISIBILITY_ANNOTATIONS" ] + } + } + + include_dirs += [ "//buildtools/third_party/libc++" ] + + # libc++ has two levels of additional checking: + # 1. _LIBCPP_ENABLE_ASSERTIONS enables assertions for bounds checking. + # We always enable this in __config_site, in all build configurations. + # 2. _LIBCPP_ENABLE_DEBUG_MODE enables iterator debugging and other + # expensive checks. Enable these only if enable_iterator_debugging is on. 
+ if (enable_iterator_debugging) { + defines += [ "_LIBCPP_ENABLE_DEBUG_MODE" ] + } + + defines += [ "CR_LIBCXX_REVISION=$libcxx_revision" ] + + if (is_win) { + # Intentionally not using libc++abi on Windows because libc++abi only + # implements the Itanium C++ ABI, and not the Microsoft ABI which we use on + # Windows (and we need to use in order to interoperate correctly with COM + # among other things). + assert(!export_libcxxabi_from_executables, + "Don't use libcxxabi on Windows.") + + cflags_cc += + [ "-I" + rebase_path("$libcxx_prefix/include", root_build_dir) ] + + # Add a debug visualizer for Microsoft's debuggers so that they can display + # libc++ types well. + if (libcxx_natvis_include) { + # chrome.natvis listed as an input in //buildtools/third_party/libc++ to + # guarantee relinking on changes. + ldflags += [ "/NATVIS:" + rebase_path("libc++.natvis", root_build_dir) ] + } + } else { + cflags_cc += [ + "-nostdinc++", + "-isystem" + rebase_path("$libcxx_prefix/include", root_build_dir), + "-isystem" + rebase_path("$libcxxabi_prefix/include", root_build_dir), + ] + + cflags_objcc = cflags_cc + + # Make sure we don't link against the system libstdc++ or libc++. + if (is_clang) { + ldflags += [ "-nostdlib++" ] + } else { + # Gcc has a built-in abs() definition with default visibility. + # If it was not disabled, it would conflict with libc++'s abs() + # with hidden visibility. + cflags += [ "-fno-builtin-abs" ] + + ldflags += [ "-nodefaultlibs" ] + + # Unfortunately, there's no way to disable linking against just libc++ + # (gcc doesn't have -notstdlib++: + # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=83931); -nodefaultlibs + # removes all of the default libraries, so add back the ones that we need. + libs += [ + "c", + "gcc_s", + "m", + "rt", + ] + } + } + if (use_custom_libcxx && enable_safe_libcxx) { + defines += [ "_LIBCPP_ENABLE_ASSERTIONS=1" ] + } +} diff --git a/config/c++/c++.gni b/config/c++/c++.gni new file mode 100644 index 000000000000..25ece4cdbe14 --- /dev/null +++ b/config/c++/c++.gni @@ -0,0 +1,89 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/sanitizers/sanitizers.gni") +import("//build_overrides/build.gni") + +declare_args() { + # Use in-tree libc++ (buildtools/third_party/libc++ and + # buildtools/third_party/libc++abi) instead of the system C++ library for C++ + # standard library support. + # Don't check in changes that set this to false for more platforms; doing so + # is not supported. + use_custom_libcxx = is_fuchsia || is_android || is_apple || is_linux || + is_chromeos || (is_win && is_clang) + + # Use libc++ instead of stdlibc++ when using the host_cpu toolchain, even if + # use_custom_libcxx is false. This is useful for cross-compiles where a custom + # toolchain for the target_cpu has been set as the default toolchain, but + # use_custom_libcxx should still be true when building for the host. The + # expected usage is to set use_custom_libcxx=false and + # use_custom_libcxx_for_host=true in the passed in buildargs. + use_custom_libcxx_for_host = false + + # Builds libcxx Natvis into the symbols for type visualization. + # Set to false to workaround http://crbug.com/966676 and + # http://crbug.com/966687. + libcxx_natvis_include = true + + # When set, enables libc++ debug mode with iterator debugging. + # + # Iterator debugging is generally useful for catching bugs. 
But it can + # introduce extra locking to check the state of an iterator against the state + # of the current object. For iterator- and thread-heavy code, this can + # significantly slow execution - two orders of magnitude slowdown has been + # seen (crbug.com/903553) and iterator debugging also slows builds by making + # generation of snapshot_blob.bin take ~40-60 s longer. Therefore this + # defaults to off. + enable_iterator_debugging = false +} + +use_custom_libcxx = + use_custom_libcxx || (use_custom_libcxx_for_host && !is_a_target_toolchain) +use_custom_libcxx = use_custom_libcxx && !is_nacl + +declare_args() { + # WARNING: Setting this to a non-default value is highly discouraged. + # If true, libc++ will be built as a shared library; otherwise libc++ will be + # linked statically. Setting this to something other than the default is + # unsupported and can be broken by libc++ rolls. Note that if this is set to + # true, you must also set libcxx_abi_unstable=false, which is bad for + # performance and memory use. + libcxx_is_shared = use_custom_libcxx && is_component_build +} + +# TODO(https://crbug.com/1385662): This is temporarily guarded to make it easier +# to roll out this change. Once the various projects (ANGLE, v8, et cetera) +# rolling in Chrome's //build have updated, remove this entirely. +if (!defined(enable_safe_libcxx)) { + enable_safe_libcxx = true +} + +# libc++abi needs to be exported from executables to be picked up by shared +# libraries on certain instrumented builds. +export_libcxxabi_from_executables = + use_custom_libcxx && !is_apple && !is_win && !is_component_build && + (is_asan || is_ubsan_vptr) + +# On Android, many shared libraries get loaded from the context of a JRE. In +# this case, there's no "main executable" to export libc++abi from. We could +# export libc++abi from each "toplevel" shared library instead, but that would +# require adding an explicit dependency for each one, and might introduce +# subtle, hard-to-fix problems down the line if the dependency is missing. +# +# export_libcxxabi_from_executables was added to avoid having an RPATH set in +# static sanitizer builds just for executables to find libc++. But on Android, +# the Bionic dynamic loader doesn't even look at RPATH; instead, LD_LIBRARY_PATH +# is set for tests. Because of this, we make libc++ a shared library on android +# since it should get loaded properly. 
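Taken together, the linkage rules above reduce to a small derivation; a minimal Python sketch (names mirror the GN args, including the Android adjustment applied by the block just below; illustrative only, not part of the build):

```python
# Illustrative sketch of the libc++ linkage derivation in c++.gni.
def libcxx_linkage(is_component_build, is_apple, is_win, is_android,
                   is_asan=False, is_ubsan_vptr=False, use_custom_libcxx=True):
    libcxx_is_shared = use_custom_libcxx and is_component_build
    export_libcxxabi_from_executables = (
        use_custom_libcxx and not is_apple and not is_win
        and not is_component_build and (is_asan or is_ubsan_vptr))
    # Android has no "main executable" to export libc++abi from, so fall
    # back to a shared libc++ instead (see the comment above).
    if is_android and export_libcxxabi_from_executables:
        export_libcxxabi_from_executables = False
        libcxx_is_shared = True
    return libcxx_is_shared, export_libcxxabi_from_executables

# A static ASan Linux build exports libc++abi from the executable:
print(libcxx_linkage(False, False, False, False, is_asan=True))  # (False, True)
```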
+if (is_android && export_libcxxabi_from_executables) { + export_libcxxabi_from_executables = false + libcxx_is_shared = true +} + +libcxx_prefix = "//buildtools/third_party/libc++/trunk" +libcxxabi_prefix = "//buildtools/third_party/libc++abi/trunk" + +assert(!(is_ios && libcxx_is_shared), + "Can't build libc++ as a shared library on iOS.") diff --git a/config/c++/libc++.natvis b/config/c++/libc++.natvis new file mode 100644 index 000000000000..6378548dd2d9 --- /dev/null +++ b/config/c++/libc++.natvis @@ -0,0 +1,435 @@ + + + + + + {__value_} + + __value_ + + + + {*($T1*)this} + + *($T1*)this + + + + + {{ size={$T2} }} + + + $T2 + __elems_ + + + + + + + + + + + {*(char**)this} + {(char*)this} + *(char**)this + (char*)this + + ((size_t*)this)[1] + *(((char*)this) + 3*sizeof(size_t) - 1) + + ((size_t*)this)[2] & (~((size_t)0) >> 1) + + 22 + + ((size_t*)this)[1] + + *(((char*)this) + 3*sizeof(size_t) - 1) + + *(char**)this + (char*)this + + + + + + + {*(wchar_t**)this} + {(wchar_t*)this} + *(wchar_t**)this + (wchar_t*)this + + ((size_t*)this)[1] + *(((char*)this) + 3*sizeof(size_t) - 1) + + ((size_t*)this)[2] & (~((size_t)0) >> 1) + + 10 + + ((size_t*)this)[1] + + *(((char*)this) + 3*sizeof(size_t) - 1) + + *(wchar_t**)this + (wchar_t*)this + + + + + + + + {{ size={size()} }} + + + size() + + *(*(__map_.__begin_ + ($i + __start_) / block_size()) + + ($i + __start_) % block_size()) + + + + + + + + empty + non-empty + + + head() + __next_ + __value_ + + + + + + + {{ size={__size_} }} + + + __size_ + __begin_ + + + + + + + {{ size={size()} }} + + + size() + __end_.__next_ + __next_ + + ((std::Cr::list<$T1,$T2>::__node_pointer)this) + ->__value_ + + + + + + + + {{ size={size()} }} + + size() + + size() + + ((__node_pointer)&__tree_.__pair1_)->__left_ + + + ((std::Cr::map<$T1,$T2,$T3,$T4>::__node_pointer)this) + ->__left_ + + + ((std::Cr::map<$T1,$T2,$T3,$T4>::__node_pointer)this) + ->__right_ + + + ((std::Cr::map<$T1,$T2,$T3,$T4>::__node_pointer)this) + ->__value_.__cc_ + + + + + + + + {{ size={size()} }} + + size() + + size() + + ((__node_pointer)&__tree_.__pair1_)->__left_ + + + ((std::Cr::multimap<$T1,$T2,$T3,$T4>::__node_pointer)this) + ->__left_ + + + ((std::Cr::multimap<$T1,$T2,$T3,$T4>::__node_pointer)this) + ->__right_ + + + ((std::Cr::multimap<$T1,$T2,$T3,$T4>::__node_pointer)this) + ->__value_.__cc_ + + + + + + + + {{ size={size()} }} + + size() + + size() + + ((__base::__node_pointer)&__tree_.__pair1_)->__left_ + + + ((std::Cr::multiset<$T1,$T2,$T3>::__base::__node_pointer)this) + ->__left_ + + + ((std::Cr::multiset<$T1,$T2,$T3>::__base::__node_pointer)this) + ->__right_ + + + ((std::Cr::multiset<$T1,$T2,$T3>::__base::__node_pointer)this) + ->__value_ + + + + + + + {c} + + c + comp + + + + + + {{ size={size()} }} + + size() + + size() + + ((__base::__node_pointer)&__tree_.__pair1_)->__left_ + + + ((std::Cr::set<$T1,$T2,$T3>::__base::__node_pointer)this) + ->__left_ + + + ((std::Cr::set<$T1,$T2,$T3>::__base::__node_pointer)this) + ->__right_ + + + ((std::Cr::set<$T1,$T2,$T3>::__base::__node_pointer)this) + ->__value_ + + + + + + + + {c} + + c + + + + + {__value_} + + + + () + + + + ({(std::Cr::__tuple_leaf<0,$T1,0>)__base_}) + + (std::Cr::__tuple_leaf<0,$T1,0>)__base_ + + + + + ({(std::Cr::__tuple_leaf<0,$T1,0>)__base_}, {(std::Cr::__tuple_leaf<1,$T2,0>)__base_}) + + (std::Cr::__tuple_leaf<0,$T1,0>)__base_ + (std::Cr::__tuple_leaf<1,$T2,0>)__base_ + + + + + ({(std::Cr::__tuple_leaf<0,$T1,0>)__base_}, {(std::Cr::__tuple_leaf<1,$T2,0>)__base_}, {(std::Cr::__tuple_leaf<2,$T3,0>)__base_}) + 
+ (std::Cr::__tuple_leaf<0,$T1,0>)__base_ + (std::Cr::__tuple_leaf<1,$T2,0>)__base_ + (std::Cr::__tuple_leaf<2,$T3,0>)__base_ + + + + + ({(std::Cr::__tuple_leaf<0,$T1,0>)__base_}, {(std::Cr::__tuple_leaf<1,$T2,0>)__base_}, {(std::Cr::__tuple_leaf<2,$T3,0>)__base_}, {(std::Cr::__tuple_leaf<3,$T4,0>)__base_}) + + (std::Cr::__tuple_leaf<0,$T1,0>)__base_ + (std::Cr::__tuple_leaf<1,$T2,0>)__base_ + (std::Cr::__tuple_leaf<2,$T3,0>)__base_ + (std::Cr::__tuple_leaf<3,$T4,0>)__base_ + + + + + ({(std::Cr::__tuple_leaf<0,$T1,0>)__base_}, {(std::Cr::__tuple_leaf<1,$T2,0>)__base_}, {(std::Cr::__tuple_leaf<2,$T3,0>)__base_}, {(std::Cr::__tuple_leaf<3,$T4,0>)__base_}, {(std::Cr::__tuple_leaf<4,$T5,0>)__base_}) + + (std::Cr::__tuple_leaf<0,$T1,0>)__base_ + (std::Cr::__tuple_leaf<1,$T2,0>)__base_ + (std::Cr::__tuple_leaf<2,$T3,0>)__base_ + (std::Cr::__tuple_leaf<3,$T4,0>)__base_ + (std::Cr::__tuple_leaf<4,$T5,0>)__base_ + + + + + ({(std::Cr::__tuple_leaf<0,$T1,0>)__base_}, {(std::Cr::__tuple_leaf<1,$T2,0>)__base_}, {(std::Cr::__tuple_leaf<2,$T3,0>)__base_}, {(std::Cr::__tuple_leaf<3,$T4,0>)__base_}, {(std::Cr::__tuple_leaf<4,$T5,0>)__base_}, {(std::Cr::__tuple_leaf<5,$T6,0>)__base_}) + + (std::Cr::__tuple_leaf<0,$T1,0>)__base_ + (std::Cr::__tuple_leaf<1,$T2,0>)__base_ + (std::Cr::__tuple_leaf<2,$T3,0>)__base_ + (std::Cr::__tuple_leaf<3,$T4,0>)__base_ + (std::Cr::__tuple_leaf<4,$T5,0>)__base_ + (std::Cr::__tuple_leaf<5,$T6,0>)__base_ + + + + + ({(std::Cr::__tuple_leaf<0,$T1,0>)__base_}, {(std::Cr::__tuple_leaf<1,$T2,0>)__base_}, {(std::Cr::__tuple_leaf<2,$T3,0>)__base_}, {(std::Cr::__tuple_leaf<3,$T4,0>)__base_}, {(std::Cr::__tuple_leaf<4,$T5,0>)__base_}, {(std::Cr::__tuple_leaf<5,$T6,0>)__base_}, {(std::Cr::__tuple_leaf<6,$T7,0>)__base_}) + + (std::Cr::__tuple_leaf<0,$T1,0>)__base_ + (std::Cr::__tuple_leaf<1,$T2,0>)__base_ + (std::Cr::__tuple_leaf<2,$T3,0>)__base_ + (std::Cr::__tuple_leaf<3,$T4,0>)__base_ + (std::Cr::__tuple_leaf<4,$T5,0>)__base_ + (std::Cr::__tuple_leaf<5,$T6,0>)__base_ + (std::Cr::__tuple_leaf<6,$T7,0>)__base_ + + + + + + value() + empty + + unique_ptr {value()} + + value() + + + + + + + + + + {{ size={size()} }} + + bucket_count() + + bucket_count() != 0 ? (float)size() / bucket_count() : 0.f + *(float*)&__table_.__p3_ + + + + size() + + (*(__table::__node_pointer*)&node)->__value_ + node = node->__next_ + + + + + + + {__cc} + + __cc + + + + + + {{ size={size()} }} + + + size() + __begin_ + + + + diff --git a/config/chrome_build.gni b/config/chrome_build.gni new file mode 100644 index 000000000000..b5156d5c677a --- /dev/null +++ b/config/chrome_build.gni @@ -0,0 +1,68 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +declare_args() { + # Select the desired branding flavor. False means normal Chromium branding, + # true means official Google Chrome branding (requires extra Google-internal + # resources). + is_chrome_branded = false + + # Whether to enable the Chrome for Testing (CfT) flavor. This arg is not + # compatible with `is_chrome_branded`. + # + # Design document: https://goo.gle/chrome-for-testing + is_chrome_for_testing = false + + # Whether to use internal Chrome for Testing (CfT). + # Requires `src-internal/` and `is_chrome_for_testing = true`. + # + # When true, use Google-internal icons, otherwise fall back to Chromium icons. 
+ is_chrome_for_testing_branded = false + + # Set to true to enable settings for high-end Android devices, typically + # enhancing speed at the expense of resources such as binary sizes and memory. + is_high_end_android = false + + if (is_android) { + # By default, Trichrome channels are compiled using separate package names. + # Set this to 'true' to compile Trichrome channels using the Stable channel's + # package name. This currently only affects builds with `android_channel = + # "beta"`. + use_stable_package_name_for_trichrome = false + } +} + +assert( + !is_chrome_for_testing || !is_chrome_branded, + "`is_chrome_for_testing = true` is incompatible with `is_chrome_branded = true`") + +assert( + is_chrome_for_testing || !is_chrome_for_testing_branded, + "`is_chrome_for_testing_branded = true` requires `is_chrome_for_testing = true`") + +declare_args() { + # Refers to the subdirectory for branding in various places including + # chrome/app/theme. + # + # `branding_path_product` must not contain slashes. + if (is_chrome_for_testing) { + if (is_chrome_for_testing_branded) { + branding_path_component = "google_chrome/google_chrome_for_testing" + } else { + branding_path_component = "chromium" + } + branding_path_product = "chromium" + } else if (is_chrome_branded) { + branding_path_component = "google_chrome" + branding_path_product = "google_chrome" + } else { + branding_path_component = "chromium" + branding_path_product = "chromium" + } +} + +declare_args() { + # The path to the BRANDING file in chrome/app/theme. + branding_file_path = "//chrome/app/theme/$branding_path_component/BRANDING" +} diff --git a/config/chromebox_for_meetings/BUILD.gn b/config/chromebox_for_meetings/BUILD.gn new file mode 100644 index 000000000000..08d74f97d263 --- /dev/null +++ b/config/chromebox_for_meetings/BUILD.gn @@ -0,0 +1,11 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/buildflag_header.gni") +import("//build/config/chromebox_for_meetings/buildflags.gni") + +buildflag_header("buildflags") { + header = "buildflags.h" + flags = [ "PLATFORM_CFM=$is_cfm" ] +} diff --git a/config/chromebox_for_meetings/OWNERS b/config/chromebox_for_meetings/OWNERS new file mode 100644 index 000000000000..985da0c1b6b4 --- /dev/null +++ b/config/chromebox_for_meetings/OWNERS @@ -0,0 +1 @@ +file://chromeos/ash/components/chromebox_for_meetings/OWNERS diff --git a/config/chromebox_for_meetings/README.md b/config/chromebox_for_meetings/README.md new file mode 100644 index 000000000000..ddbe3c9c254e --- /dev/null +++ b/config/chromebox_for_meetings/README.md @@ -0,0 +1,31 @@ +# CfM GN Build Flags + +Note: GN flags are build-time flags. + +You can get a comprehensive list of all arguments supported by gn by running the +command `gn args --list out/some-directory` (the directory passed to gn args is +required, as gn args invokes gn gen to generate the build.ninja files). + +## is_cfm (BUILDFLAG(PLATFORM_CFM)) + +Flag for building Chromium for CfM devices.
+ +### Query Flag +```bash +$ gn args out_/{Release||Debug} --list=is_cfm +``` + +### Enable Flag +```bash +$ gn args out_/{Release||Debug} +# The editor opens; add is_cfm=true, then save and exit +``` + +### Alt: EnrollmentRequisitionManager + +Alternatively, we can use the EnrollmentRequisitionManager in source code to +determine whether Chromium is running on a CfM-enabled platform: + +```cpp +policy::EnrollmentRequisitionManager::IsRemoraRequisition(); +``` diff --git a/config/chromebox_for_meetings/buildflags.gni b/config/chromebox_for_meetings/buildflags.gni new file mode 100644 index 000000000000..22ad88a32b34 --- /dev/null +++ b/config/chromebox_for_meetings/buildflags.gni @@ -0,0 +1,8 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +declare_args() { + # True if compiling for Chromebox for Meetings devices. + is_cfm = false +} diff --git a/config/chromecast/BUILD.gn b/config/chromecast/BUILD.gn new file mode 100644 index 000000000000..acaf990a21ca --- /dev/null +++ b/config/chromecast/BUILD.gn @@ -0,0 +1,90 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/chromecast_build.gni") + +assert(is_castos || is_cast_android) + +config("static_config") { + if (!is_clang) { + ldflags = [ + # Don't allow visible symbols from libraries that contain + # assembly code with symbols that aren't hidden properly. + # http://b/26390825 + "-Wl,--exclude-libs=libffmpeg.a", + ] + + if (!is_android) { + ldflags += [ + # We want to statically link libstdc++/libgcc on Linux. + # (On Android, libstdc++ and libgcc aren't used.) + "-static-libstdc++", + "-static-libgcc", + ] + } + } +} + +config("ldconfig") { + visibility = [ ":*" ] + configs = [] + + # Chromecast executables depend on several shared libraries in + # /oem_cast_shlib, $ORIGIN, and $ORIGIN/lib. Add these rpaths to each binary. + # This is explicitly disabled in Chrome for security reasons (see comments in + # //build/config/gcc/BUILD.gn), but necessary on Chromecast so that OEMs may + # override the default libraries shipped in the Cast receiver package. + if (target_rpath == "") { + ldflags = [ + "-Wl,-rpath=/oem_cast_shlib", + "-Wl,-rpath=\$ORIGIN/lib", + "-Wl,-rpath=\$ORIGIN", + ] + } else { + ldflags = [ "-Wl,-rpath=${target_rpath}" ] + } + + # Binaries which don't live in the same directory as Chrome component + # libraries may still depend on them. Explicitly add the component library + # directory to the rpath for the component build. + if (is_component_build) { + ldflags += [ "-Wl,-rpath=/system/chrome" ] + } +} + +config("executable_config") { + configs = [ ":ldconfig" ] + + if (!is_clang && current_cpu == "arm") { + ldflags = [ + # Export libstdc++ and libgcc symbols to force shlibs to refer to these + # symbols from the executable. + "-Wl,--export-dynamic", + + "-lm", # libstdc++ requires math.h + + # In case we redefined libstdc++ symbols (e.g. tc_malloc) + "-Wl,--allow-multiple-definition", + + "-Wl,--whole-archive", + "-l:libstdc++.a", + "-l:libgcc.a", + "-Wl,--no-whole-archive", + ] + + # Despite including libstdc++/libgcc archives, we still need to specify + # static linking for them in order to prevent the executable from having a + # dynamic dependency on them. + configs += [ ":static_config" ] + } +} + +# Shared libraries should not have RPATH or RUNPATH set.
This allows the +# shared libs to inherit RPATH from the parent executable that is loading +# the shared library. (See internal b/37514052 for more details.) +config("shared_library_config") { + if (current_cpu == "arm") { + configs = [ ":static_config" ] + } +} diff --git a/config/chromecast/OWNERS b/config/chromecast/OWNERS new file mode 100644 index 000000000000..253037d736b5 --- /dev/null +++ b/config/chromecast/OWNERS @@ -0,0 +1,3 @@ +mfoltz@chromium.org +rwkeane@google.com +seantopping@chromium.org diff --git a/config/chromecast_build.gni b/config/chromecast_build.gni new file mode 100644 index 000000000000..e8294ce7b021 --- /dev/null +++ b/config/chromecast_build.gni @@ -0,0 +1,124 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# The args declared in this file should be referenced by components outside of +# //chromecast. Args needed only in //chromecast should be declared in +# //chromecast/chromecast.gni. +# +# TODO(crbug.com/1294964): Rename this file after is_chromecast is removed. +declare_args() { + # Set this true for an audio-only Chromecast build. + # TODO(crbug.com/1293538): Replace with a buildflag for speaker-only builds not + # specific to Cast. + is_cast_audio_only = false + + # If non empty, rpath of executables is set to this. + # If empty, default value is used. + target_rpath = "" + + # Set true to enable modular_updater. + enable_modular_updater = false + + # True to enable the cast audio renderer. + # + # TODO(crbug.com/1293520): Remove this buildflag. + enable_cast_audio_renderer = false + + # Set this to true to build for Nest hardware running Linux (aka "CastOS"). + # Set this to false to use the defaults for Linux. + is_castos = false + + # Set this to true to build for Android-based Cast devices. + # Set this to false to use the defaults for Android. + is_cast_android = false +} + +# Restrict is_castos and is_cast_android to only be set on the target toolchain. +is_castos = is_castos && current_toolchain == default_toolchain +is_cast_android = is_cast_android && current_toolchain == default_toolchain + +declare_args() { + # Set this true for a Chromecast build. Chromecast builds are supported on + # Linux, Android, ChromeOS, and Fuchsia. + enable_cast_receiver = false +} + +declare_args() { + # True to enable the cast renderer. It is enabled by default for Linux and + # Android audio only builds. + # + # TODO(crbug.com/1293520): Remove this buildflag. + enable_cast_renderer = + enable_cast_receiver && + (is_linux || is_chromeos || (is_cast_audio_only && is_android)) +} + +# Configures media options for cast. See media/media_options.gni +cast_mojo_media_services = [] +cast_mojo_media_host = "" + +if (enable_cast_audio_renderer) { + if (is_android) { + cast_mojo_media_services = [ + "cdm", + "audio_decoder", + ] + } + + if (!is_cast_audio_only) { + cast_mojo_media_services += [ "video_decoder" ] + } + + if (is_android && is_cast_audio_only) { + cast_mojo_media_host = "browser" + } else { + cast_mojo_media_host = "gpu" + } +} else if (enable_cast_renderer) { + # In this path, mojo media services are hosted in two processes: + # 1. "renderer" and "cdm" run in browser process. This is hard coded in the + # code. + # 2. "video_decoder" runs in the process specified by "cast_mojo_media_host". 
+ cast_mojo_media_services = [ + "cdm", + "renderer", + ] + if (!is_cast_audio_only) { + cast_mojo_media_services += [ "video_decoder" ] + } + + cast_mojo_media_host = "gpu" +} else if (is_android) { + # On Android, all the enabled mojo media services run in the process specified + # by "cast_mojo_media_host". + cast_mojo_media_services = [ + "cdm", + "audio_decoder", + ] + if (!is_cast_audio_only) { + # These are Cast/Android devices with Video capabilities (and GPU) + cast_mojo_media_services += [ "video_decoder" ] + cast_mojo_media_host = "gpu" + } else { + # These are Cast/Android devices with only Audio capabilities (no GPU) + cast_mojo_media_host = "browser" + } +} + +# Assert that Chromecast is being built for a supported platform. +assert(is_linux || is_chromeos || is_android || is_fuchsia || + !enable_cast_receiver, + "Cast receiver builds are not supported on $current_os") + +assert(enable_cast_receiver || !is_cast_audio_only, + "is_cast_audio_only = true requires enable_cast_receiver = true.") + +assert(enable_cast_receiver || !is_castos, + "is_castos = true requires enable_cast_receiver = true.") +assert(is_linux || !is_castos, "is_castos = true requires is_linux = true.") + +assert(enable_cast_receiver || !is_cast_android, + "is_cast_android = true requires enable_cast_receiver = true.") +assert(is_android || !is_cast_android, + "is_cast_android = true requires is_android = true.") diff --git a/config/chromeos/BUILD.gn b/config/chromeos/BUILD.gn new file mode 100644 index 000000000000..0606072656c8 --- /dev/null +++ b/config/chromeos/BUILD.gn @@ -0,0 +1,62 @@ +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/chromeos/ui_mode.gni") + +assert(is_chromeos) + +declare_args() { + # The location to a file used to dump symbols ordered by Call-Chain Clustering (C3) + # https://research.fb.com/wp-content/uploads/2017/01/cgo2017-hfsort-final1.pdf? + # to a file, used for generating orderfiles in Chrome OS + dump_call_chain_clustering_order = "" +} + +config("print_orderfile") { + if (dump_call_chain_clustering_order != "") { + _output_orderfile = + rebase_path(dump_call_chain_clustering_order, root_build_dir) + ldflags = [ "-Wl,--print-symbol-order=$_output_orderfile" ] + } +} + +config("compiler_cpu_abi") { + # Lacros currently uses the *-generic-crosstoolchain.gni files generated + # by the simplechrome sdk in build/args/chromeos. These target triples + # match the target toolchain defaults in these directories. Passing them + # redundantly is harmless and prepares for using Chromium's toolchain. + # Non-Lacros Chrome OS builds use per-board toolchains, which might use + # different triples. So don't do this there. 
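The cpu-to-triple mapping applied by the config block just below is small enough to tabulate. A Python sketch for reference (illustrative only, not part of the build):

```python
# Target triples passed for Lacros simplechrome-style builds (see below).
CROS_TRIPLES = {
    "x64": "x86_64-cros-linux-gnu",
    "arm": "armv7a-cros-linux-gnueabihf",
    "arm64": "aarch64-cros-linux-gnu",
}

def cros_target_flags(current_cpu):
    """Returns the --target flag shared by asmflags/cflags/ldflags."""
    if current_cpu not in CROS_TRIPLES:
        raise ValueError("add support for %s here" % current_cpu)
    return ["--target=%s" % CROS_TRIPLES[current_cpu]]

print(cros_target_flags("arm64"))  # ['--target=aarch64-cros-linux-gnu']
```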
+ if (is_chromeos_device && is_chromeos_lacros) { + if (current_cpu == "x64") { + asmflags = [ "--target=x86_64-cros-linux-gnu" ] + cflags = [ "--target=x86_64-cros-linux-gnu" ] + ldflags = [ "--target=x86_64-cros-linux-gnu" ] + } else if (current_cpu == "arm") { + asmflags = [ "--target=armv7a-cros-linux-gnueabihf" ] + cflags = [ "--target=armv7a-cros-linux-gnueabihf" ] + ldflags = [ "--target=armv7a-cros-linux-gnueabihf" ] + } else if (current_cpu == "arm64") { + asmflags = [ "--target=aarch64-cros-linux-gnu" ] + cflags = [ "--target=aarch64-cros-linux-gnu" ] + ldflags = [ "--target=aarch64-cros-linux-gnu" ] + } else { + assert(false, "add support for $current_cpu here") + } + } +} + +config("runtime_library") { + # These flags are added by the Chrome OS toolchain compiler wrapper, + # or are implicitly passed by the Chrome OS toolchain's clang due to the cmake + # flags that clang was built with. + # Passing them redundantly is harmless and prepares for using Chromium's + # toolchain for Lacros. + if (is_chromeos_device) { + ldflags = [ + "--rtlib=compiler-rt", + "--unwindlib=libunwind", + ] + } +} diff --git a/config/chromeos/args.gni b/config/chromeos/args.gni new file mode 100644 index 000000000000..8fb50537461d --- /dev/null +++ b/config/chromeos/args.gni @@ -0,0 +1,41 @@ +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +declare_args() { + # This is used only by Simple Chrome to bind its value to test-runner scripts + # generated at build-time. + cros_board = "" + + # Similar to cros_board above, this is used only by test-runner scripts in + # Simple Chrome. + cros_sdk_version = "" +} + +# Ensure that if one is set, the other is as well. +assert(cros_board == "" == (cros_sdk_version == "")) + +declare_args() { + # Determines if we're building for a Chrome OS device (or VM) and not just + # linux-chromeos. NOTE: Most test targets in Chrome expect to run under + # linux-chromeos, so some have compile-time asserts that intentionally fail + # when this build flag is set. Build and run the tests for linux-chromeos + # instead. + # https://chromium.googlesource.com/chromium/src/+/main/docs/chromeos_build_instructions.md + # https://chromium.googlesource.com/chromiumos/docs/+/main/simple_chrome_workflow.md + is_chromeos_device = false + + # Determines if we run the test in skylab, aka the CrOS labs. + is_skylab = false + + # Determines if we collect hardware information in chrome://system and + # feedback logs. A similar build flag "hw_details" is defined in Chrome OS + # (see https://crrev.com/c/3123455). + is_chromeos_with_hw_details = false + + # Determines if we're willing to link against libinput. + use_libinput = false + + # Refers to the separate branding required for the reven build. + is_reven = false +} diff --git a/config/chromeos/rules.gni b/config/chromeos/rules.gni new file mode 100644 index 000000000000..10af886af5ef --- /dev/null +++ b/config/chromeos/rules.gni @@ -0,0 +1,653 @@ +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file.
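Several templates later in this file accept tast_disabled_tests entries of the form "test" or "test@board", disabling the test either everywhere or only for the matching cros_board. A Python sketch of that filter (illustrative only, not part of the build):

```python
# Illustrative sketch of the "test@board" filtering in the tast templates.
def disabled_for_board(tast_disabled_tests, cros_board):
    """Returns the test names that are disabled when targeting cros_board."""
    disabled = []
    for entry in tast_disabled_tests:
        name, _, board = entry.partition("@")
        if board in ("", cros_board):  # no suffix means disabled everywhere
            disabled.append(name)
    return disabled

print(disabled_for_board(["a.Test", "b.Test@eve"], "eve"))    # ['a.Test', 'b.Test']
print(disabled_for_board(["a.Test", "b.Test@eve"], "kevin"))  # ['a.Test']
```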
+ +import("//build/config/chrome_build.gni") +import("//build/config/chromeos/args.gni") +import("//build/config/chromeos/ui_mode.gni") +import("//build/config/dcheck_always_on.gni") +import("//build/config/gclient_args.gni") +import("//build/config/python.gni") +import("//build/util/generate_wrapper.gni") + +assert(is_chromeos && is_chromeos_device) + +# Determine the real paths for various items in the SDK, which may be used +# in the 'generate_runner_script' template below. We do so outside the template +# to confine exec_script to a single invocation. +if (cros_sdk_version != "") { + # Ideally these should be maps, however, gn doesn't support map, so using a + # list of list to simulate a map: + # [key1, [value1, value2, ...]], [key2, [value1, value2, ...]], where + # the keys are boards and values are symlinks or symlink targets, and the + # mapping shouldn't be used for anything else. + # + # A sample usage is: + # foreach(m, _symlink_targets_map) { + # if(m[0] == target_key) { + # target_value = m[1] + # } + # } + # + _symlink_map = [] + _symlink_targets_map = [] + + if (is_chromeos_ash) { + _potential_test_boards = [ cros_board ] + } else { + _potential_test_boards = [] + if (cros_boards != "") { + _potential_test_boards += string_split(cros_boards, ":") + } + if (cros_boards_with_qemu_images != "") { + _potential_test_boards += string_split(cros_boards_with_qemu_images, ":") + } + } + + foreach(b, _potential_test_boards) { + _cache_path_prefix = + "//build/cros_cache/chrome-sdk/symlinks/${b}+${cros_sdk_version}" + + _cros_is_vm = false + foreach(b1, string_split(cros_boards_with_qemu_images, ":")) { + if (b == b1) { + _cros_is_vm = true + } + } + + _symlinks = [] + _symlinks = [ + # Tast harness & test data. + rebase_path("${_cache_path_prefix}+autotest_server_package.tar.bz2"), + + # Binutils (and other toolchain tools) used to deploy Chrome to the device. + rebase_path( + "${_cache_path_prefix}+environment_chromeos-base_chromeos-chrome.tar.xz"), + rebase_path("${_cache_path_prefix}+target_toolchain"), + ] + if (_cros_is_vm) { + # VM-related tools. + _symlinks += [ + rebase_path("${_cache_path_prefix}+sys-firmware/seabios"), + rebase_path("${_cache_path_prefix}+chromiumos_test_image.tar.xz"), + rebase_path("${_cache_path_prefix}+app-emulation/qemu"), + ] + } + _symlink_map += [ [ + b, + _symlinks, + ] ] + } + + _all_symlinks = [] + foreach(m, _symlink_map) { + _all_symlinks += m[1] + } + _all_symlink_targets = + exec_script("//build/get_symlink_targets.py", _all_symlinks, "list lines") + _index = 0 + foreach(m, _symlink_map) { + _symlink_targets = [] + foreach(_, m[1]) { + _symlink_targets += [ _all_symlink_targets[_index] ] + _index += 1 + } + + _symlink_targets_map += [ [ + m[0], + _symlink_targets, + ] ] + } +} + +# Creates dependencies required by skylab testing. If passed the +# generated_script and test_exe this will generate the skylab runner script. +# If passed tast_attr_expr, tast_tests or tast_disabled_tests this will +# generate a filter file containing the expression for running tests in skylab. +# Args: +# generated_script: Name of the generated runner script created for test_exe +# test_exe: Name of the executable to run with the generated script. +# tast_attr_expr: Tast expression to determine tests to run. This creates the +# initial set of tests that can be further filtered.. +# tast_tests: Names of tests to enable in tast. All other tests will be +# disabled that are not listed. +# tast_disabled_tests: Names of tests to disable in tast. 
All other tests that +# match the tast expression will still run. +# tast_control: gni file with collections of tests to be used for specific +# filters (e.g. "//chromeos/tast_control.gni"). Any lists of strings in +# this file will be used to generate additional tast expressions with +# those strings expanded into tests to disable (i.e. as && !"name:test"). +# The name of those lists are then intended to be used to specify in +# test_suites.pyl which collection to be used on specific test suites. +template("generate_skylab_deps") { + forward_variables_from(invoker, + [ + "generated_script", + "test_exe", + "tast_attr_expr", + "tast_tests", + "tast_disabled_tests", + "tast_control", + ]) + if (defined(test_exe) || defined(generated_script)) { + assert(defined(test_exe) && defined(generated_script), + "The test_exe and generated_script must both be defined when " + + "generating the skylab runner script") + action(target_name) { + script = "//build/chromeos/generate_skylab_deps.py" + outputs = [ generated_script ] + args = [ + "generate-runner", + "--test-exe", + test_exe, + "--output", + rebase_path(generated_script, root_build_dir), + ] + + deps = [ "//testing/buildbot/filters:chromeos_filters" ] + if (defined(invoker.deps)) { + deps += invoker.deps + } + + data = [ generated_script ] + if (defined(invoker.data)) { + data += invoker.data + } + + data_deps = [ "//testing:test_scripts_shared" ] + if (defined(invoker.data_deps)) { + data_deps += invoker.data_deps + } + } + } + if (defined(tast_attr_expr) || defined(tast_tests) || + defined(tast_disabled_tests)) { + if (defined(tast_disabled_tests)) { + assert(defined(tast_attr_expr), + "tast_attr_expr must be used when specifying tast_disabled_tests.") + } + _generated_filter = "$root_build_dir/bin/${target_name}.filter" + _skylab_args = [ + "generate-filter", + "--output", + rebase_path(_generated_filter), + ] + if (defined(tast_control)) { + _skylab_args += [ + "--tast-control", + rebase_path(tast_control), + ] + } + if (defined(tast_attr_expr)) { + _skylab_args += [ + "--tast-expr", + tast_attr_expr, + ] + } + if (defined(tast_tests)) { + foreach(_test, tast_tests) { + _skylab_args += [ + "--enabled-tests", + _test, + ] + } + } + if (defined(tast_disabled_tests)) { + foreach(_test, tast_disabled_tests) { + _excluded_test_name_and_board = [] + _excluded_test_name_and_board = string_split(_test, "@") + [ "" ] + _excluded_test_name = _excluded_test_name_and_board[0] + _excluded_board = _excluded_test_name_and_board[1] + if (_excluded_board == "" || _excluded_board == cros_board) { + _skylab_args += [ + "--disabled-tests", + _excluded_test_name, + ] + } + } + } + action(target_name) { + script = "//build/chromeos/generate_skylab_deps.py" + if (defined(tast_control)) { + sources = [ tast_control ] + } + outputs = [ _generated_filter ] + args = _skylab_args + if (defined(invoker.data_deps)) { + data_deps = invoker.data_deps + } + data = [ _generated_filter ] + if (defined(invoker.data)) { + data += invoker.data + } + if (defined(invoker.deps)) { + deps = invoker.deps + } + } + } +} + +# Creates a script at $generated_script that can be used to launch a cros VM +# and optionally run a test within it. +# Args: +# test_exe: Name of test binary located in the out dir. This will get copied +# to the VM and executed there. +# tast_attr_expr: Tast expression to pass to local_test_runner on the VM. +# tast_tests: List of Tast tests to run on the VM. 
Note that when this is + # specified, the target name used to invoke this template will be + # designated as the "name" of this test and will primarily be used for test + # results tracking and displaying (eg: flakiness dashboard). + # generated_script: Path to place the generated script. + # deploy_chrome: If true, deploys a locally built chrome located in the root + # build dir to the VM or DUT after launching it. + # deploy_lacros: If true, deploys a locally built Lacros located in the root + # build dir to the VM or DUT after launching it. + # runtime_deps_file: Path to file listing runtime deps for the test. If set, + # all files listed will be copied to the VM before testing. + # skip_generating_board_args: By default, this template generates an '--board' + # arg with corresponding '--flash' or '--use-vm' args for device and vm + # respectively. This argument instructs the template to skip generating + # them, and it's designed for use cases where one builds for one board + # (e.g. amd64-generic), but tests on a different board (e.g. eve). + # tast_vars: A list of "key=value" runtime variable pairs to pass to invoke + # the Tast tests. For more details, please see: + # https://chromium.googlesource.com/chromiumos/platform/tast/+/HEAD/docs/writing_tests.md#Runtime-variables + # strip_chrome: If true, strips Chrome before deploying it for non-Tast tests. +template("generate_runner_script") { + forward_variables_from(invoker, + [ + "deploy_chrome", + "deploy_lacros", + "generated_script", + "runtime_deps_file", + "skip_generating_board_args", + "strip_chrome", + "tast_attr_expr", + "tast_tests", + "tast_vars", + "testonly", + "test_exe", + ]) + + if (!defined(skip_generating_board_args)) { + skip_generating_board_args = false + } + + if (skip_generating_board_args) { + # cros_board is not needed, so setting it to empty to avoid being used + # accidentally below. + cros_board = "" + not_needed([ cros_board ]) + } + + if (!defined(deploy_chrome)) { + deploy_chrome = false + } + if (!defined(deploy_lacros)) { + deploy_lacros = false + } + if (!defined(strip_chrome)) { + strip_chrome = false + } + is_tast = defined(tast_attr_expr) || defined(tast_tests) + assert(!(is_tast && defined(test_exe)), + "Tast tests are invoked from binaries shipped with the VM image. " + + "There should be no locally built binary needed.") + assert(is_tast || !defined(tast_vars), + "tast_vars is only supported for Tast tests") + + if (is_tast) { + not_needed([ "strip_chrome" ]) + } + + # If we're in the cros chrome-sdk (and not the raw ebuild), the test will + # need some additional runtime data located in the SDK cache. + _sdk_data = [] + if (cros_sdk_version != "") { + assert(defined(generated_script), + "Must specify where to place generated test launcher script via " + + "'generated_script'") + + foreach(b, _potential_test_boards) { + _cros_is_vm = false + foreach(b1, string_split(cros_boards_with_qemu_images, ":")) { + if (b == b1) { + _cros_is_vm = true + } + } + + # Determine the real paths for various items in the SDK, which may be used + # in the 'generate_runner_script' template below. + if (is_tast || _cros_is_vm || deploy_chrome) { + _symlink_targets = [] + foreach(m, _symlink_targets_map) { + if (b == m[0]) { + _symlink_targets = [] + _symlink_targets = m[1] + } + } + + if (is_tast) { + # Add tast sdk items. + _sdk_data += [ _symlink_targets[0] ] + } + if (deploy_chrome) { + # To deploy chrome to the VM, it needs to be stripped down to fit into + # the VM. This is done by using binutils in the toolchain.
So add the + # toolchain to the data. + _sdk_data += [ + _symlink_targets[1], + _symlink_targets[2], + ] + } + if (_cros_is_vm) { + # Add vm sdk items. + _sdk_data += [ + _symlink_targets[3], + _symlink_targets[4], + _symlink_targets[5], + ] + } + } + } + } + + generate_wrapper(target_name) { + executable = "//build/chromeos/test_runner.py" + wrapper_script = generated_script + executable_args = [] + + if (defined(runtime_deps_file)) { + write_runtime_deps = runtime_deps_file + } + + # Build executable_args for the three different test types: GTest, Tast, + # and host-side commands (eg telemetry). + if (defined(test_exe)) { + executable_args += [ + "gtest", + "--test-exe", + test_exe, + ] + if (defined(runtime_deps_file)) { + executable_args += [ + "--runtime-deps-path", + rebase_path(runtime_deps_file, root_build_dir), + ] + } + } else if (is_tast) { + # When --tast-tests is specified, test_runner.py will call + # local_test_runner on the VM to run the set of tests. + executable_args += [ + "tast", + "--suite-name", + target_name, + ] + if (defined(tast_attr_expr)) { + executable_args += [ + "--attr-expr", + tast_attr_expr, + ] + } else { + foreach(test, tast_tests) { + executable_args += [ + "-t", + test, + ] + } + } + if (defined(tast_vars)) { + foreach(var, tast_vars) { + executable_args += [ + "--tast-var", + var, + ] + } + } + if (dcheck_always_on) { + executable_args += [ + "--tast-extra-use-flags", + "chrome_dcheck", + ] + } + } else { + executable_args += [ "host-cmd" ] + } + executable_args += [ + "--cros-cache", + "build/cros_cache/", + "--path-to-outdir", + rebase_path(root_out_dir, "//"), + "-v", + ] + + if (!is_tast && strip_chrome) { + executable_args += [ "--strip-chrome" ] + } + + if (!skip_generating_board_args) { + executable_args += [ + "--board", + cros_board, + ] + + _cros_is_vm = false + foreach(b, string_split(cros_boards_with_qemu_images, ":")) { + if (cros_board == b) { + _cros_is_vm = true + } + } + if (_cros_is_vm) { + executable_args += [ "--use-vm" ] + } else { + executable_args += [ "--flash" ] + } + } + + # If we have public Chromium builds, use public Chromium OS images when + # flashing the test device. + if (!is_chrome_branded) { + executable_args += [ "--public-image" ] + } + + if (deploy_lacros) { + executable_args += [ "--deploy-lacros" ] + } + + if (deploy_chrome && !defined(test_exe)) { + executable_args += [ "--deploy-chrome" ] + } + + # executable_args should be finished, now build the data and deps lists. + deps = [ "//testing/buildbot/filters:chromeos_filters" ] + if (defined(invoker.deps)) { + deps += invoker.deps + } + data = [ + "//.vpython3", + + # We use android test-runner's results libs to construct gtest output + # json. + "//build/android/pylib/__init__.py", + "//build/android/pylib/base/", + "//build/android/pylib/results/", + "//build/chromeos/", + "//build/util/", + + # Needed for various SDK components used below. + "//build/cros_cache/chrome-sdk/misc/", + "//build/cros_cache/chrome-sdk/symlinks/", + "//chrome/VERSION", + + # The LKGM file controls what version of the VM image to download. Add it + # as data here so that changes to it will trigger analyze. 
+ "//chromeos/CHROMEOS_LKGM", + "//third_party/chromite/", + ] + + data += _sdk_data + + if (defined(invoker.data)) { + data += invoker.data + } + + data_deps = [ "//testing:test_scripts_shared" ] + if (defined(invoker.data_deps)) { + data_deps += invoker.data_deps + } + } +} + +template("tast_test") { + forward_variables_from(invoker, "*") + + if (!defined(deploy_lacros_chrome)) { + deploy_lacros_chrome = false + } + + # Default the expression to match any chrome-related test. + if (!defined(tast_attr_expr) && !defined(tast_tests)) { + # The following expression filters out all non-critical tests. See the link + # below for more details: + # https://chromium.googlesource.com/chromiumos/platform/tast/+/main/docs/test_attributes.md + tast_attr_expr = "\"group:mainline\" && \"dep:chrome\"" + + if (defined(enable_tast_informational_tests) && + enable_tast_informational_tests) { + tast_attr_expr += " && informational" + } else { + tast_attr_expr += " && !informational" + } + if (!is_chrome_branded) { + tast_attr_expr += " && !\"dep:chrome_internal\"" + } + } else { + assert(defined(tast_attr_expr) != defined(tast_tests), + "Specify one of tast_tests or tast_attr_expr.") + } + + # Append any disabled tests to the expression. + if (defined(tast_disabled_tests)) { + assert(defined(tast_attr_expr), + "tast_attr_expr must be used when specifying tast_disabled_tests.") + foreach(_test, tast_disabled_tests) { + _excluded_test_name_and_board = [] + _excluded_test_name_and_board = string_split(_test, "@") + [ "" ] + _excluded_test_name = _excluded_test_name_and_board[0] + _excluded_board = _excluded_test_name_and_board[1] + if (_excluded_board == "" || _excluded_board == cros_board) { + tast_attr_expr += " && !\"name:${_excluded_test_name}\"" + } + } + } + if (defined(tast_attr_expr)) { + tast_attr_expr = "( " + tast_attr_expr + " )" + } + generate_runner_script(target_name) { + testonly = true + generated_script = "$root_build_dir/bin/run_${target_name}" + runtime_deps_file = "$root_out_dir/${target_name}.runtime_deps" + deploy_chrome = true + deploy_lacros = deploy_lacros_chrome + data_deps = [ + "//:chromiumos_preflight", # Builds the browser. + "//chromeos:cros_chrome_deploy", # Adds additional browser run-time deps. + + # Tools used to symbolize Chrome crash dumps. + # TODO(crbug.com/1156772): Remove these if/when all tests pick them up by + # default. + "//third_party/breakpad:dump_syms", + "//third_party/breakpad:minidump_dump", + "//third_party/breakpad:minidump_stackwalk", + ] + + data = [ "//components/crash/content/tools/generate_breakpad_symbols.py" ] + if (deploy_lacros_chrome) { + data += [ + # A script needed to launch Lacros in Lacros Tast tests. + "//build/lacros/mojo_connection_lacros_launcher.py", + ] + } + } +} + +template("lacros_tast_tests") { + forward_variables_from(invoker, + [ + "tast_attr_expr", + "tast_disabled_tests", + "tast_tests", + "tast_control", + ]) + assert(defined(tast_attr_expr) != defined(tast_tests), + "Specify one of tast_tests or tast_attr_expr.") + + _lacros_data_deps = [ + "//chrome", # Builds the browser. + + # Tools used to symbolize Chrome crash dumps. + # TODO(crbug.com/1156772): Remove these if/when all tests pick them up by + # default. + "//third_party/breakpad:dump_syms", + "//third_party/breakpad:minidump_dump", + "//third_party/breakpad:minidump_stackwalk", + ] + + _lacros_data = [ + "//components/crash/content/tools/generate_breakpad_symbols.py", + + # A script needed to launch Lacros in Lacros Tast tests. 
+ "//build/lacros/mojo_connection_lacros_launcher.py", + ] + + if (is_skylab) { + generate_skylab_deps(target_name) { + data = _lacros_data + data_deps = _lacros_data_deps + + # To disable a test on specific milestones, add it to the appropriate + # collection in the following file + tast_control = "//chromeos/tast_control.gni" + } + } else { + # Append any disabled tests to the expression. + if (defined(tast_disabled_tests)) { + assert(defined(tast_attr_expr), + "tast_attr_expr must be used when specifying tast_disabled_tests.") + foreach(_test, tast_disabled_tests) { + _excluded_test_name_and_board = [] + _excluded_test_name_and_board = string_split(_test, "@") + [ "" ] + _excluded_test_name = _excluded_test_name_and_board[0] + _excluded_board = _excluded_test_name_and_board[1] + if (_excluded_board == "" || _excluded_board == cros_board) { + tast_attr_expr += " && !\"name:${_excluded_test_name}\"" + } + } + } + if (defined(tast_attr_expr)) { + tast_attr_expr = "( " + tast_attr_expr + " )" + } + generate_runner_script(target_name) { + testonly = true + deploy_lacros = true + generated_script = "$root_build_dir/bin/run_${target_name}" + runtime_deps_file = "$root_out_dir/${target_name}.runtime_deps" + + # At build time, Lacros tests don't know whether they'll run on VM or HW, + # and instead, these flags are specified at runtime when invoking the + # generated runner script. + skip_generating_board_args = true + + # By default, tast tests download a lacros-chrome from a gcs location and + # use it for testing. To support running lacros tast tests from Chromium CI, + # a Var is added to support pointing the tast tests to use a specified + # pre-deployed lacros-chrome. The location is decided by: + # https://source.chromium.org/chromium/chromium/src/+/main:third_party/chromite/scripts/deploy_chrome.py;l=80;drc=86f1234a4be8e9574442e076cdc835897f7bea61 + tast_vars = [ "lacros.DeployedBinary=/usr/local/lacros-chrome" ] + + data_deps = _lacros_data_deps + + data = _lacros_data + } + } +} diff --git a/config/chromeos/ui_mode.gni b/config/chromeos/ui_mode.gni new file mode 100644 index 000000000000..ce8fa8b4da8c --- /dev/null +++ b/config/chromeos/ui_mode.gni @@ -0,0 +1,60 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/chromeos/args.gni") + +declare_args() { + # Deprecated, use is_chromeos_lacros. + # + # This controls UI configuration for Chrome. + # If this flag is set, we assume Chrome runs on Chrome OS devices, using + # Wayland (instead of X11). + # + # TODO(crbug.com/1052397): + # Define chromeos_product instead, which takes either "browser" or "ash". + # Re-define the following variables as: + # is_chromeos_lacros = chromeos_product == "browser" + # is_chromeos_ash = chromeos_product == "ash" + chromeos_is_browser_only = false + + # Setting this to true when building linux Lacros-chrome will cause it to + # *also* build linux ash-chrome in a subdirectory using an alternate + # toolchain. + # Don't set this unless you're sure you want it, because it'll double + # your build time. + also_build_ash_chrome = false + + # Setting this to true when building linux ash-chrome will cause it to + # *also* build linux Lacros-chrome in a subdirectory using an alternate toolchain. + also_build_lacros_chrome = false + + # Setting this when building ash-chrome will cause it to + # *also* build Lacros-chrome in a subdirectory using an alternate toolchain. 
+ # You can set this to either "amd64" or "arm". + also_build_lacros_chrome_for_architecture = "" +} + +# is_chromeos_{ash,lacros} is used to specify that it is specific to either +# ash or lacros chrome for chromeos. For platform specific logic that +# applies to both, use is_chromeos instead. +# .gn files are evaluated multiple times for each toolchain. +# is_chromeos_{ash,lacros} should be set true only for builds with target +# toolchains. +is_chromeos_ash = is_chromeos && !chromeos_is_browser_only +is_chromeos_lacros = is_chromeos && chromeos_is_browser_only + +# also_build_ash_chrome and also_build_lacros_chrome cannot be both true. +assert(!(also_build_ash_chrome && also_build_lacros_chrome)) + +# Can't set both also_build_lacros_chrome and +# also_build_lacros_chrome_for_architecture. +assert(!(also_build_lacros_chrome == true && + also_build_lacros_chrome_for_architecture != "")) + +# also_build_lacros_chrome_for_architecture is for device only. +assert(is_chromeos_device || also_build_lacros_chrome_for_architecture == "") + +# also_build_lacros_chrome_for_architecture is for ash build only. +assert(!chromeos_is_browser_only || + also_build_lacros_chrome_for_architecture == "") diff --git a/config/clang/BUILD.gn b/config/clang/BUILD.gn new file mode 100644 index 000000000000..ed39cc68cc40 --- /dev/null +++ b/config/clang/BUILD.gn @@ -0,0 +1,81 @@ +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("clang.gni") + +config("find_bad_constructs") { + if (clang_use_chrome_plugins) { + cflags = [] + + # The plugin is built directly into clang, so there's no need to load it + # dynamically. + cflags += [ + "-Xclang", + "-add-plugin", + "-Xclang", + "find-bad-constructs", + + "-Xclang", + "-plugin-arg-find-bad-constructs", + "-Xclang", + "raw-ref-template-as-trivial-member", + + "-Xclang", + "-plugin-arg-find-bad-constructs", + "-Xclang", + "check-stack-allocated", + ] + + if (is_linux || is_chromeos || is_android || is_fuchsia) { + cflags += [ + "-Xclang", + "-plugin-arg-find-bad-constructs", + "-Xclang", + "check-ipc", + ] + } + + if (enable_check_raw_ptr_fields) { + cflags += [ + "-Xclang", + "-plugin-arg-find-bad-constructs", + "-Xclang", + "check-raw-ptr-fields", + + # TODO(keishi): Remove this once crrev.com/c/4387753 is rolled out. + "-Xclang", + "-plugin-arg-find-bad-constructs", + "-Xclang", + "raw-ptr-exclude-path=base/no_destructor.h", + + # TODO(keishi): Remove this once crrev.com/c/4086161 lands. + "-Xclang", + "-plugin-arg-find-bad-constructs", + "-Xclang", + "raw-ptr-exclude-path=base/containers/span.h", + ] + } + } +} + +# Enables some extra Clang-specific warnings. Some third-party code won't +# compile with these so may want to remove this config. +config("extra_warnings") { + cflags = [ + "-Wheader-hygiene", + + # Warns when a const char[] is converted to bool. + "-Wstring-conversion", + + "-Wtautological-overlap-compare", + ] +} + +group("llvm-symbolizer_data") { + if (is_win) { + data = [ "$clang_base_path/bin/llvm-symbolizer.exe" ] + } else { + data = [ "$clang_base_path/bin/llvm-symbolizer" ] + } +} diff --git a/config/clang/clang.gni b/config/clang/clang.gni new file mode 100644 index 000000000000..1aad3d60b840 --- /dev/null +++ b/config/clang/clang.gni @@ -0,0 +1,22 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
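The find_bad_constructs config above repeats a fixed -Xclang pattern: loading the plugin takes one -add-plugin pair, and each plugin argument takes a -plugin-arg-<plugin> pair. A Python sketch that generates such flag lists (illustrative only, not part of the build):

```python
# Illustrative: expand a clang plugin plus its arguments into -Xclang pairs.
def clang_plugin_flags(plugin, plugin_args):
    flags = ["-Xclang", "-add-plugin", "-Xclang", plugin]
    for arg in plugin_args:
        flags += ["-Xclang", "-plugin-arg-%s" % plugin, "-Xclang", arg]
    return flags

print(clang_plugin_flags("find-bad-constructs", ["check-ipc"]))
# ['-Xclang', '-add-plugin', '-Xclang', 'find-bad-constructs',
#  '-Xclang', '-plugin-arg-find-bad-constructs', '-Xclang', 'check-ipc']
```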
+ +import("//build/toolchain/toolchain.gni") + +default_clang_base_path = "//third_party/llvm-build/Release+Asserts" + +declare_args() { + # Indicates if the build should use the Chrome-specific plugins for enforcing + # coding guidelines, etc. Only used when compiling with Chrome's Clang, not + # Chrome OS's. + clang_use_chrome_plugins = + is_clang && !is_nacl && current_os != "zos" && + default_toolchain != "//build/toolchain/cros:target" + + enable_check_raw_ptr_fields = + build_with_chromium && !is_official_build && + ((is_linux && !is_castos) || (is_android && !is_cast_android)) + + clang_base_path = default_clang_base_path +} diff --git a/config/compiler/BUILD.gn b/config/compiler/BUILD.gn new file mode 100644 index 000000000000..28c2255ba809 --- /dev/null +++ b/config/compiler/BUILD.gn @@ -0,0 +1,2729 @@ +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/buildflag_header.gni") +import("//build/config/android/config.gni") +import("//build/config/c++/c++.gni") +import("//build/config/chrome_build.gni") +import("//build/config/chromeos/args.gni") +import("//build/config/chromeos/ui_mode.gni") +import("//build/config/clang/clang.gni") +import("//build/config/compiler/compiler.gni") +import("//build/config/coverage/coverage.gni") +import("//build/config/dcheck_always_on.gni") +import("//build/config/gclient_args.gni") +import("//build/config/host_byteorder.gni") +import("//build/config/rust.gni") +import("//build/config/sanitizers/sanitizers.gni") +import("//build/config/ui.gni") +import("//build/toolchain/cc_wrapper.gni") +import("//build/toolchain/goma.gni") +import("//build/toolchain/rbe.gni") +import("//build/toolchain/toolchain.gni") +import("//build_overrides/build.gni") + +if (current_cpu == "arm" || current_cpu == "arm64") { + import("//build/config/arm.gni") +} +if (current_cpu == "mipsel" || current_cpu == "mips64el" || + current_cpu == "mips" || current_cpu == "mips64") { + import("//build/config/mips.gni") +} +if (is_mac) { + import("//build/config/apple/symbols.gni") +} +if (is_ios) { + import("//build/config/ios/ios_sdk.gni") +} +if (is_nacl) { + # To keep NaCl variables out of builds that don't include NaCl, all + # variables defined in nacl/config.gni referenced here should be protected by + # is_nacl conditions. + import("//build/config/nacl/config.gni") +} + +lld_path = "" +if (!is_clang) { + declare_args() { + # This allows overriding the location of lld. + lld_path = rebase_path("$clang_base_path/bin", root_build_dir) + } +} else { + # clang looks for lld next to it, no need for -B. + lld_path = "" +} + +declare_args() { + # Normally, Android builds are lightly optimized, even for debug builds, to + # keep binary size down. Setting this flag to true disables such optimization + android_full_debug = false + + # Compile in such a way as to make it possible for the profiler to unwind full + # stack frames. Setting this flag has a large effect on the performance of the + # generated code than just setting profiling, but gives the profiler more + # information to analyze. + # Requires profiling to be set to true. + enable_full_stack_frames_for_profiling = false + + # When we are going to use gold we need to find it. + # This is initialized below, after use_gold might have been overridden. + gold_path = "" + + # Enable fatal linker warnings. Building Chromium with certain versions + # of binutils can cause linker warning. 
+  fatal_linker_warnings = true
+
+  # Build with C++ RTTI enabled. Chromium builds without RTTI by default,
+  # but some sanitizers are known to require it, like CFI diagnostics
+  # and UBsan variants.
+  use_rtti = use_cfi_diag || is_ubsan_vptr || is_ubsan_security
+
+  # AFDO (Automatic Feedback Directed Optimizer) is a form of profile-guided
+  # optimization that GCC supports. It is used by ChromeOS in its official
+  # builds. To use it, set auto_profile_path to the path to a file containing
+  # the needed gcov profiling data.
+  auto_profile_path = ""
+
+  # Path to an AFDO profile to use while building with clang, if any. Empty
+  # implies none.
+  clang_sample_profile_path = ""
+
+  # Some configurations have default sample profiles. If this is true and
+  # clang_sample_profile_path is empty, we'll fall back to the default.
+  #
+  # We currently only have default profiles for Chromium in-tree, so we disable
+  # this by default for all downstream projects, since these profiles are likely
+  # nonsensical for said projects.
+  clang_use_default_sample_profile =
+      chrome_pgo_phase == 0 && build_with_chromium && is_official_build &&
+      (is_android || chromeos_is_browser_only)
+
+  # This configuration is used to select a default profile in Chrome OS based on
+  # the microarchitectures we are using. This is only used if
+  # clang_use_default_sample_profile is true and clang_sample_profile_path is
+  # empty.
+  chromeos_afdo_platform = "atom"
+
+  # Emit debug information for profiling while building with clang.
+  # Only enable this for ChromeOS official builds for AFDO.
+  clang_emit_debug_info_for_profiling = is_chromeos_device && is_official_build
+
+  # Turn this on to have the compiler output extra timing information.
+  compiler_timing = false
+
+  # Turn this on to use the ghash feature of lld for faster debug links on
+  # Windows.
+  # http://blog.llvm.org/2018/01/improving-link-time-on-windows-with.html
+  use_ghash = true
+
+  # Whether to enable ThinLTO optimizations. Turning ThinLTO optimizations on
+  # can substantially increase link time and binary size, but they generally
+  # also make binaries a fair bit faster.
+  #
+  # TODO(gbiv): We disable optimizations by default on most platforms because
+  # the space overhead is too great. We should use some mixture of profiles and
+  # optimization settings to better tune the size increase.
+  thin_lto_enable_optimizations =
+      (is_chromeos || is_android || is_win || is_linux || is_mac ||
+       (is_ios && use_lld)) && is_official_build
+
+  # Initialize all local variables with a pattern. This flag will fill
+  # uninitialized floating-point types (and 32-bit pointers) with 0xFF and the
+  # rest with 0xAA. This makes the behavior of uninitialized memory bugs
+  # consistent and recognizable in the debugger, and causes crashes on memory
+  # accesses through uninitialized pointers.
+  #
+  # TODO(crbug.com/1131993): This regresses binary size by ~1MB on Android and
+  # needs to be evaluated before enabling it there as well.
+  init_stack_vars = !(is_android && is_official_build)
+
+  # Zero init has favorable performance/size tradeoffs for Chrome OS
+  # but was not evaluated for other platforms.
+  init_stack_vars_zero = is_chromeos
+
+  # This argument controls whether to enable text section splitting in the
+  # final binary. When enabled, the separated text sections with prefix
+  # '.text.hot', '.text.unlikely', '.text.startup' and '.text.exit' will not be
+  # merged to the '.text' section. This allows us to identify the hot code
+  # section ('.text.hot') in the binary, which may be mlocked or mapped to a
+  # huge page to reduce TLB misses, improving CPU usage.
+  # The gold linker has text section splitting enabled by default.
+  use_text_section_splitting = false
+
+  # Turn off the --call-graph-profile-sort flag for lld by default. Enable
+  # selectively for targets where it's beneficial.
+  enable_call_graph_profile_sort = chrome_pgo_phase == 2
+
+  # Enable DWARF v5.
+  use_dwarf5 = false
+
+  # Override this to put full paths to PDBs in Windows PE files. This helps
+  # windbg and Windows Performance Analyzer with finding the PDBs in some local-
+  # build scenarios. This is never needed for bots or official builds. Because
+  # this puts the output directory in the DLLs/EXEs it breaks build determinism.
+  # Bugs have been reported to the windbg/WPA teams and this workaround will be
+  # removed when they are fixed.
+  use_full_pdb_paths = false
+
+  # Enable -H, which prints the include tree during compilation.
+  # For use by tools/clang/scripts/analyze_includes.py.
+  show_includes = false
+
+  # Enable the Profi algorithm. Profi can infer block and edge counts.
+  # https://clang.llvm.org/docs/UsersManual.html#using-sampling-profilers
+  # TODO(crbug.com/1375958): Possibly enable this for Android too.
+  use_profi = is_chromeos
+
+  # If true, linker crashes will be rerun with `--reproduce`, which causes
+  # a reproducer file to be saved.
+  save_reproducers_on_lld_crash = false
+
+  # Allow projects that wish to stay on C++17 to override Chromium's default.
+  # TODO(crbug.com/1402249): evaluate removing this at the end of 2023.
+  use_cxx17 = false
+}
+
+declare_args() {
+  # Set to true to use icf, Identical Code Folding.
+  #
+  # icf=all is broken in older golds, see
+  # https://sourceware.org/bugzilla/show_bug.cgi?id=17704
+  # chromeos binutils has been patched with the fix, so always use icf there.
+  # The bug only affects x86 and x64, so we can still use ICF when targeting
+  # other architectures.
+  #
+  # lld doesn't have the bug.
+  use_icf = (is_posix || is_fuchsia) && !is_debug && !using_sanitizer &&
+            !use_clang_coverage && current_os != "zos" &&
+            !(is_android && use_order_profiling) &&
+            (use_lld || (use_gold && (is_chromeos || !(current_cpu == "x86" ||
+                                                       current_cpu == "x64"))))
+}
+
+if (is_android || (is_chromeos_ash && is_chromeos_device)) {
+  # Set the path of the orderfile to use for linking Chrome.
+  # Note that this is for using only one orderfile for linking
+  # the Chrome binary/library.
+  declare_args() {
+    chrome_orderfile_path = ""
+
+    if (defined(default_chrome_orderfile)) {
+      # Allow downstream tools to set orderfile path with
+      # another variable.
+      chrome_orderfile_path = default_chrome_orderfile
+    } else if (is_chromeos_ash && is_chromeos_device) {
+      chrome_orderfile_path = "//chromeos/profiles/chromeos.orderfile.txt"
+    }
+  }
+}
+
+assert(!(llvm_force_head_revision && use_goma),
+       "can't use goma with trunk clang")
+assert(!(llvm_force_head_revision && use_remoteexec),
+       "can't use rbe with trunk clang")
+
+# default_include_dirs ---------------------------------------------------------
+#
+# This is a separate config so that third_party code (which would not use the
+# source root and might have conflicting versions of some headers) can remove
+# this and specify their own include paths.
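+#
+# Illustrative sketch of such an opt-out (the "//third_party/foo" path is
+# hypothetical):
+#   configs -= [ "//build/config/compiler:default_include_dirs" ]
+#   include_dirs = [ "//third_party/foo/include" ]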
+config("default_include_dirs") { + include_dirs = [ + "//", + root_gen_dir, + ] +} + +# Compiler instrumentation can introduce dependencies in DSOs to symbols in +# the executable they are loaded into, so they are unresolved at link-time. +config("no_unresolved_symbols") { + if (!using_sanitizer && + (is_linux || is_chromeos || is_android || is_fuchsia)) { + ldflags = [ + "-Wl,-z,defs", + "-Wl,--as-needed", + ] + } +} + +# compiler --------------------------------------------------------------------- +# +# Base compiler configuration. +# +# See also "runtime_library" below for related stuff and a discussion about +# where stuff should go. Put warning related stuff in the "warnings" config. + +config("compiler") { + asmflags = [] + cflags = [] + cflags_c = [] + cflags_cc = [] + cflags_objc = [] + cflags_objcc = [] + ldflags = [] + defines = [] + configs = [] + + # System-specific flags. If your compiler flags apply to one of the + # categories here, add it to the associated file to keep this shared config + # smaller. + if (is_win) { + configs += [ "//build/config/win:compiler" ] + } else if (is_android) { + configs += [ "//build/config/android:compiler" ] + } else if (is_linux || is_chromeos) { + configs += [ "//build/config/linux:compiler" ] + } else if (is_nacl) { + configs += [ "//build/config/nacl:compiler" ] + } else if (is_mac) { + configs += [ "//build/config/mac:compiler" ] + } else if (is_ios) { + configs += [ "//build/config/ios:compiler" ] + } else if (is_fuchsia) { + configs += [ "//build/config/fuchsia:compiler" ] + } else if (current_os == "aix") { + configs += [ "//build/config/aix:compiler" ] + } else if (current_os == "zos") { + configs += [ "//build/config/zos:compiler" ] + } + + configs += [ + # See the definitions below. + ":clang_revision", + ":rustc_revision", + ":compiler_cpu_abi", + ":compiler_codegen", + ":compiler_deterministic", + ] + + # Here we enable -fno-delete-null-pointer-checks, which makes various nullptr + # operations (e.g. dereferencing) into defined behavior. This avoids deletion + # of some security-critical code: see https://crbug.com/1139129. + # Nacl does not support the flag. And, we still want UBSAN to catch undefined + # behavior related to nullptrs, so do not add this flag if UBSAN is enabled. + # GCC seems to have some bugs compiling constexpr code when this is defined, + # so only enable it if using_clang. See: https://gcc.gnu.org/PR97913 + # TODO(mpdenton): remove is_clang once GCC bug is fixed. + if (!is_nacl && !is_ubsan && is_clang) { + cflags += [ "-fno-delete-null-pointer-checks" ] + } + + # Don't emit the GCC version ident directives, they just end up in the + # .comment section or debug info taking up binary size, and makes comparing + # .o files built with different compiler versions harder. + if (!is_win || is_clang) { + cflags += [ "-fno-ident" ] + } + + # In general, Windows is totally different, but all the other builds share + # some common compiler and linker configuration. + if (!is_win) { + # Common POSIX compiler flags setup. + # -------------------------------- + cflags += [ "-fno-strict-aliasing" ] # See http://crbug.com/32204 + + # Stack protection. + if (is_apple) { + # The strong variant of the stack protector significantly increases + # binary size, so only enable it in debug mode. 
+ if (is_debug) { + cflags += [ "-fstack-protector-strong" ] + } else { + cflags += [ "-fstack-protector" ] + } + } else if ((is_posix && !is_chromeos && !is_nacl) || is_fuchsia) { + # TODO(phajdan.jr): Use -fstack-protector-strong when our gcc supports it. + # See also https://crbug.com/533294 + if (current_os != "zos") { + cflags += [ "--param=ssp-buffer-size=4" ] + } + + # The x86 toolchain currently has problems with stack-protector. + if (is_android && current_cpu == "x86") { + cflags += [ "-fno-stack-protector" ] + } else if (current_os != "aix") { + # Not available on aix. + cflags += [ "-fstack-protector" ] + } + } + + if (use_lld) { + ldflags += [ "-fuse-ld=lld" ] + if (lld_path != "") { + ldflags += [ "-B$lld_path" ] + } + } + + # Linker warnings. + if (fatal_linker_warnings && !is_apple && current_os != "aix" && + current_os != "zos") { + ldflags += [ "-Wl,--fatal-warnings" ] + } + if (fatal_linker_warnings && is_apple) { + ldflags += [ "-Wl,-fatal_warnings" ] + } + } + + if (is_clang && is_debug) { + # Allow comparing the address of references and 'this' against 0 + # in debug builds. Technically, these can never be null in + # well-defined C/C++ and Clang can optimize such checks away in + # release builds, but they may be used in asserts in debug builds. + cflags_cc += [ + "-Wno-undefined-bool-conversion", + "-Wno-tautological-undefined-compare", + ] + } + + # Non-Apple Posix and Fuchsia compiler flags setup. + # ----------------------------------- + if ((is_posix && !is_apple) || is_fuchsia) { + if (enable_profiling) { + if (!is_debug) { + cflags += [ "-g" ] + + if (enable_full_stack_frames_for_profiling) { + cflags += [ + "-fno-inline", + "-fno-optimize-sibling-calls", + ] + } + } + } + + # Explicitly pass --build-id to ld. Compilers used to always pass this + # implicitly but don't any more (in particular clang when built without + # ENABLE_LINKER_BUILD_ID=ON). + if (is_official_build) { + # The sha1 build id has lower risk of collision but is more expensive to + # compute, so only use it in the official build to avoid slowing down + # links. + ldflags += [ "-Wl,--build-id=sha1" ] + } else if (current_os != "aix" && current_os != "zos") { + ldflags += [ "-Wl,--build-id" ] + } + + if (!is_android) { + defines += [ + # _FILE_OFFSET_BITS=64 should not be set on Android in order to maintain + # the behavior of the Android NDK from earlier versions. + # See https://android-developers.googleblog.com/2017/09/introducing-android-native-development.html + "_FILE_OFFSET_BITS=64", + "_LARGEFILE_SOURCE", + "_LARGEFILE64_SOURCE", + ] + } + + if (!is_nacl) { + if (exclude_unwind_tables) { + cflags += [ + "-fno-unwind-tables", + "-fno-asynchronous-unwind-tables", + ] + defines += [ "NO_UNWIND_TABLES" ] + } else { + cflags += [ "-funwind-tables" ] + } + } + } + + # Apple compiler flags setup. + # --------------------------------- + if (is_apple) { + # On Intel, clang emits both Apple's "compact unwind" information and + # DWARF eh_frame unwind information by default, for compatibility reasons. + # This flag limits emission of eh_frame information to functions + # whose unwind information can't be expressed in the compact unwind format + # (which in practice means almost everything gets only compact unwind + # entries). This reduces object file size a bit and makes linking a bit + # faster. + # On arm64, this is already the default behavior. 
+ if (current_cpu == "x64") { + asmflags += [ "-femit-dwarf-unwind=no-compact-unwind" ] + cflags += [ "-femit-dwarf-unwind=no-compact-unwind" ] + } + } + + # Linux/Android/Fuchsia common flags setup. + # --------------------------------- + if (is_linux || is_chromeos || is_android || is_fuchsia) { + asmflags += [ "-fPIC" ] + cflags += [ "-fPIC" ] + ldflags += [ "-fPIC" ] + + if (!is_clang) { + # Use pipes for communicating between sub-processes. Faster. + # (This flag doesn't do anything with Clang.) + cflags += [ "-pipe" ] + } + + ldflags += [ + "-Wl,-z,noexecstack", + "-Wl,-z,relro", + ] + + if (!is_component_build) { + ldflags += [ "-Wl,-z,now" ] + } + } + + # Linux-specific compiler flags setup. + # ------------------------------------ + if (use_gold) { + ldflags += [ "-fuse-ld=gold" ] + if (!is_android) { + # On Android, this isn't needed. gcc in the NDK knows to look next to + # it with -fuse-ld=gold, and clang gets a --gcc-toolchain flag passed + # above. + if (gold_path != "") { + ldflags += [ "-B$gold_path" ] + } + + ldflags += [ + # Experimentation found that using four linking threads + # saved ~20% of link time. + # https://groups.google.com/a/chromium.org/group/chromium-dev/browse_thread/thread/281527606915bb36 + # Only apply this to the target linker, since the host + # linker might not be gold, but isn't used much anyway. + "-Wl,--threads", + "-Wl,--thread-count=4", + ] + } + + # TODO(thestig): Make this flag work with GN. + #if (!is_official_build && !is_chromeos && !(is_asan || is_lsan || is_tsan || is_msan)) { + # ldflags += [ + # "-Wl,--detect-odr-violations", + # ] + #} + } + + if (use_icf && (!is_apple || use_lld)) { + ldflags += [ "-Wl,--icf=all" ] + } + + if (is_linux || is_chromeos) { + cflags += [ "-pthread" ] + # Do not use the -pthread ldflag here since it becomes a no-op + # when using -nodefaultlibs, which would cause an unused argument + # error. "-lpthread" is added in //build/config:default_libs. + } + + # Clang-specific compiler flags setup. + # ------------------------------------ + if (is_clang) { + cflags += [ "-fcolor-diagnostics" ] + + # Enable -fmerge-all-constants. This used to be the default in clang + # for over a decade. It makes clang non-conforming, but is fairly safe + # in practice and saves some binary size. We might want to consider + # disabling this (https://bugs.llvm.org/show_bug.cgi?id=18538#c13), + # but for now it looks like our build might rely on it + # (https://crbug.com/829795). + cflags += [ "-fmerge-all-constants" ] + } + + if (use_lld) { + # TODO(thakis): Make the driver pass --color-diagnostics to the linker + # if -fcolor-diagnostics is passed to it, and pass -fcolor-diagnostics + # in ldflags instead. + if (is_win) { + # On Windows, we call the linker directly, instead of calling it through + # the driver. + ldflags += [ "--color-diagnostics" ] + } else { + ldflags += [ "-Wl,--color-diagnostics" ] + } + } + + # Enable text section splitting only on linux when using lld for now. Other + # platforms can be added later if needed. + if ((is_linux || is_chromeos) && use_lld && use_text_section_splitting) { + ldflags += [ "-Wl,-z,keep-text-section-prefix" ] + } + + if (is_clang && !is_nacl && current_os != "zos") { + cflags += [ "-fcrash-diagnostics-dir=" + clang_diagnostic_dir ] + if (save_reproducers_on_lld_crash && use_lld) { + ldflags += [ + "-fcrash-diagnostics=all", + "-fcrash-diagnostics-dir=" + clang_diagnostic_dir, + ] + } + + # TODO(hans): Remove this once Clang generates better optimized debug info + # by default. 
https://crbug.com/765793 + cflags += [ + "-mllvm", + "-instcombine-lower-dbg-declare=0", + ] + if (!is_debug && use_thin_lto && is_a_target_toolchain) { + if (is_win) { + ldflags += [ "-mllvm:-instcombine-lower-dbg-declare=0" ] + } else { + ldflags += [ "-Wl,-mllvm,-instcombine-lower-dbg-declare=0" ] + } + } + + # TODO(crbug.com/1235145): Investigate why/if this should be needed. + if (is_win) { + cflags += [ "/clang:-ffp-contract=off" ] + } else { + cflags += [ "-ffp-contract=off" ] + } + } + + # C11/C++11 compiler flags setup. + # --------------------------- + if (is_linux || is_chromeos || is_android || (is_nacl && is_clang) || + current_os == "aix") { + if (is_clang) { + standard_prefix = "c" + + # Since we build with -std=c* and not -std=gnu*, _GNU_SOURCE will not be + # defined by the compiler. However, lots of code relies on the + # non-standard features that _GNU_SOURCE enables, so define it manually. + defines += [ "_GNU_SOURCE" ] + + if (is_nacl) { + # Undefine __STRICT_ANSI__ to get non-standard features which would + # otherwise not be enabled by NaCl's sysroots. + cflags += [ "-U__STRICT_ANSI__" ] + } + } else { + # Gcc does not support ##__VA_ARGS__ when in standards-conforming mode, + # but we use this feature in several places in Chromium. + # TODO(thomasanderson): Replace usages of ##__VA_ARGS__ with the + # standard-compliant __VA_OPT__ added by C++20, and switch the gcc build + # to -std=c*. + standard_prefix = "gnu" + } + + cflags_c += [ "-std=${standard_prefix}11" ] + if (is_nacl && !is_nacl_saigo) { + # This is for the pnacl_newlib toolchain. It's only used to build + # a few independent ppapi test files that don't pull in any other + # dependencies. + cflags_cc += [ "-std=${standard_prefix}++14" ] + if (is_clang) { + cflags_cc += [ "-fno-trigraphs" ] + } + } else if (is_clang) { + if (use_cxx17) { + cflags_cc += [ "-std=${standard_prefix}++17" ] + } else { + cflags_cc += [ "-std=${standard_prefix}++20" ] + } + } else { + # The gcc bots are currently using GCC 9, which is not new enough to + # support "c++20"/"gnu++20". + cflags_cc += [ "-std=${standard_prefix}++2a" ] + } + } else if (is_win) { + cflags_c += [ "/std:c11" ] + if (use_cxx17 || (!is_clang && defined(msvc_use_cxx17) && msvc_use_cxx17)) { + cflags_cc += [ "/std:c++17" ] + } else { + cflags_cc += [ "/std:c++20" ] + } + } else if (!is_nacl) { + # TODO(mcgrathr) - the NaCl GCC toolchain doesn't support either + # gnu11/gnu++11 or c11/c++11; we technically don't need this toolchain any + # more, but there are still a few buildbots using it, so until those are + # turned off we need the !is_nacl clause and the (is_nacl && is_clang) + # clause, above. + cflags_c += [ "-std=c11" ] + + if (use_cxx17) { + cflags_cc += [ "-std=c++17" ] + } else { + cflags_cc += [ "-std=c++20" ] + } + } + + if (is_clang && current_os != "zos") { + # C++17 removes trigraph support, but clang still warns that it ignores + # them when seeing them. Don't. + cflags_cc += [ "-Wno-trigraphs" ] + } + + if (use_relative_vtables_abi) { + cflags_cc += [ "-fexperimental-relative-c++-abi-vtables" ] + ldflags += [ "-fexperimental-relative-c++-abi-vtables" ] + } + + # Add flags for link-time optimization. These flags enable + # optimizations/transformations that require whole-program visibility at link + # time, so they need to be applied to all translation units, and we may end up + # with miscompiles if only part of the program is compiled with LTO flags. 
For + # that reason, we cannot allow targets to enable or disable these flags, for + # example by disabling the optimize configuration. + # TODO(pcc): Make this conditional on is_official_build rather than on gn + # flags for specific features. + if (!is_debug && use_thin_lto && is_a_target_toolchain) { + assert(use_lld, "LTO is only supported with lld") + + cflags += [ + "-flto=thin", + "-fsplit-lto-unit", + ] + + # Limit the size of the ThinLTO cache to the lesser of 10% of + # available disk space, 40GB and 100000 files. + cache_policy = "cache_size=10%:cache_size_bytes=40g:cache_size_files=100000" + + # An import limit of 30 has better performance (per speedometer) and lower + # binary size than the default setting of 100. + # TODO(gbiv): We ideally shouldn't need to specify this; ThinLTO + # should be able to better manage binary size increases on its own. + import_instr_limit = 30 + + if (is_win) { + ldflags += [ + "/opt:lldltojobs=all", + "-mllvm:-import-instr-limit=$import_instr_limit", + "/lldltocache:" + + rebase_path("$root_out_dir/thinlto-cache", root_build_dir), + "/lldltocachepolicy:$cache_policy", + "-mllvm:-disable-auto-upgrade-debug-info", + ] + } else { + ldflags += [ "-flto=thin" ] + + # Enabling ThinLTO on Chrome OS too, in an effort to reduce the memory + # usage in crbug.com/1038040. Note this will increase build time in + # Chrome OS. + + # In ThinLTO builds, we run at most one link process at a time, + # and let it use all cores. + # TODO(thakis): Check if '=0' (that is, number of cores, instead + # of "all" which means number of hardware threads) is faster. + ldflags += [ "-Wl,--thinlto-jobs=all" ] + if (is_apple) { + ldflags += [ + "-Wl,-cache_path_lto," + + rebase_path("$root_out_dir/thinlto-cache", root_build_dir), + "-Wcrl,object_path_lto", + ] + } else { + ldflags += + [ "-Wl,--thinlto-cache-dir=" + + rebase_path("$root_out_dir/thinlto-cache", root_build_dir) ] + } + + ldflags += [ "-Wl,--thinlto-cache-policy=$cache_policy" ] + + if (is_chromeos) { + # ARM was originally set lower than x86 to keep the size + # bloat of ThinLTO to <10%, but that's potentially no longer true. + # FIXME(inglorion): maybe tune these? + # TODO(b/271459198): Revert limit on amd64 to 30 when fixed. + import_instr_limit = 20 + } else if (is_android) { + # TODO(crbug.com/1308318): Investigate if we can get the > 6% perf win + # of import_instr_limit 30 with a binary size hit smaller than ~2 MiB. + import_instr_limit = 5 + } + + ldflags += [ "-Wl,-mllvm,-import-instr-limit=$import_instr_limit" ] + + if (!is_chromeos) { + # TODO(https://crbug.com/972449): turn on for ChromeOS when that + # toolchain has this flag. + # We only use one version of LLVM within a build so there's no need to + # upgrade debug info, which can be expensive since it runs the verifier. + ldflags += [ "-Wl,-mllvm,-disable-auto-upgrade-debug-info" ] + } + } + + # TODO(https://crbug.com/1211155): investigate why this isn't effective on + # arm32. + if (!is_android || current_cpu == "arm64") { + cflags += [ "-fwhole-program-vtables" ] + if (!is_win) { + ldflags += [ "-fwhole-program-vtables" ] + } + } + + # This flag causes LTO to create an .ARM.attributes section with the correct + # architecture. This is necessary because LLD will refuse to link a program + # unless the architecture revision in .ARM.attributes is sufficiently new. + # TODO(pcc): The contents of .ARM.attributes should be based on the + # -march flag passed at compile time (see llvm.org/pr36291). 
+    if (current_cpu == "arm") {
+      ldflags += [ "-march=$arm_arch" ]
+    }
+  }
+
+  if (compiler_timing) {
+    if (is_clang && !is_nacl) {
+      cflags += [ "-ftime-trace" ]
+    } else if (is_win) {
+      cflags += [
+        # "Documented" here:
+        # http://aras-p.info/blog/2017/10/23/Best-unknown-MSVC-flag-d2cgsummary/
+        "/d2cgsummary",
+      ]
+    }
+  }
+
+  # Pass a flag to LLD so that Android builds allow debuggerd to properly
+  # symbolize stack crashes (http://crbug.com/919499).
+  if (use_lld && is_android) {
+    ldflags += [ "-Wl,--no-rosegment" ]
+  }
+
+  # TODO(crbug.com/1374347): Clean up undefined symbol errors caught by
+  # --no-undefined-version.
+  if (use_lld && !is_win && !is_mac && !is_ios) {
+    ldflags += [ "-Wl,--undefined-version" ]
+  }
+
+  if (use_lld && is_apple) {
+    ldflags += [ "-Wl,--strict-auto-link" ]
+  }
+
+  # LLD does call-graph-sorted binary layout by default when profile data is
+  # present. On Android this increases binary size due to more thunks for long
+  # jumps. Turn it off by default and enable selectively for targets where it's
+  # beneficial.
+  if (use_lld && !enable_call_graph_profile_sort) {
+    if (is_win) {
+      ldflags += [ "/call-graph-profile-sort:no" ]
+    } else {
+      ldflags += [ "-Wl,--no-call-graph-profile-sort" ]
+    }
+  }
+
+  if (is_clang && !is_nacl && show_includes) {
+    if (is_win) {
+      # TODO(crbug.com/1223741): Goma mixes the -H and /showIncludes output.
+      assert(!use_goma, "show_includes on Windows is not reliable with goma")
+      cflags += [
+        "/clang:-H",
+        "/clang:-fshow-skipped-includes",
+      ]
+    } else {
+      cflags += [
+        "-H",
+        "-fshow-skipped-includes",
+      ]
+    }
+  }
+
+  # This flag enforces that member pointer base types are complete. It helps
+  # prevent us from running into problems in the Microsoft C++ ABI (see
+  # https://crbug.com/847724).
+  if (is_clang && !is_nacl && target_os != "chromeos" &&
+      (is_win || use_custom_libcxx)) {
+    cflags += [ "-fcomplete-member-pointers" ]
+  }
+
+  # Use DWARF simple template names, with the following exceptions:
+  #
+  # * Windows is not supported as it doesn't use DWARF.
+  # * Apple platforms (e.g. MacOS, iPhone, iPad) aren't supported because xcode
+  #   lldb doesn't have the needed changes yet.
+  # TODO(crbug.com/1379070): Remove if the upstream default ever changes.
+  if (is_clang && !is_nacl && !is_win && !is_apple) {
+    cflags_cc += [ "-gsimple-template-names" ]
+  }
+
+  # MLGO specific flags. These flags enable an ML-based inliner trained on
+  # Chrome on Android (arm32) with ThinLTO enabled, optimizing for size.
+  # The "release" ML model is embedded into clang as part of its build.
+  # Currently, the ML inliner is only enabled when targeting Android due to:
+  # a) Android is where size matters the most.
+  # b) MLGO presently has the limitation of only being able to embed one model
+  #    at a time; it is unclear if the embedded model is beneficial for
+  #    non-Android targets.
+  # MLGO is only officially supported on linux.
+  if (use_ml_inliner && is_a_target_toolchain) {
+    assert(
+        is_android && host_os == "linux",
+        "MLGO is currently only supported for targeting Android on a linux host")
+    if (use_thin_lto) {
+      ldflags += [ "-Wl,-mllvm,-enable-ml-inliner=release" ]
+    }
+  }
+
+  # Pass the same C/C++ flags to the objective C/C++ compiler.
+  cflags_objc += cflags_c
+  cflags_objcc += cflags_cc
+
+  # Assign any flags set for the C compiler to asmflags so that they are sent
+  # to the assembler. The Windows assembler takes different types of flags
+  # so only do so for posix platforms.
+ if (is_posix || is_fuchsia) { + asmflags += cflags + asmflags += cflags_c + } + + # Rust compiler flags setup. + # --------------------------- + rustflags = [ + # Overflow checks are optional in Rust, but even if switched + # off they do not cause undefined behavior (the overflowing + # behavior is defined). Because containers are bounds-checked + # in safe Rust, they also can't provoke buffer overflows. + # As such these checks may be less important in Rust than C++. + # But in (simplistic) testing they have negligible performance + # overhead, and this helps to provide consistent behavior + # between different configurations, so we'll keep them on until + # we discover a reason to turn them off. + "-Coverflow-checks=on", + + # By default Rust passes `-nodefaultlibs` to the linker, however this + # conflicts with our `--unwind=none` flag for Android dylibs, as the latter + # is then unused and produces a warning/error. So this removes the + # `-nodefaultlibs` from the linker invocation from Rust, which would be used + # to compile dylibs on Android, such as for constructing unit test APKs. + "-Cdefault-linker-libraries", + + # Require `unsafe` blocks even in `unsafe` fns. This is intended to become + # an error by default eventually; see + # https://github.com/rust-lang/rust/issues/71668 + "-Dunsafe_op_in_unsafe_fn", + + # To make Rust .d files compatible with ninja + "-Zdep-info-omit-d-target", + + # If a macro panics during compilation, show which macro and where it is + # defined. + "-Zmacro-backtrace", + + # For deterministic builds, keep the local machine's current working + # directory from appearing in build outputs. + "-Zremap-cwd-prefix=.", + ] + if (rust_abi_target != "") { + rustflags += [ "--target=$rust_abi_target" ] + } + if (!use_thin_lto) { + # Don't include bitcode if it won't be used. + rustflags += [ "-Cembed-bitcode=no" ] + } + if (is_official_build) { + rustflags += [ "-Ccodegen-units=1" ] + } +} + +# Defers LTO optimization to the linker, for use when: +# * Having the C++ toolchain do the linking against Rust staticlibs, and it +# will be using LTO. +# * Having Rust toolchain invoke the linker, and you're linking Rust and C++ +# together, so this defers LTO to the linker. +# +# Otherwise, Rust does LTO during compilation. +# +# https://doc.rust-lang.org/rustc/linker-plugin-lto.html +config("rust_defer_lto_to_linker") { + if (!is_debug && use_thin_lto && is_a_target_toolchain) { + rustflags = [ "-Clinker-plugin-lto" ] + } +} + +# The BUILDCONFIG file sets this config on targets by default, which means when +# building with ThinLTO, no optimization is performed in the link step. +config("thinlto_optimize_default") { + if (!is_debug && use_thin_lto && is_a_target_toolchain) { + lto_opt_level = 0 + + if (is_win) { + ldflags = [ "/opt:lldlto=" + lto_opt_level ] + } else { + ldflags = [ "-Wl,--lto-O" + lto_opt_level ] + } + + rustflags = [ "-Clinker-plugin-lto=yes" ] + } +} + +# Use this to enable optimization in the ThinLTO link step for select targets +# when thin_lto_enable_optimizations is set by doing: +# +# configs -= [ "//build/config/compiler:thinlto_optimize_default" ] +# configs += [ "//build/config/compiler:thinlto_optimize_max" ] +# +# Since it makes linking significantly slower and more resource intensive, only +# use it on important targets such as the main browser executable or dll. 
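+#
+# (Summary of the config below: with thin_lto_enable_optimizations set, this
+# passes --lto-O2, or /opt:lldlto=2 on Windows; otherwise it stays at level 0,
+# matching thinlto_optimize_default.)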
+config("thinlto_optimize_max") { + if (!is_debug && use_thin_lto && is_a_target_toolchain) { + if (thin_lto_enable_optimizations) { + lto_opt_level = 2 + } else { + lto_opt_level = 0 + } + + if (is_win) { + ldflags = [ "/opt:lldlto=" + lto_opt_level ] + } else { + ldflags = [ "-Wl,--lto-O" + lto_opt_level ] + } + + rustflags = [ "-Clinker-plugin-lto=yes" ] + } +} + +# This provides the basic options to select the target CPU and ABI. +# It is factored out of "compiler" so that special cases can use this +# without using everything that "compiler" brings in. Options that +# tweak code generation for a particular CPU do not belong here! +# See "compiler_codegen", below. +config("compiler_cpu_abi") { + cflags = [] + ldflags = [] + defines = [] + + configs = [] + if (is_chromeos) { + configs += [ "//build/config/chromeos:compiler_cpu_abi" ] + } + + # TODO(https://crbug.com/1383873): Remove this once figured out. + if (is_apple && current_cpu == "arm64") { + cflags += [ "-fno-global-isel" ] + ldflags += [ "-fno-global-isel" ] + } + + if ((is_posix && !is_apple) || is_fuchsia) { + # CPU architecture. We may or may not be doing a cross compile now, so for + # simplicity we always explicitly set the architecture. + if (current_cpu == "x64") { + cflags += [ + "-m64", + "-msse3", + ] + ldflags += [ "-m64" ] + } else if (current_cpu == "x86") { + cflags += [ "-m32" ] + ldflags += [ "-m32" ] + if (!is_nacl) { + cflags += [ + "-mfpmath=sse", + "-msse3", + ] + } + } else if (current_cpu == "arm") { + if (is_clang && !is_android && !is_nacl && + !(is_chromeos_lacros && is_chromeos_device)) { + cflags += [ "--target=arm-linux-gnueabihf" ] + ldflags += [ "--target=arm-linux-gnueabihf" ] + } + if (!is_nacl) { + cflags += [ + "-march=$arm_arch", + "-mfloat-abi=$arm_float_abi", + ] + } + if (arm_tune != "") { + cflags += [ "-mtune=$arm_tune" ] + } + } else if (current_cpu == "arm64") { + if (is_clang && !is_android && !is_nacl && !is_fuchsia && + !(is_chromeos_lacros && is_chromeos_device)) { + cflags += [ "--target=aarch64-linux-gnu" ] + ldflags += [ "--target=aarch64-linux-gnu" ] + } + } else if (current_cpu == "mipsel" && !is_nacl) { + ldflags += [ "-Wl,--hash-style=sysv" ] + if (custom_toolchain == "") { + if (is_clang) { + if (is_android) { + cflags += [ "--target=mipsel-linux-android" ] + ldflags += [ "--target=mipsel-linux-android" ] + } else { + cflags += [ "--target=mipsel-linux-gnu" ] + ldflags += [ "--target=mipsel-linux-gnu" ] + } + } else { + cflags += [ "-EL" ] + ldflags += [ "-EL" ] + } + } + + if (mips_arch_variant == "r6") { + cflags += [ "-mno-odd-spreg" ] + ldflags += [ "-mips32r6" ] + if (is_clang) { + cflags += [ + "-march=mipsel", + "-mcpu=mips32r6", + ] + } else { + cflags += [ + "-mips32r6", + "-Wa,-mips32r6", + ] + if (is_android) { + ldflags += [ "-Wl,-melf32ltsmip" ] + } + } + if (mips_use_msa == true) { + cflags += [ + "-mmsa", + "-mfp64", + ] + } + } else if (mips_arch_variant == "r2") { + ldflags += [ "-mips32r2" ] + if (is_clang) { + cflags += [ + "-march=mipsel", + "-mcpu=mips32r2", + ] + } else { + cflags += [ + "-mips32r2", + "-Wa,-mips32r2", + ] + if (mips_float_abi == "hard" && mips_fpu_mode != "") { + cflags += [ "-m$mips_fpu_mode" ] + } + } + } else if (mips_arch_variant == "r1") { + ldflags += [ "-mips32" ] + if (is_clang) { + cflags += [ + "-march=mipsel", + "-mcpu=mips32", + ] + } else { + cflags += [ + "-mips32", + "-Wa,-mips32", + ] + } + } else if (mips_arch_variant == "loongson3") { + defines += [ "_MIPS_ARCH_LOONGSON" ] + cflags += [ + "-march=loongson3a", + 
"-mno-branch-likely", + "-Wa,-march=loongson3a", + ] + } + + if (mips_dsp_rev == 1) { + cflags += [ "-mdsp" ] + } else if (mips_dsp_rev == 2) { + cflags += [ "-mdspr2" ] + } + + cflags += [ "-m${mips_float_abi}-float" ] + } else if (current_cpu == "mips" && !is_nacl) { + ldflags += [ "-Wl,--hash-style=sysv" ] + if (custom_toolchain == "") { + if (is_clang) { + cflags += [ "--target=mips-linux-gnu" ] + ldflags += [ "--target=mips-linux-gnu" ] + } else { + cflags += [ "-EB" ] + ldflags += [ "-EB" ] + } + } + + if (mips_arch_variant == "r6") { + cflags += [ + "-mips32r6", + "-Wa,-mips32r6", + ] + if (mips_use_msa == true) { + cflags += [ + "-mmsa", + "-mfp64", + ] + } + } else if (mips_arch_variant == "r2") { + cflags += [ + "-mips32r2", + "-Wa,-mips32r2", + ] + if (mips_float_abi == "hard" && mips_fpu_mode != "") { + cflags += [ "-m$mips_fpu_mode" ] + } + } else if (mips_arch_variant == "r1") { + cflags += [ + "-mips32", + "-Wa,-mips32", + ] + } + + if (mips_dsp_rev == 1) { + cflags += [ "-mdsp" ] + } else if (mips_dsp_rev == 2) { + cflags += [ "-mdspr2" ] + } + + cflags += [ "-m${mips_float_abi}-float" ] + } else if (current_cpu == "mips64el") { + cflags += [ "-D__SANE_USERSPACE_TYPES__" ] + ldflags += [ "-Wl,--hash-style=sysv" ] + if (custom_toolchain == "") { + if (is_clang) { + if (is_android) { + cflags += [ "--target=mips64el-linux-android" ] + ldflags += [ "--target=mips64el-linux-android" ] + } else { + cflags += [ "--target=mips64el-linux-gnuabi64" ] + ldflags += [ "--target=mips64el-linux-gnuabi64" ] + } + } else { + cflags += [ + "-EL", + "-mabi=64", + ] + ldflags += [ + "-EL", + "-mabi=64", + ] + } + } + + if (mips_arch_variant == "r6") { + if (is_clang) { + cflags += [ + "-march=mips64el", + "-mcpu=mips64r6", + ] + } else { + cflags += [ + "-mips64r6", + "-Wa,-mips64r6", + ] + ldflags += [ "-mips64r6" ] + } + if (mips_use_msa == true) { + cflags += [ + "-mmsa", + "-mfp64", + ] + } + } else if (mips_arch_variant == "r2") { + ldflags += [ "-mips64r2" ] + if (is_clang) { + cflags += [ + "-march=mips64el", + "-mcpu=mips64r2", + ] + } else { + cflags += [ + "-mips64r2", + "-Wa,-mips64r2", + ] + } + } else if (mips_arch_variant == "loongson3") { + defines += [ "_MIPS_ARCH_LOONGSON" ] + cflags += [ + "-march=loongson3a", + "-mno-branch-likely", + "-Wa,-march=loongson3a", + ] + } + } else if (current_cpu == "mips64") { + ldflags += [ "-Wl,--hash-style=sysv" ] + if (custom_toolchain == "") { + if (is_clang) { + cflags += [ "--target=mips64-linux-gnuabi64" ] + ldflags += [ "--target=mips64-linux-gnuabi64" ] + } else { + cflags += [ + "-EB", + "-mabi=64", + ] + ldflags += [ + "-EB", + "-mabi=64", + ] + } + } + + if (mips_arch_variant == "r6") { + cflags += [ + "-mips64r6", + "-Wa,-mips64r6", + ] + ldflags += [ "-mips64r6" ] + + if (mips_use_msa == true) { + cflags += [ + "-mmsa", + "-mfp64", + ] + } + } else if (mips_arch_variant == "r2") { + cflags += [ + "-mips64r2", + "-Wa,-mips64r2", + ] + ldflags += [ "-mips64r2" ] + } + } else if (current_cpu == "ppc64") { + if (current_os == "aix") { + cflags += [ "-maix64" ] + ldflags += [ "-maix64" ] + } else { + cflags += [ "-m64" ] + ldflags += [ "-m64" ] + } + } else if (current_cpu == "riscv64") { + if (is_clang) { + cflags += [ "--target=riscv64-linux-gnu" ] + ldflags += [ "--target=riscv64-linux-gnu" ] + } + cflags += [ "-mabi=lp64d" ] + } else if (current_cpu == "loong64") { + if (is_clang) { + cflags += [ "--target=loongarch64-linux-gnu" ] + ldflags += [ "--target=loongarch64-linux-gnu" ] + } + cflags += [ + "-mabi=lp64d", + 
"-mcmodel=medium", + ] + } else if (current_cpu == "s390x") { + cflags += [ "-m64" ] + ldflags += [ "-m64" ] + } + } + + asmflags = cflags +} + +# This provides options to tweak code generation that are necessary +# for particular Chromium code or for working around particular +# compiler bugs (or the combination of the two). +config("compiler_codegen") { + configs = [] + cflags = [] + ldflags = [] + + if (is_nacl) { + configs += [ "//build/config/nacl:compiler_codegen" ] + } + + if (current_cpu == "arm64" && !is_win && is_clang) { + # Disable outlining everywhere on arm64 except Win. For more information see + # crbug.com/931297 for Android and crbug.com/1410297 for iOS. + # TODO(crbug.com/1411363): Enable this on Windows if possible. + cflags += [ "-mno-outline" ] + + # This can be removed once https://bugs.llvm.org/show_bug.cgi?id=40348 + # has been resolved, and -mno-outline is obeyed by the linker during + # ThinLTO. + ldflags += [ "-Wl,-mllvm,-enable-machine-outliner=never" ] + } + + asmflags = cflags +} + +# This provides options that make the build deterministic, so that the same +# revision produces the same output, independent of the name of the build +# directory and of the computer the build is done on. +# The relative path from build dir to source dir makes it into the build +# outputs, so it's recommended that you use a build dir two levels deep +# (e.g. "out/Release") so that you get the same "../.." path as all the bots +# in your build outputs. +config("compiler_deterministic") { + cflags = [] + ldflags = [] + swiftflags = [] + + # Eliminate build metadata (__DATE__, __TIME__ and __TIMESTAMP__) for + # deterministic build. See https://crbug.com/314403 + if (!is_official_build) { + if (is_win && !is_clang) { + cflags += [ + "/wd4117", # Trying to define or undefine a predefined macro. + "/D__DATE__=", + "/D__TIME__=", + "/D__TIMESTAMP__=", + ] + } else { + cflags += [ + "-Wno-builtin-macro-redefined", + "-D__DATE__=", + "-D__TIME__=", + "-D__TIMESTAMP__=", + ] + } + } + + # Makes builds independent of absolute file path. + if (is_clang && strip_absolute_paths_from_debug_symbols) { + # If debug option is given, clang includes $cwd in debug info by default. + # For such build, this flag generates reproducible obj files even we use + # different build directory like "out/feature_a" and "out/feature_b" if + # we build same files with same compile flag. + # Other paths are already given in relative, no need to normalize them. + if (is_nacl) { + # TODO(https://crbug.com/1231236): Use -ffile-compilation-dir= here. + cflags += [ + "-Xclang", + "-fdebug-compilation-dir", + "-Xclang", + ".", + ] + } else { + # -ffile-compilation-dir is an alias for both -fdebug-compilation-dir= + # and -fcoverage-compilation-dir=. + cflags += [ "-ffile-compilation-dir=." ] + swiftflags += [ "-file-compilation-dir=." ] + } + if (!is_win) { + # We don't use clang -cc1as on Windows (yet? https://crbug.com/762167) + asmflags = [ "-Wa,-fdebug-compilation-dir,." ] + } + + if (is_win && use_lld) { + if (symbol_level == 2 || (is_clang && using_sanitizer)) { + # Absolutize source file paths for PDB. Pass the real build directory + # if the pdb contains source-level debug information and if linker + # reproducibility is not critical. + ldflags += [ "/PDBSourcePath:" + rebase_path(root_build_dir) ] + } else { + # Use a fake fixed base directory for paths in the pdb to make the pdb + # output fully deterministic and independent of the build directory. 
+ ldflags += [ "/PDBSourcePath:o:\fake\prefix" ] + } + } + } + + # Tells the compiler not to use absolute paths when passing the default + # paths to the tools it invokes. We don't want this because we don't + # really need it and it can mess up the goma cache entries. + if (is_clang && (!is_nacl || is_nacl_saigo)) { + cflags += [ "-no-canonical-prefixes" ] + + # Same for links: Let the compiler driver invoke the linker + # with a relative path and pass relative paths to built-in + # libraries. Not needed on Windows because we call the linker + # directly there, not through the compiler driver. + # We don't link on goma, so this change is just for cleaner + # internal linker invocations, for people who work on the build. + if (!is_win) { + ldflags += [ "-no-canonical-prefixes" ] + } + } +} + +config("clang_revision") { + if (is_clang && clang_base_path == default_clang_base_path) { + update_args = [ + "--print-revision", + "--verify-version=$clang_version", + ] + if (llvm_force_head_revision) { + update_args += [ "--llvm-force-head-revision" ] + } + clang_revision = exec_script("//tools/clang/scripts/update.py", + update_args, + "trim string") + + # This is here so that all files get recompiled after a clang roll and + # when turning clang on or off. (defines are passed via the command line, + # and build system rebuild things when their commandline changes). Nothing + # should ever read this define. + defines = [ "CR_CLANG_REVISION=\"$clang_revision\"" ] + } +} + +config("rustc_revision") { + if (rustc_revision != "") { + # Similar to the above config, this is here so that all files get recompiled + # after a rustc roll. Nothing should ever read this cfg. This will not be + # set if a custom toolchain is used. + rustflags = [ + "--cfg", + "cr_rustc_revision=\"$rustc_revision\"", + ] + } +} + +config("compiler_arm_fpu") { + if (current_cpu == "arm" && !is_ios && !is_nacl) { + cflags = [ "-mfpu=$arm_fpu" ] + if (!arm_use_thumb) { + cflags += [ "-marm" ] + } + asmflags = cflags + } +} + +config("compiler_arm_thumb") { + if (current_cpu == "arm" && arm_use_thumb && is_posix && + !(is_apple || is_nacl)) { + cflags = [ "-mthumb" ] + } +} + +config("compiler_arm") { + if (current_cpu == "arm" && is_chromeos) { + # arm is normally the default mode for clang, but on chromeos a wrapper + # is used to pass -mthumb, and therefor change the default. + cflags = [ "-marm" ] + } +} + +# runtime_library ------------------------------------------------------------- +# +# Sets the runtime library and associated options. +# +# How do you determine what should go in here vs. "compiler" above? Consider if +# a target might choose to use a different runtime library (ignore for a moment +# if this is possible or reasonable on your system). If such a target would want +# to change or remove your option, put it in the runtime_library config. If a +# target wants the option regardless, put it in the compiler config. + +config("runtime_library") { + configs = [] + + # The order of this config is important: it must appear before + # android:runtime_library. This is to ensure libc++ appears before + # libandroid_support in the -isystem include order. Otherwise, there will be + # build errors related to symbols declared in math.h. + if (use_custom_libcxx) { + configs += [ "//build/config/c++:runtime_library" ] + } + + # TODO(crbug.com/830987): Come up with a better name for is POSIX + Fuchsia + # configuration. 
+  if (is_posix || is_fuchsia) {
+    configs += [ "//build/config/posix:runtime_library" ]
+  }
+
+  # System-specific flags. If your compiler flags apply to one of the
+  # categories here, add it to the associated file to keep this shared config
+  # smaller.
+  if (is_win) {
+    configs += [ "//build/config/win:runtime_library" ]
+  } else if (is_linux || is_chromeos) {
+    configs += [ "//build/config/linux:runtime_library" ]
+    if (is_chromeos) {
+      configs += [ "//build/config/chromeos:runtime_library" ]
+    }
+  } else if (is_ios) {
+    configs += [ "//build/config/ios:runtime_library" ]
+  } else if (is_mac) {
+    configs += [ "//build/config/mac:runtime_library" ]
+  } else if (is_android) {
+    configs += [ "//build/config/android:runtime_library" ]
+  }
+
+  if (is_component_build) {
+    defines = [ "COMPONENT_BUILD" ]
+  }
+}
+
+# default_warnings ------------------------------------------------------------
+#
+# Collects all warning flags that are used by default. This is used as a
+# subconfig of both chromium_code and no_chromium_code. This way these
+# flags are guaranteed to appear on the compile command line after -Wall.
+config("default_warnings") {
+  cflags = []
+  cflags_c = []
+  cflags_cc = []
+  ldflags = []
+
+  if (is_win) {
+    if (treat_warnings_as_errors) {
+      cflags += [ "/WX" ]
+    }
+    if (fatal_linker_warnings) {
+      arflags = [ "/WX" ]
+      ldflags = [ "/WX" ]
+    }
+    defines = [
+      # Without this, Windows headers warn that functions like wcsnicmp
+      # should be spelled _wcsnicmp. But all other platforms keep spelling
+      # it wcsnicmp, making this warning unhelpful. We don't want it.
+      "_CRT_NONSTDC_NO_WARNINGS",
+
+      # TODO(thakis): winsock wants us to use getaddrinfo instead of
+      # gethostbyname. Fires mostly in non-Chromium code. We probably
+      # want to remove this define eventually.
+      "_WINSOCK_DEPRECATED_NO_WARNINGS",
+    ]
+    if (!is_clang) {
+      # TODO(thakis): Remove this once
+      # https://swiftshader-review.googlesource.com/c/SwiftShader/+/57968 has
+      # rolled into angle.
+      cflags += [ "/wd4244" ]
+    }
+  } else {
+    if (is_apple && !is_nacl) {
+      # When compiling Objective-C, warns if a method is used whose
+      # availability is newer than the deployment target.
+      cflags += [ "-Wunguarded-availability" ]
+    }
+
+    if (is_ios) {
+      # When compiling Objective-C, warns if a selector named via @selector has
+      # not been defined in any visible interface.
+      cflags += [ "-Wundeclared-selector" ]
+    }
+
+    # Suppress warnings about ABI changes on ARM (Clang doesn't give this
+    # warning).
+    if (current_cpu == "arm" && !is_clang) {
+      cflags += [ "-Wno-psabi" ]
+    }
+
+    if (!is_clang) {
+      cflags_cc += [
+        # See comment for -Wno-c++11-narrowing.
+        "-Wno-narrowing",
+      ]
+
+      # -Wclass-memaccess warns about hash table and vector in blink.
+      # But the violation is intentional.
+      if (!is_nacl) {
+        cflags_cc += [ "-Wno-class-memaccess" ]
+      }
+
+      # -Wunused-local-typedefs is broken in gcc,
+      # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=63872
+      cflags += [ "-Wno-unused-local-typedefs" ]
+
+      # Don't warn about "maybe" uninitialized. Clang doesn't include this
+      # in -Wall but gcc does, and it gives false positives.
+      cflags += [ "-Wno-maybe-uninitialized" ]
+      cflags += [ "-Wno-deprecated-declarations" ]
+
+      # -Wcomment gives too many false positives in the case where a
+      # backslash-ended comment line is followed by a new line of
+      # comments.
+      # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=61638
+      cflags += [ "-Wno-comments" ]
+
+      # -Wpacked-not-aligned complains about all generated
+      # mojom-shared-internal.h files.
+ cflags += [ "-Wno-packed-not-aligned" ] + } + } + + # Common Clang and GCC warning setup. + if (!is_win || is_clang) { + cflags += [ + # Disables. + "-Wno-missing-field-initializers", # "struct foo f = {0};" + "-Wno-unused-parameter", # Unused function parameters. + ] + + if (!is_nacl || is_nacl_saigo) { + cflags += [ + # An ABI compat warning we don't care about, https://crbug.com/1102157 + # TODO(thakis): Push this to the (few) targets that need it, + # instead of having a global flag. + "-Wno-psabi", + ] + } + } + + if (is_clang) { + cflags += [ + "-Wloop-analysis", + + # TODO(thakis): This used to be implied by -Wno-unused-function, + # which we no longer use. Check if it makes sense to remove + # this as well. http://crbug.com/316352 + "-Wno-unneeded-internal-declaration", + ] + + if (!is_nacl || is_nacl_saigo) { + if (is_win) { + # TODO(thakis): https://crbug.com/617318 + # Currently goma can not handle case sensitiveness for windows well. + cflags += [ "-Wno-nonportable-include-path" ] + } + + cflags += [ + "-Wenum-compare-conditional", + + # Ignore warnings about MSVC optimization pragmas. + # TODO(thakis): Only for no_chromium_code? http://crbug.com/912662 + "-Wno-ignored-pragma-optimize", + ] + + if (!is_nacl) { + cflags += [ + # TODO(crbug.com/1343975) Evaluate and possibly enable. + "-Wno-deprecated-builtins", + + # TODO(crbug.com/1352183) Evaluate and possibly enable. + "-Wno-bitfield-constant-conversion", + + # TODO(crbug.com/1412713) Evaluate and possibly enable. + "-Wno-deprecated-this-capture", + ] + } + } + } +} + +# prevent_unsafe_narrowing ---------------------------------------------------- +# +# Warnings that prevent narrowing or comparisons of integer types that are +# likely to cause out-of-bound read/writes or Undefined Behaviour. In +# particular, size_t is used for memory sizes, allocation, indexing, and +# offsets. Using other integer types along with size_t produces risk of +# memory-safety bugs and thus security exploits. +# +# In order to prevent these bugs, allocation sizes were historically limited to +# sizes that can be represented within 31 bits of information, allowing `int` to +# be safely misused instead of `size_t` (https://crbug.com/169327). In order to +# support increasing the allocation limit we require strictly adherence to +# using the correct types, avoiding lossy conversions, and preventing overflow. +# To do so, enable this config and fix errors by converting types to be +# `size_t`, which is both large enough and unsigned, when dealing with memory +# sizes, allocations, indices, or offsets.In cases where type conversion is not +# possible or is superfluous, use base::strict_cast<> or base::checked_cast<> +# to convert to size_t as needed. +# See also: https://docs.google.com/document/d/1CTbQ-5cQjnjU8aCOtLiA7G6P0i5C6HpSDNlSNq6nl5E +# +# To enable in a GN target, use: +# configs += [ "//build/config/compiler:prevent_unsafe_narrowing" ] + +config("prevent_unsafe_narrowing") { + if (is_clang) { + cflags = [ + "-Wshorten-64-to-32", + "-Wimplicit-int-conversion", + "-Wsign-compare", + "-Wsign-conversion", + ] + if (!is_nacl) { + cflags += [ + # Avoid bugs of the form `if (size_t i = size; i >= 0; --i)` while + # fixing types to be sign-correct. + "-Wtautological-unsigned-zero-compare", + ] + } + } +} + +# chromium_code --------------------------------------------------------------- +# +# Toggles between higher and lower warnings for code that is (or isn't) +# part of Chromium. 
+ +config("chromium_code") { + if (is_win) { + if (is_clang) { + cflags = [ "/W4" ] # Warning level 4. + + # Opt in to additional [[nodiscard]] on standard library methods. + defines = [ "_HAS_NODISCARD" ] + } + } else { + cflags = [ "-Wall" ] + if (treat_warnings_as_errors) { + cflags += [ "-Werror" ] + + # The compiler driver can sometimes (rarely) emit warnings before calling + # the actual linker. Make sure these warnings are treated as errors as + # well. + ldflags = [ "-Werror" ] + } + if (is_clang) { + # Enable extra warnings for chromium_code when we control the compiler. + cflags += [ "-Wextra" ] + } + + if (treat_warnings_as_errors) { + # Turn rustc warnings into the "deny" lint level, which produce compiler + # errors. The equivalent of -Werror for clang/gcc. + # + # Note we apply the actual lint flags in config("compiler"). All warnings + # are suppressed in third-party crates. + rustflags = [ "-Dwarnings" ] + } + + # In Chromium code, we define __STDC_foo_MACROS in order to get the + # C99 macros on Mac and Linux. + defines = [ + "__STDC_CONSTANT_MACROS", + "__STDC_FORMAT_MACROS", + ] + + if (!is_debug && !using_sanitizer && current_cpu != "s390x" && + current_cpu != "s390" && current_cpu != "ppc64" && + current_cpu != "mips" && current_cpu != "mips64" && + current_cpu != "riscv64" && current_cpu != "loong64") { + # Non-chromium code is not guaranteed to compile cleanly with + # _FORTIFY_SOURCE. Also, fortified build may fail when optimizations are + # disabled, so only do that for Release build. + defines += [ "_FORTIFY_SOURCE=2" ] + } + + if (is_apple) { + cflags_objc = [ "-Wimplicit-retain-self" ] + cflags_objcc = [ "-Wimplicit-retain-self" ] + } + + if (is_mac) { + cflags_objc += [ "-Wobjc-missing-property-synthesis" ] + cflags_objcc += [ "-Wobjc-missing-property-synthesis" ] + } + } + + if (is_clang) { + cflags += [ + # Warn on missing break statements at the end of switch cases. + # For intentional fallthrough, use [[fallthrough]]. + "-Wimplicit-fallthrough", + + # Warn on unnecessary extra semicolons outside of function definitions. + "-Wextra-semi", + ] + + # TODO(thakis): Enable this more often, https://crbug.com/346399 + # use_fuzzing_engine_with_lpm: https://crbug.com/1063180 + if ((!is_nacl || is_nacl_saigo) && !use_fuzzing_engine_with_lpm) { + cflags += [ "-Wunreachable-code-aggressive" ] + } + + # Thread safety analysis is broken under nacl: https://crbug.com/982423. + if (!is_nacl || is_nacl_saigo) { + cflags += [ + # Thread safety analysis. See base/thread_annotations.h and + # https://clang.llvm.org/docs/ThreadSafetyAnalysis.html + "-Wthread-safety", + ] + } + } + + configs = [ + ":default_warnings", + ":noshadowing", + ] +} + +config("no_chromium_code") { + cflags = [] + cflags_cc = [] + defines = [] + + if (is_win) { + if (is_clang) { + cflags += [ "/W3" ] # Warning level 3. + } + cflags += [ + "/wd4800", # Disable warning when forcing value to bool. + "/wd4267", # TODO(jschuh): size_t to int. + ] + } else { + # GCC may emit unsuppressible warnings so don't add -Werror for no chromium + # code. crbug.com/589724 + if (treat_warnings_as_errors && is_clang) { + cflags += [ "-Werror" ] + ldflags = [ "-Werror" ] + } + if (is_clang && !is_nacl) { + # TODO(thakis): Remove !is_nacl once + # https://codereview.webrtc.org/1552863002/ made its way into chromium. + cflags += [ "-Wall" ] + } + } + + if (is_clang) { + cflags += [ + # Lots of third-party libraries have unused variables. Instead of + # suppressing them individually, we just blanket suppress them here. 
+ "-Wno-unused-variable", + + # Similarly, we're not going to fix all the C++11 narrowing issues in + # third-party libraries. + "-Wno-c++11-narrowing", + ] + if (!is_nacl) { + cflags += [ + # Disabled for similar reasons as -Wunused-variable. + "-Wno-unused-but-set-variable", + + # TODO(https://crbug.com/1202159): Clean up and enable. + "-Wno-misleading-indentation", + ] + } + } + + # Suppress all warnings in third party, as Cargo does: + # https://doc.rust-lang.org/rustc/lints/levels.html#capping-lints + rustflags = [ "--cap-lints=allow" ] + + configs = [ ":default_warnings" ] +} + +# noshadowing ----------------------------------------------------------------- +# +# Allows turning -Wshadow on. + +config("noshadowing") { + # This flag has to be disabled for nacl because the nacl compiler is too + # strict about shadowing. + if (is_clang && (!is_nacl || is_nacl_saigo)) { + cflags = [ "-Wshadow" ] + } +} + +# rtti ------------------------------------------------------------------------ +# +# Allows turning Run-Time Type Identification on or off. + +config("rtti") { + if (is_win) { + cflags_cc = [ "/GR" ] + } else { + cflags_cc = [ "-frtti" ] + } +} + +config("no_rtti") { + # Some sanitizer configs may require RTTI to be left enabled globally + if (!use_rtti) { + if (is_win) { + cflags_cc = [ "/GR-" ] + } else { + cflags_cc = [ "-fno-rtti" ] + cflags_objcc = cflags_cc + } + } +} + +# export_dynamic --------------------------------------------------------------- +# +# Ensures all exported symbols are added to the dynamic symbol table. This is +# necessary to expose Chrome's custom operator new() and operator delete() (and +# other memory-related symbols) to libraries. Otherwise, they might +# (de)allocate memory on a different heap, which would spell trouble if pointers +# to heap-allocated memory are passed over shared library boundaries. +config("export_dynamic") { + # TODO(crbug.com/1052397): Revisit after target_os flip is completed. + if (is_linux || is_chromeos_lacros || export_libcxxabi_from_executables) { + ldflags = [ "-rdynamic" ] + } +} + +# thin_archive ----------------------------------------------------------------- +# +# Enables thin archives on posix, and on windows when the lld linker is used. +# Regular archives directly include the object files used to generate it. +# Thin archives merely reference the object files. +# This makes building them faster since it requires less disk IO, but is +# inappropriate if you wish to redistribute your static library. +# This config is added to the global config, so thin archives should already be +# enabled. If you want to make a distributable static library, you need to do 2 +# things: +# 1. Set complete_static_lib so that all dependencies of the library make it +# into the library. See `gn help complete_static_lib` for details. +# 2. Remove the thin_archive config, so that the .a file actually contains all +# .o files, instead of just references to .o files in the build directoy +config("thin_archive") { + # The macOS and iOS default linker ld64 does not support reading thin + # archives. + # TODO(crbug.com/1221615): Enable on is_apple if use_lld once that no longer + # confuses lldb. + if ((is_posix && !is_nacl && !is_apple) || is_fuchsia) { + arflags = [ "-T" ] + } else if (is_win && use_lld) { + arflags = [ "/llvmlibthin" ] + } +} + +# exceptions ------------------------------------------------------------------- +# +# Allows turning Exceptions on or off. +# Note: exceptions are disallowed in Google code. 
+ +config("exceptions") { + if (is_win) { + # Enables exceptions in the STL. + if (!use_custom_libcxx) { + defines = [ "_HAS_EXCEPTIONS=1" ] + } + cflags_cc = [ "/EHsc" ] + } else { + cflags_cc = [ "-fexceptions" ] + cflags_objcc = cflags_cc + } +} + +config("no_exceptions") { + if (is_win) { + # Disables exceptions in the STL. + # libc++ uses the __has_feature macro to control whether to use exceptions, + # so defining this macro is unnecessary. Defining _HAS_EXCEPTIONS to 0 also + # breaks libc++ because it depends on MSVC headers that only provide certain + # declarations if _HAS_EXCEPTIONS is 1. Those MSVC headers do not use + # exceptions, despite being conditional on _HAS_EXCEPTIONS. + if (!use_custom_libcxx) { + defines = [ "_HAS_EXCEPTIONS=0" ] + } + } else { + cflags_cc = [ "-fno-exceptions" ] + cflags_objcc = cflags_cc + } +} + +# Warnings --------------------------------------------------------------------- + +# Generate a warning for code that might emit a static initializer. +# See: //docs/static_initializers.md +# See: https://groups.google.com/a/chromium.org/d/topic/chromium-dev/B9Q5KTD7iCo/discussion +config("wglobal_constructors") { + if (is_clang) { + cflags = [ "-Wglobal-constructors" ] + } +} + +# This will generate warnings when using Clang if code generates exit-time +# destructors, which will slow down closing the program. +# TODO(thakis): Make this a blocklist instead, http://crbug.com/101600 +config("wexit_time_destructors") { + if (is_clang) { + cflags = [ "-Wexit-time-destructors" ] + } +} + +# Some code presumes that pointers to structures/objects are compatible +# regardless of whether what they point to is already known to be valid. +# gcc 4.9 and earlier had no way of suppressing this warning without +# suppressing the rest of them. Here we centralize the identification of +# the gcc 4.9 toolchains. +config("no_incompatible_pointer_warnings") { + cflags = [] + if (is_clang) { + cflags += [ "-Wno-incompatible-pointer-types" ] + } else if (current_cpu == "mipsel" || current_cpu == "mips64el") { + cflags += [ "-w" ] + } else if (is_chromeos_ash && current_cpu == "arm") { + cflags += [ "-w" ] + } +} + +# Optimization ----------------------------------------------------------------- +# +# The BUILDCONFIG file sets the "default_optimization" config on targets by +# default. It will be equivalent to either "optimize" (release) or +# "no_optimize" (debug) optimization configs. +# +# You can override the optimization level on a per-target basis by removing the +# default config and then adding the named one you want: +# +# configs -= [ "//build/config/compiler:default_optimization" ] +# configs += [ "//build/config/compiler:optimize_max" ] + +# Shared settings for both "optimize" and "optimize_max" configs. +# IMPORTANT: On Windows "/O1" and "/O2" must go before the common flags. +if (is_win) { + common_optimize_on_cflags = [ + "/Ob2", # Both explicit and auto inlining. + "/Oy-", # Disable omitting frame pointers, must be after /O2. + "/Zc:inline", # Remove unreferenced COMDAT (faster links). + ] + if (!is_asan) { + common_optimize_on_cflags += [ + # Put data in separate COMDATs. This allows the linker + # to put bit-identical constants at the same address even if + # they're unrelated constants, which saves binary size. + # This optimization can't be used when ASan is enabled because + # it is not compatible with the ASan ODR checker. 
+ "/Gw", + ] + } + common_optimize_on_ldflags = [] + + # /OPT:ICF is not desirable in Debug builds, since code-folding can result in + # misleading symbols in stack traces. + if (!is_debug && !is_component_build) { + common_optimize_on_ldflags += [ "/OPT:ICF" ] # Redundant COMDAT folding. + } + + if (is_official_build) { + common_optimize_on_ldflags += [ "/OPT:REF" ] # Remove unreferenced data. + # TODO(thakis): Add LTO/PGO clang flags eventually, https://crbug.com/598772 + } +} else { + common_optimize_on_cflags = [] + common_optimize_on_ldflags = [] + + if (is_android) { + # TODO(jdduke) Re-enable on mips after resolving linking + # issues with libc++ (crbug.com/456380). + if (current_cpu != "mipsel" && current_cpu != "mips64el") { + common_optimize_on_ldflags += [ + # Warn in case of text relocations. + "-Wl,--warn-shared-textrel", + ] + } + } + + if (is_apple) { + common_optimize_on_ldflags += [ "-Wl,-dead_strip" ] + + if (is_official_build) { + common_optimize_on_ldflags += [ + "-Wl,-no_data_in_code_info", + "-Wl,-no_function_starts", + ] + } + } else if (current_os != "aix" && current_os != "zos") { + # Non-Mac Posix flags. + # Aix does not support these. + + common_optimize_on_cflags += [ + # Put data and code in their own sections, so that unused symbols + # can be removed at link time with --gc-sections. + "-fdata-sections", + "-ffunction-sections", + ] + if ((!is_nacl || is_nacl_saigo) && is_clang) { + # We don't care about unique section names, this makes object files a bit + # smaller. + common_optimize_on_cflags += [ "-fno-unique-section-names" ] + } + + common_optimize_on_ldflags += [ + # Specifically tell the linker to perform optimizations. + # See http://lwn.net/Articles/192624/ . + # -O2 enables string tail merge optimization in gold and lld. + "-Wl,-O2", + "-Wl,--gc-sections", + ] + } +} + +config("default_stack_frames") { + if (!is_win) { + if (enable_frame_pointers) { + cflags = [ "-fno-omit-frame-pointer" ] + + # Omit frame pointers for leaf functions on x86, otherwise building libyuv + # gives clang's register allocator issues, see llvm.org/PR15798 / + # crbug.com/233709 + if (is_clang && current_cpu == "x86" && !is_apple) { + cflags += [ "-momit-leaf-frame-pointer" ] + } + } else { + cflags = [ "-fomit-frame-pointer" ] + } + } + # On Windows, the flag to enable framepointers "/Oy-" must always come after + # the optimization flag [e.g. "/O2"]. The optimization flag is set by one of + # the "optimize" configs, see rest of this file. The ordering that cflags are + # applied is well-defined by the GN spec, and there is no way to ensure that + # cflags set by "default_stack_frames" is applied after those set by an + # "optimize" config. Similarly, there is no way to propagate state from this + # config into the "optimize" config. We always apply the "/Oy-" config in the + # definition for common_optimize_on_cflags definition, even though this may + # not be correct. +} + +# Default "optimization on" config. +config("optimize") { + if (is_win) { + if (chrome_pgo_phase != 2) { + # Favor size over speed, /O1 must be before the common flags. + # /O1 implies /Os and /GF. + cflags = [ "/O1" ] + common_optimize_on_cflags + [ "/Oi" ] + rustflags = [ "-Copt-level=s" ] + } else { + # PGO requires all translation units to be compiled with /O2. The actual + # optimization level will be decided based on the profiling data. 
+ cflags = [ "/O2" ] + common_optimize_on_cflags + [ "/Oi" ] + + # https://doc.rust-lang.org/rustc/profile-guided-optimization.html#usage + # suggests not using an explicit `-Copt-level` at all, and the default is + # to optimize for performance like `/O2` for clang. + rustflags = [] + } + } else if (optimize_for_size) { + # Favor size over speed. + if (is_clang) { + cflags = [ "-Oz" ] + common_optimize_on_cflags + + if (use_ml_inliner && is_a_target_toolchain) { + cflags += [ + "-mllvm", + "-enable-ml-inliner=release", + ] + } + } else { + cflags = [ "-Os" ] + common_optimize_on_cflags + } + + # Like with `-Oz` on Clang, `-Copt-level=z` will also turn off loop + # vectorization. + rustflags = [ "-Copt-level=z" ] + } else if (is_chromeos) { + # TODO(gbiv): This is partially favoring size over speed. CrOS exclusively + # uses clang, and -Os in clang is more of a size-conscious -O2 than "size at + # any cost" (AKA -Oz). It'd be nice to: + # - Make `optimize_for_size` apply to all platforms where we're optimizing + # for size by default (so, also Windows) + # - Investigate -Oz here, maybe just for ARM? + cflags = [ "-Os" ] + common_optimize_on_cflags + + # Similar to clang, we optimize with `-Copt-level=s` to keep loop + # vectorization while otherwise optimizing for size. + rustflags = [ "-Copt-level=s" ] + } else { + cflags = [ "-O2" ] + common_optimize_on_cflags + + # The `-O3` for clang turns on extra optimizations compared to the standard + # `-O2`. But for rust, `-Copt-level=3` is the default and is thus reliable + # to use. + rustflags = [ "-Copt-level=3" ] + } + ldflags = common_optimize_on_ldflags +} + +# Turn off optimizations. +config("no_optimize") { + if (is_win) { + cflags = [ + "/Od", # Disable optimization. + "/Ob0", # Disable all inlining (on by default). + "/GF", # Enable string pooling (off by default). + ] + + if (target_cpu == "arm64") { + # Disable omitting frame pointers for no_optimize build because stack + # traces on Windows ARM64 rely on it. + cflags += [ "/Oy-" ] + } + } else if (is_android && !android_full_debug) { + # On Android we kind of optimize some things that don't affect debugging + # much even when optimization is disabled to get the binary size down. + if (is_clang) { + cflags = [ "-Oz" ] + common_optimize_on_cflags + } else { + cflags = [ "-Os" ] + common_optimize_on_cflags + } + + if (!is_component_build) { + # Required for library partitions. Without this all symbols just end up + # in the base partition. + ldflags = [ "-Wl,--gc-sections" ] + } + } else if (is_fuchsia) { + # On Fuchsia, we optimize for size here to reduce the size of debug build + # packages so they can be run in a KVM. See crbug.com/910243 for details. + cflags = [ "-Og" ] + } else { + cflags = [ "-O0" ] + ldflags = [] + } +} + +# Turns up the optimization level. On Windows, this implies whole program +# optimization and link-time code generation which is very expensive and should +# be used sparingly. +config("optimize_max") { + if (is_nacl && is_nacl_irt) { + # The NaCl IRT is a special case and always wants its own config. + # Various components do: + # if (!is_debug) { + # configs -= [ "//build/config/compiler:default_optimization" ] + # configs += [ "//build/config/compiler:optimize_max" ] + # } + # So this config has to have the selection logic just like + # "default_optimization", below. + configs = [ "//build/config/nacl:irt_optimize" ] + } else { + ldflags = common_optimize_on_ldflags + if (is_win) { + # Favor speed over size, /O2 must be before the common flags. 
+ # /O2 implies /Ot, /Oi, and /GF. + cflags = [ "/O2" ] + common_optimize_on_cflags + } else if (optimize_for_fuzzing) { + cflags = [ "-O1" ] + common_optimize_on_cflags + } else { + cflags = [ "-O2" ] + common_optimize_on_cflags + } + rustflags = [ "-Copt-level=3" ] + } +} + +# This config can be used to override the default settings for per-component +# and whole-program optimization, optimizing the particular target for speed +# instead of code size. This config is exactly the same as "optimize_max" +# except that we use -O3 instead of -O2 on non-win, non-IRT platforms. +# +# TODO(crbug.com/621335) - rework how all of these configs are related +# so that we don't need this disclaimer. +config("optimize_speed") { + if (is_nacl && is_nacl_irt) { + # The NaCl IRT is a special case and always wants its own config. + # Various components do: + # if (!is_debug) { + # configs -= [ "//build/config/compiler:default_optimization" ] + # configs += [ "//build/config/compiler:optimize_max" ] + # } + # So this config has to have the selection logic just like + # "default_optimization", below. + configs = [ "//build/config/nacl:irt_optimize" ] + } else { + ldflags = common_optimize_on_ldflags + if (is_win) { + # Favor speed over size, /O2 must be before the common flags. + # /O2 implies /Ot, /Oi, and /GF. + cflags = [ "/O2" ] + common_optimize_on_cflags + } else if (optimize_for_fuzzing) { + cflags = [ "-O1" ] + common_optimize_on_cflags + } else { + cflags = [ "-O3" ] + common_optimize_on_cflags + } + rustflags = [ "-Copt-level=3" ] + } +} + +config("optimize_fuzzing") { + cflags = [ "-O1" ] + common_optimize_on_cflags + rustflags = [ "-Copt-level=1" ] + ldflags = common_optimize_on_ldflags + visibility = [ ":default_optimization" ] +} + +# The default optimization applied to all targets. This will be equivalent to +# either "optimize" or "no_optimize", depending on the build flags. +config("default_optimization") { + if (is_nacl && is_nacl_irt) { + # The NaCl IRT is a special case and always wants its own config. + # It gets optimized the same way regardless of the type of build. + configs = [ "//build/config/nacl:irt_optimize" ] + } else if (is_debug) { + configs = [ ":no_optimize" ] + } else if (optimize_for_fuzzing) { + assert(!is_win, "Fuzzing optimize level not supported on Windows") + + # Coverage build is quite slow. Using "optimize_for_fuzzing" makes it even + # slower as it uses "-O1" instead of "-O3". Prevent that from happening. 
+    assert(!use_clang_coverage,
+           "optimize_for_fuzzing=true should not be used with " +
+               "use_clang_coverage=true.")
+    configs = [ ":optimize_fuzzing" ]
+  } else {
+    configs = [ ":optimize" ]
+  }
+}
+
+_clang_sample_profile = ""
+if (is_clang && is_a_target_toolchain) {
+  if (clang_sample_profile_path != "") {
+    _clang_sample_profile = clang_sample_profile_path
+  } else if (clang_use_default_sample_profile) {
+    assert(build_with_chromium,
+           "Our default profiles currently only apply to Chromium")
+    assert(is_android || is_chromeos || is_castos,
+           "The current platform has no default profile")
+    if (is_android || is_castos) {
+      _clang_sample_profile = "//chrome/android/profiles/afdo.prof"
+    } else {
+      assert(
+          chromeos_afdo_platform == "atom" ||
+              chromeos_afdo_platform == "bigcore" ||
+              chromeos_afdo_platform == "arm" ||
+              chromeos_afdo_platform == "arm-exp",
+          "Only 'atom', 'bigcore', 'arm' and 'arm-exp' are valid ChromeOS profiles.")
+      _clang_sample_profile =
+          "//chromeos/profiles/${chromeos_afdo_platform}.afdo.prof"
+    }
+  }
+}
+
+# Clang offers a way to assert that AFDO profiles are accurate, which causes it
+# to optimize functions not represented in a profile more aggressively for size.
+# This config can be toggled in cases where shaving off binary size hurts
+# performance too much.
+config("afdo_optimize_size") {
+  if (_clang_sample_profile != "" && sample_profile_is_accurate) {
+    cflags = [ "-fprofile-sample-accurate" ]
+  }
+}
+
+# GCC and clang support a form of profile-guided optimization called AFDO.
+# There are some targeted places where AFDO regresses, so we provide a separate
+# config to allow AFDO to be disabled per-target.
+config("afdo") {
+  if (is_clang) {
+    cflags = []
+    if (clang_emit_debug_info_for_profiling) {
+      # Add the following flags to generate debug info for profiling.
+      cflags += [ "-gline-tables-only" ]
+      if (!is_nacl) {
+        cflags += [ "-fdebug-info-for-profiling" ]
+      }
+    }
+    if (_clang_sample_profile != "") {
+      assert(chrome_pgo_phase == 0, "AFDO can't be used in PGO builds")
+      rebased_clang_sample_profile =
+          rebase_path(_clang_sample_profile, root_build_dir)
+      cflags += [ "-fprofile-sample-use=${rebased_clang_sample_profile}" ]
+      if (use_profi) {
+        cflags += [ "-fsample-profile-use-profi" ]
+      }
+      inputs = [ _clang_sample_profile ]
+    }
+  } else if (auto_profile_path != "" && is_a_target_toolchain) {
+    cflags = [ "-fauto-profile=${auto_profile_path}" ]
+    inputs = [ auto_profile_path ]
+  }
+}
+
+# Symbols ----------------------------------------------------------------------
+
+# The BUILDCONFIG file sets the "default_symbols" config on targets by
+# default. It will be equivalent to one of the three specific symbol levels.
+#
+# You can override the symbol level on a per-target basis by removing the
+# default config and then adding the named one you want:
+#
+#   configs -= [ "//build/config/compiler:default_symbols" ]
+#   configs += [ "//build/config/compiler:symbols" ]
+
+# A helper config that all configs passing /DEBUG to the linker should
+# include as sub-config.
+config("win_pdbaltpath") {
+  visibility = [
+    ":minimal_symbols",
+    ":symbols",
+  ]
+
+  # /DEBUG causes the linker to generate a pdb file, and to write the absolute
+  # path to it in the executable file it generates. This flag turns that
+  # absolute path into just the basename of the pdb file, which helps with
+  # build reproducibility. Debuggers look for pdb files next to executables,
+  # so there's minimal downside to always using this. However, post-mortem
+  # debugging of Chromium crash dumps and ETW tracing can be complicated by
+  # this switch, so an option to omit it is important.
+  if (!use_full_pdb_paths) {
+    ldflags = [ "/pdbaltpath:%_PDB%" ]
+  }
+}
+
+# Full symbols.
+config("symbols") {
+  rustflags = []
+  if (is_win) {
+    if (is_clang) {
+      cflags = [
+        # Debug information in the .obj files.
+        "/Z7",
+
+        # Disable putting the compiler command line into the debug info to
+        # prevent some types of non-determinism.
+        "-gno-codeview-command-line",
+      ]
+    } else {
+      cflags = [ "/Zi" ]  # Produce PDB file, no edit and continue.
+    }
+
+    if (is_clang && use_lld && use_ghash) {
+      cflags += [ "-gcodeview-ghash" ]
+      ldflags = [ "/DEBUG:GHASH" ]
+    } else {
+      ldflags = [ "/DEBUG" ]
+    }
+
+    # All configs using /DEBUG should include this:
+    configs = [ ":win_pdbaltpath" ]
+  } else {
+    cflags = []
+    if (is_mac && enable_dsyms) {
+      # If generating dSYMs, specify -fno-standalone-debug. This was
+      # originally specified for https://crbug.com/479841 because dsymutil
+      # could not handle a 4GB dSYM file. But dsymutil from Xcodes prior to
+      # version 7 also produces debug data that is incompatible with Breakpad
+      # dump_syms, so this is still required (https://crbug.com/622406).
+      cflags += [ "-fno-standalone-debug" ]
+    }
+
+    # On aix -gdwarf causes linker failures due to thread_local variables.
+    if (!is_nacl && current_os != "aix") {
+      if (use_dwarf5) {
+        cflags += [ "-gdwarf-5" ]
+        rustflags += [ "-Zdwarf-version=5" ]
+      } else if (!is_apple) {
+        # Recent clang versions default to DWARF5 on Linux, and Android is
+        # about to switch. TODO: Adopt that in a controlled way.
+        # Apple platforms still default to 4, so the flag is not needed there.
+        cflags += [ "-gdwarf-4" ]
+        rustflags += [ "-Zdwarf-version=4" ]
+      }
+    }
+
+    # The gcc-based nacl compilers don't support -fdebug-compilation-dir (see
+    # elsewhere in this file), so they can't have build-dir-independent output.
+    # Moreover pnacl does not support newer flags such as -fdebug-prefix-map.
+    # Disable symbols for nacl object files to get deterministic,
+    # build-directory-independent output.
+    # Keeping -g2 for saigo as it's the only toolchain whose artifacts are
+    # part of the Chromium release (other nacl toolchains are used only for
+    # tests).
+    if ((!is_nacl || is_nacl_saigo) && current_os != "zos") {
+      cflags += [ "-g2" ]
+    }
+
+    if (!is_nacl && is_clang && !is_tsan && !is_asan) {
+      # gcc generates dwarf-aranges by default on -g1 and -g2. On clang it has
+      # to be manually enabled.
+      #
+      # It is skipped in tsan and asan because enabling it causes some
+      # formatting changes in the output which would require fixing bunches
+      # of expectation regexps.
+      cflags += [ "-gdwarf-aranges" ]
+    }
+
+    if (is_apple) {
+      swiftflags = [ "-g" ]
+    }
+
+    if (use_debug_fission) {
+      cflags += [ "-gsplit-dwarf" ]
+    }
+    asmflags = cflags
+    ldflags = []
+
+    # Split debug info with all thinlto builds except nacl and apple.
+    # thinlto requires -gsplit-dwarf in ldflags.
+    if (use_debug_fission && use_thin_lto && !is_nacl && !is_apple) {
+      ldflags += [ "-gsplit-dwarf" ]
+    }
+
+    # TODO(thakis): Figure out if there's a way to make this go for 32-bit,
+    # currently we get "warning:
+    # obj/native_client/src/trusted/service_runtime/sel_asm/nacl_switch_32.o:
+    # DWARF info may be corrupt; offsets in a range list entry are in different
+    # sections" there. Maybe just a bug in nacl_switch_32.S.
+    _enable_gdb_index =
+        symbol_level == 2 && !is_apple && !is_nacl && current_cpu != "x86" &&
+        current_os != "zos" && (use_gold || use_lld) &&
+        # Disable on non-fission 32-bit Android because it pushes
+        # libcomponents_unittests over the 4gb size limit.
+        !(is_android && !use_debug_fission && current_cpu != "x64" &&
+          current_cpu != "arm64")
+    if (_enable_gdb_index) {
+      if (is_clang) {
+        # This flag enables the GNU-format pubnames and pubtypes sections,
+        # which lld needs in order to generate a correct GDB index.
+        # TODO(pcc): Try to make lld understand non-GNU-format pubnames
+        # sections (llvm.org/PR34820).
+        cflags += [ "-ggnu-pubnames" ]
+      }
+      ldflags += [ "-Wl,--gdb-index" ]
+    }
+  }
+
+  configs = []
+
+  # Compress debug on 32-bit ARM to stay under 4GB for ChromeOS
+  # https://b/243982712.
+  if (symbol_level == 2 && is_chromeos_device && !use_debug_fission &&
+      !is_nacl && current_cpu == "arm") {
+    configs += [ "//build/config:compress_debug_sections" ]
+  }
+
+  if (is_clang && (!is_nacl || is_nacl_saigo) && current_os != "zos") {
+    if (is_apple) {
+      # TODO(https://crbug.com/1050118): Investigate missing debug info on mac.
+      # Make sure we don't use constructor homing on mac.
+      cflags += [
+        "-Xclang",
+        "-debug-info-kind=limited",
+      ]
+    } else {
+      # Use constructor homing for debug info. This option reduces debug info
+      # by emitting class type info only when constructors are emitted.
+      cflags += [
+        "-Xclang",
+        "-fuse-ctor-homing",
+      ]
+    }
+  }
+  rustflags += [ "-g" ]
+}
+
+# Minimal symbols.
+# This config guarantees that the symbols needed for stack traces (which are
+# shown to users when a crash happens in unittests running on the buildbot)
+# are retained.
+config("minimal_symbols") {
+  if (is_win) {
+    # Functions, files, and line tables only.
+    cflags = []
+
+    if (is_clang && use_lld && use_ghash) {
+      cflags += [ "-gcodeview-ghash" ]
+      ldflags = [ "/DEBUG:GHASH" ]
+    } else {
+      ldflags = [ "/DEBUG" ]
+    }
+
+    # All configs using /DEBUG should include this:
+    configs = [ ":win_pdbaltpath" ]
+
+    # Enable line tables for clang. MSVC doesn't have an equivalent option.
+    if (is_clang) {
+      # -gline-tables-only is the same as -g1, but clang-cl only exposes the
+      # former.
+      cflags += [ "-gline-tables-only" ]
+    }
+  } else {
+    cflags = []
+    if (is_mac && !use_dwarf5) {
+      # clang defaults to DWARF2 on macOS unless mac_deployment_target is
+      # at least 10.11.
+      # TODO(thakis): Remove this once mac_deployment_target is 10.11.
+      cflags += [ "-gdwarf-4" ]
+    } else if (!use_dwarf5 && !is_nacl && current_os != "aix") {
+      # On aix -gdwarf causes linker failures due to thread_local variables.
+      # Recent clang versions default to DWARF5 on Linux, and Android is about
+      # to switch. TODO: Adopt that in a controlled way.
+      cflags += [ "-gdwarf-4" ]
+    }
+
+    if (use_dwarf5 && !is_nacl) {
+      cflags += [ "-gdwarf-5" ]
+    }
+
+    # The gcc-based nacl compilers don't support -fdebug-compilation-dir (see
+    # elsewhere in this file), so they can't have build-dir-independent output.
+    # Moreover pnacl does not support newer flags such as -fdebug-prefix-map.
+    # Disable symbols for nacl object files to get deterministic,
+    # build-directory-independent output.
+    # Keeping -g1 for saigo as it's the only toolchain whose artifacts are
+    # part of the Chromium release (other nacl toolchains are used only for
+    # tests).
+    if (!is_nacl || is_nacl_saigo) {
+      cflags += [ "-g1" ]
+    }
+
+    if (!is_nacl && is_clang && !is_tsan && !is_asan) {
+      # See comment for -gdwarf-aranges in config("symbols").
+ cflags += [ "-gdwarf-aranges" ] + } + + ldflags = [] + if (is_android && is_clang) { + # Android defaults to symbol_level=1 builds, but clang, unlike gcc, + # doesn't emit DW_AT_linkage_name in -g1 builds. + # -fdebug-info-for-profiling enables that (and a bunch of other things we + # don't need), so that we get qualified names in stacks. + # TODO(thakis): Consider making clang emit DW_AT_linkage_name in -g1 mode; + # failing that consider doing this on non-Android too. + cflags += [ "-fdebug-info-for-profiling" ] + } + + asmflags = cflags + } + rustflags = [ "-Cdebuginfo=1" ] +} + +# This configuration contains function names only. That is, the compiler is +# told to not generate debug information and the linker then just puts function +# names in the final debug information. +config("no_symbols") { + if (is_win) { + ldflags = [ "/DEBUG" ] + + # All configs using /DEBUG should include this: + configs = [ ":win_pdbaltpath" ] + } else { + cflags = [ "-g0" ] + asmflags = cflags + } +} + +# Default symbols. +config("default_symbols") { + if (symbol_level == 0) { + configs = [ ":no_symbols" ] + } else if (symbol_level == 1) { + configs = [ ":minimal_symbols" ] + } else if (symbol_level == 2) { + configs = [ ":symbols" ] + } else { + assert(false) + } + + # This config is removed by base unittests apk. + if (is_android && is_clang && strip_debug_info) { + configs += [ ":strip_debug" ] + } +} + +config("strip_debug") { + if (!defined(ldflags)) { + ldflags = [] + } + ldflags += [ "-Wl,--strip-debug" ] +} + +if (is_apple) { + # On Mac and iOS, this enables support for ARC (automatic ref-counting). + # See http://clang.llvm.org/docs/AutomaticReferenceCounting.html. + config("enable_arc") { + common_flags = [ "-fobjc-arc" ] + cflags_objc = common_flags + cflags_objcc = common_flags + } +} + +if (is_chromeos_ash && is_chromeos_device) { + # This config is intended to be a temporary to facilitate + # the transition to use orderfile in Chrome OS. Once orderfile + # use becomes a default in Chrome OS, this config should not + # be needed. + config("use_orderfile_for_hugepage") { + if (chrome_orderfile_path != "") { + defines = [ "CHROMEOS_ORDERFILE_USE" ] + } + } +} + +if (is_android || (is_chromeos_ash && is_chromeos_device)) { + # Use orderfile for linking Chrome on Android and Chrome OS. + # This config enables using an orderfile for linking in LLD. + config("chrome_orderfile_config") { + # Don't try to use an orderfile with call graph sorting, except on Android, + # where we care about memory used by code, so we still want to mandate + # ordering. + if (chrome_orderfile_path != "" && + (is_android || !enable_call_graph_profile_sort)) { + assert(use_lld) + _rebased_orderfile = rebase_path(chrome_orderfile_path, root_build_dir) + ldflags = [ + "-Wl,--symbol-ordering-file", + "-Wl,$_rebased_orderfile", + "-Wl,--no-warn-symbol-ordering", + ] + inputs = [ chrome_orderfile_path ] + } + } +} + +# Initialize all variables on the stack if needed. 
+config("default_init_stack_vars") { + cflags = [] + if (init_stack_vars && is_clang && !is_nacl && !using_sanitizer) { + if (init_stack_vars_zero) { + cflags += [ "-ftrivial-auto-var-init=zero" ] + } else { + cflags += [ "-ftrivial-auto-var-init=pattern" ] + } + } +} + +buildflag_header("compiler_buildflags") { + header = "compiler_buildflags.h" + + flags = [ + "CLANG_PGO=$chrome_pgo_phase", + "SYMBOL_LEVEL=$symbol_level", + ] +} + +config("cet_shadow_stack") { + if (enable_cet_shadow_stack && is_win) { + assert(target_cpu == "x64") + ldflags = [ "/CETCOMPAT" ] + } +} diff --git a/config/compiler/compiler.gni b/config/compiler/compiler.gni new file mode 100644 index 000000000000..4738ee80d307 --- /dev/null +++ b/config/compiler/compiler.gni @@ -0,0 +1,343 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/c++/c++.gni") +import("//build/config/chrome_build.gni") +import("//build/config/chromecast_build.gni") +import("//build/config/chromeos/args.gni") +import("//build/config/chromeos/ui_mode.gni") +import("//build/config/compiler/pgo/pgo.gni") +import("//build/config/cronet/config.gni") +import("//build/config/sanitizers/sanitizers.gni") +import("//build/toolchain/cc_wrapper.gni") +import("//build/toolchain/goma.gni") +import("//build/toolchain/toolchain.gni") +import("//build_overrides/build.gni") + +if (is_android) { + import("//build/config/android/abi.gni") +} +if (current_cpu == "arm" || current_cpu == "arm64") { + import("//build/config/arm.gni") +} + +if (is_apple) { + import("//build/config/apple/symbols.gni") +} + +if (is_ios) { + import("//build/config/ios/config.gni") +} + +declare_args() { + # Set to true to use lld, the LLVM linker. + # In late bring-up on macOS (see docs/mac_lld.md). + # Tentatively used on iOS. + # The default linker everywhere else. + use_lld = is_clang && current_os != "zos" + + # If true, optimize for size. + # Default to favoring speed over size for platforms not listed below. + optimize_for_size = + !is_high_end_android && (is_android || is_ios || is_castos) +} + +declare_args() { + # Default to warnings as errors for default workflow, where we catch + # warnings with known toolchains. Allow overriding this e.g. for Chromium + # builds on Linux that could use a different version of the compiler. + # With GCC, warnings in no-Chromium code are always not treated as errors. + treat_warnings_as_errors = true + + # How many symbols to include in the build. This affects the performance of + # the build since the symbols are large and dealing with them is slow. + # 2 means regular build with symbols. + # 1 means minimal symbols, usually enough for backtraces only. Symbols with + # internal linkage (static functions or those in anonymous namespaces) may not + # appear when using this level. + # 0 means no symbols. + # -1 means auto-set according to debug/release and platform. + symbol_level = -1 + + # Android-only: Strip the debug info of libraries within lib.unstripped to + # reduce size. As long as symbol_level > 0, this will still allow stacks to be + # symbolized. + strip_debug_info = false + + # Compile in such a way as to enable profiling of the generated code. For + # example, don't omit the frame pointer and leave in symbols. + enable_profiling = false + + # use_debug_fission: whether to use split DWARF debug info + # files. This can reduce link time significantly, but is incompatible + # with some utilities such as icecc and ccache. 
+  # gcc >= 4.8 or clang.
+  # http://gcc.gnu.org/wiki/DebugFission
+  #
+  # This is a placeholder value indicating that the code below should set
+  # the default. This is necessary to delay the evaluation of the default
+  # value expression until after its input values such as use_gold have
+  # been set, e.g. by a toolchain_args() block.
+  use_debug_fission = "default"
+
+  # Enables support for ThinLTO, which links 3x-10x faster than full LTO. See
+  # also http://blog.llvm.org/2016/06/thinlto-scalable-and-incremental-lto.html
+  # Use it by default on official-optimized android and Chrome OS builds, but
+  # not ARC or linux-chromeos since it's been seen to not play nicely with
+  # Chrome's clang. crbug.com/1033839
+  # Disabled in iOS cronet builds since build step cronet_static_complete
+  # wants to build a .a file consumable by external clients, and they won't
+  # have the same LLVM revisions as us, making bitcode useless to them.
+  use_thin_lto =
+      is_cfi || (is_clang && is_official_build && chrome_pgo_phase != 1 &&
+                 (is_linux || is_win || is_mac ||
+                  (is_ios && use_lld && !is_cronet_build) ||
+                  (is_android && target_os != "chromeos") ||
+                  (is_chromeos && is_chromeos_device)))
+
+  # If true, use Goma for ThinLTO code generation where applicable.
+  use_goma_thin_lto = false
+
+  # Whether we're using a sample profile collected on an architecture different
+  # than the one we're compiling for.
+  #
+  # It's currently not possible to collect AFDO profiles on anything but
+  # x86{,_64}.
+  using_mismatched_sample_profile = current_cpu != "x64" && current_cpu != "x86"
+
+  # Whether an error should be raised on attempts to make debug builds with
+  # is_component_build=false. Very large debug symbols can have unwanted side
+  # effects so this is enforced by default for chromium.
+  forbid_non_component_debug_builds = build_with_chromium
+
+  # Exclude unwind tables by default for official builds as unwinding can be
+  # done from stack dumps produced by Crashpad at a later time "offline" in the
+  # crash server. Since this increases binary size, we don't recommend including
+  # them in shipping builds.
+  # For unofficial (e.g. development) builds and non-Chrome branded (e.g. Cronet
+  # which doesn't use Crashpad, crbug.com/479283) builds it's useful to be able
+  # to unwind at runtime.
+  # Include the unwind tables on Android even for official builds, as otherwise
+  # the crash dumps generated by Android's debuggerd are largely useless, and
+  # having this additional mechanism to understand issues is particularly
+  # helpful to WebView.
+  exclude_unwind_tables = is_official_build && !is_android
+
+  # Where to redirect clang crash diagnostics.
+  clang_diagnostic_dir =
+      rebase_path("//tools/clang/crashreports", root_build_dir)
+
+  # Mark binaries as compatible with the Shadow Stack feature of Control-flow
+  # Enforcement Technology (CET). If the Windows version and hardware support
+  # the feature and it's enabled by the OS, then additional validation of
+  # return addresses will be performed as a mitigation against return-oriented
+  # programming (ROP).
+  # https://chromium.googlesource.com/chromium/src/+/main/docs/design/sandbox.md#cet-shadow-stack
+  enable_cet_shadow_stack = target_cpu == "x64"
+
+  # Set to true to enable using the ML inliner in LLVM. This currently only
+  # enables the ML inliner when targeting Android.
+  # Currently the ML inliner is only supported on Linux hosts.
+  use_ml_inliner = host_os == "linux" && is_android
+
+  # Set to true to use the android unwinder V2 implementation.
+ use_android_unwinder_v2 = true + + # Whether we should consider the profile we're using to be accurate. Accurate + # profiles have the benefit of (potentially substantial) binary size + # reductions, by instructing the compiler to optimize cold and uncovered + # functions heavily for size. This often comes at the cost of performance. + sample_profile_is_accurate = optimize_for_size + + # Use offsets rather than pointers in vtables in order to reduce the number of + # relocations. This is safe to enable only when all C++ code is built with the + # flag set to the same value. + use_relative_vtables_abi = is_android && current_cpu == "arm64" && + use_custom_libcxx && !is_component_build +} + +# To try out this combination, delete this assert. +assert( + !use_relative_vtables_abi || !is_cfi, + "is_cfi=true is known to conflict with use_relative_vtables_abi=true.\n" + + "See https://bugs.chromium.org/p/chromium/issues/detail?id=1375035#c53") + +assert(!is_cfi || use_thin_lto, "CFI requires ThinLTO") +assert(!enable_profiling || !is_component_build, + "Cannot profile component builds (crbug.com/1199271).") + +if (use_thin_lto && is_debug) { + print("WARNING: ThinLTO (use_thin_lto=true) doesn't work with debug" + + " (is_debug=true) build.") +} + +# Determine whether to enable or disable frame pointers, based on the platform +# and build arguments. +if (is_chromeos) { + # ChromeOS generally prefers frame pointers, to support CWP. + # However, Clang does not currently generate usable frame pointers in ARM + # 32-bit builds (https://bugs.llvm.org/show_bug.cgi?id=18505) so disable them + # there to avoid the unnecessary overhead. + enable_frame_pointers = current_cpu != "arm" +} else if (is_apple || is_linux) { + enable_frame_pointers = true +} else if (is_win) { + # 64-bit Windows ABI doesn't support frame pointers. + # NOTE: This setting is actually not used in the BUILD.gn for Windows, + # but it still reflects correctly that we don't emit frame pointers on x64. + if (current_cpu == "x64") { + enable_frame_pointers = false + } else { + enable_frame_pointers = true + } +} else if (is_android) { + enable_frame_pointers = + enable_profiling || + # Ensure that stacks from arm64 crash dumps are usable (crbug.com/391706). + current_cpu == "arm64" || + # For x86 Android, unwind tables are huge without frame pointers + # (crbug.com/762629). Enabling frame pointers grows the code size slightly + # but overall shrinks binaries considerably by avoiding huge unwind + # tables. + (current_cpu == "x86" && !exclude_unwind_tables && optimize_for_size) || + using_sanitizer || + # For caller-callee instrumentation version which needs frame pointers to + # get the caller address. + use_call_graph +} else if (is_fuchsia) { + # Fuchsia on arm64 could use shadow call stack for unwinding. + enable_frame_pointers = current_cpu != "arm64" +} else { + # Explicitly ask for frame pointers, otherwise stacks may be missing for + # sanitizer and profiling builds. + enable_frame_pointers = using_sanitizer || enable_profiling || is_debug +} + +# In general assume that if we have frame pointers then we can use them to +# unwind the stack. However, this requires that they are enabled by default for +# most translation units, that they are emitted correctly, and that the +# compiler or platform provides a way to access them. +can_unwind_with_frame_pointers = enable_frame_pointers +if (current_cpu == "arm" && arm_use_thumb) { + # We cannot currently unwind ARM Thumb frame pointers correctly. 
+  # See https://bugs.llvm.org/show_bug.cgi?id=18505
+  can_unwind_with_frame_pointers = false
+} else if (is_win) {
+  # Windows 32-bit does provide frame pointers, but the compiler does not
+  # provide intrinsics to access them, so we don't use them.
+  can_unwind_with_frame_pointers = false
+}
+
+assert(!can_unwind_with_frame_pointers || enable_frame_pointers)
+
+# Unwinding with the CFI table is only possible in static library builds, and
+# is required only when frame pointers are not enabled.
+can_unwind_with_cfi_table = is_android && !is_component_build &&
+                            !enable_frame_pointers && current_cpu == "arm"
+
+# Whether or not the cfi table should be enabled on arm.
+# TODO(crbug.com/1090409): Replace can_unwind_with_cfi_table with this once
+# sampling profiler is enabled on android.
+enable_arm_cfi_table = is_android && !is_component_build && current_cpu == "arm"
+
+declare_args() {
+  # Whether to use the gold linker from binutils instead of lld or bfd.
+  use_gold = !use_lld && !(is_castos &&
+                           (current_cpu == "arm" || current_cpu == "mipsel")) &&
+             (((is_linux || is_chromeos_lacros) &&
+               (current_cpu == "x64" || current_cpu == "x86" ||
+                current_cpu == "arm" || current_cpu == "arm64" ||
+                current_cpu == "mipsel" || current_cpu == "mips64el")) ||
+              (is_android && (current_cpu == "x86" || current_cpu == "x64" ||
+                              current_cpu == "arm" || current_cpu == "arm64")))
+}
+
+# Use relative paths for debug info. This is important to make the build
+# results independent of the checkout and build directory names, which
+# in turn is important for goma compile hit rate.
+# Setting this to true may make it harder to debug binaries on Linux, see
+# https://chromium.googlesource.com/chromium/src/+/main/docs/linux/debugging.md#Source-level-debug-with-fdebug_compilation_dir
+# It's not clear if the crash server will correctly handle dSYMs with relative
+# paths, so we disable this feature for official builds. The main benefit is
+# deterministic builds to reduce compile times, so this is less relevant for
+# official builders.
+strip_absolute_paths_from_debug_symbols_default =
+    is_android || is_fuchsia || is_nacl || (is_win && use_lld) || is_linux ||
+    is_chromeos || (is_apple && !enable_dsyms)
+
+# If the platform uses stripped absolute paths by default, then we don't expose
+# it as a configuration option. If this is causing problems, please file a bug.
+if (strip_absolute_paths_from_debug_symbols_default) {
+  strip_absolute_paths_from_debug_symbols = true
+} else {
+  declare_args() {
+    strip_absolute_paths_from_debug_symbols = false
+  }
+}
+
+# If it wasn't manually set, then default use_debug_fission to false.
+assert(
+    use_debug_fission == "default" || use_debug_fission || !use_debug_fission,
+    "Invalid use_debug_fission.")
+if (use_debug_fission == "default") {
+  use_debug_fission = is_debug && !is_android && !is_fuchsia && !is_apple &&
+                      !is_win && (use_gold || use_lld) && cc_wrapper == ""
+}
+
+# If it wasn't manually set, set to an appropriate default.
+assert(symbol_level >= -1 && symbol_level <= 2, "Invalid symbol_level")
+if (symbol_level == -1) {
+  if (is_android && !is_component_build && !use_debug_fission) {
+    # Prefer faster & smaller release builds.
+    symbol_level = 1
+  } else if (is_chromeos_device) {
+    # Use lower symbol level in Simple Chrome build for faster link time.
+    # For Simple Chrome, this should take precedence over is_official_build,
+    # turned on by --internal.
+ if ((target_cpu == "x64" || target_cpu == "x86") && !is_debug) { + # For release x86/x64 build, specify symbol_level=0 for faster link time. + # x86/x64 shows backtraces with symbol_level=0 (arm requires + # symbol_level=1). + symbol_level = 0 + } else { + symbol_level = 1 + } + } else if (using_sanitizer) { + # Sanitizers need line table info for stack traces. They don't need type + # info or variable info, so we can leave that out to speed up the build. + # Sanitizers also require symbols for filename suppressions to work. + symbol_level = 1 + } else if ((!is_nacl && !is_linux && !is_chromeos && !is_fuchsia && + current_os != "aix") || is_debug || is_official_build || + is_castos || is_cast_android) { + # Linux builds slower by having symbols as part of the target binary, + # whereas Mac and Windows have them separate, so in Release Linux, default + # them off, but keep them on for Official builds and Chromecast builds. + symbol_level = 2 + } else { + symbol_level = 0 + } +} + +# Split dwarf works only for symbol_level == 2. +use_debug_fission = use_debug_fission && symbol_level == 2 + +# Non-component debug builds with symbol_level = 2 are an undesirable (very slow +# build times) and unsupported (some test binaries will fail with > 4 GB PDBs) +# combination. This is only checked when current_toolchain == default_toolchain +# because the is_component_build flag is set to false in various components of +# the build (like nacl) and we don't want to assert on those. +# iOS does not support component builds so add an exception for this platform. +if (forbid_non_component_debug_builds) { + assert( + symbol_level != 2 || current_toolchain != default_toolchain || + is_component_build || !is_debug || is_ios || use_debug_fission, + "Can't do non-component debug builds at symbol_level=2 without use_debug_fission=true") +} + +# TODO(crbug.com/1341436) For Windows, to assemble lzma_sdk's assembly files, +# ml64.exe needs to be utilized as llvm-ml cannot yet assemble it. Once llvm-ml +# is able to assemble lzma_sdk assembly files, remove this. +# LzmaDecOpt.asm only works on x64 and not x86. +# https://sourceforge.net/p/sevenzip/discussion/45797/thread/768932e9dd/?limit=25#0d6c +disable_llvm_ml = host_os == "win" && target_cpu == "x64" && !is_msan diff --git a/config/compiler/pgo/BUILD.gn b/config/compiler/pgo/BUILD.gn new file mode 100644 index 000000000000..86e76a41b6b7 --- /dev/null +++ b/config/compiler/pgo/BUILD.gn @@ -0,0 +1,135 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/chromeos/ui_mode.gni") +import("//build/config/clang/clang.gni") +import("//build/config/compiler/compiler.gni") +import("//build/config/compiler/pgo/pgo.gni") +import("//build/toolchain/toolchain.gni") + +# Configuration that enables PGO instrumentation. +config("pgo_instrumentation_flags") { + visibility = [ ":default_pgo_flags" ] + + # Only add flags when chrome_pgo_phase == 1, so that variables we would use + # are not required to be defined when we're not actually using PGO. + if (chrome_pgo_phase == 1 && is_clang && !is_nacl && is_a_target_toolchain) { + cflags = [ "-fprofile-generate" ] + if (!is_win) { + # Windows directly calls link.exe instead of the compiler driver when + # linking, and embeds the path to the profile runtime library as + # dependent library into each object file. 
+ ldflags = [ "-fprofile-generate" ] + } + } +} + +# Configuration that enables optimization using profile data. +config("pgo_optimization_flags") { + visibility = [ ":default_pgo_flags" ] + + # Only add flags when chrome_pgo_phase == 2, so that variables we would use + # are not required to be defined when we're not actually using PGO. + if (chrome_pgo_phase == 2 && is_clang && !is_nacl && is_a_target_toolchain) { + _pgo_target = "" + + # There are txt files used by //tools/update_pgo_profiles.py to decide which + # profiles to use, adding them as inputs so that analyzer recognizes the + # dependencies. + inputs = [] + + if (is_win) { + if (target_cpu == "x64") { + _pgo_target = "win64" + } else { + _pgo_target = "win32" + } + } else if (is_mac) { + if (target_cpu == "arm64") { + _pgo_target = "mac-arm" + } else { + _pgo_target = "mac" + } + } else if (is_linux) { + _pgo_target = "linux" + } else if (is_chromeos_lacros) { + if (target_cpu == "arm") { + _pgo_target = "lacros-arm" + } else if (target_cpu == "arm64") { + _pgo_target = "lacros-arm64" + } else { + _pgo_target = "lacros64" + } + } else if (is_android) { + # Temporarily use mac-arm profile until Android native PGO support works. + # TODO(crbug.com/1308749): fix this. + _pgo_target = "mac-arm" + } else if (is_fuchsia) { + if (target_cpu == "arm64") { + _pgo_target = "mac-arm" + } else { + _pgo_target = "mac" + } + } + + if (_pgo_target == "win64") { + inputs = [ "//chrome/build/win64.pgo.txt" ] + } else if (_pgo_target == "win32") { + inputs = [ "//chrome/build/win32.pgo.txt" ] + } else if (_pgo_target == "mac-arm") { + inputs = [ "//chrome/build/mac-arm.pgo.txt" ] + } else if (_pgo_target == "mac") { + inputs = [ "//chrome/build/mac.pgo.txt" ] + } else if (_pgo_target == "linux") { + inputs = [ "//chrome/build/linux.pgo.txt" ] + } else if (_pgo_target == "lacros64") { + inputs = [ "//chrome/build/lacros64.pgo.txt" ] + } else if (_pgo_target == "lacros-arm") { + inputs = [ "//chrome/build/lacros-arm.pgo.txt" ] + } else if (_pgo_target == "lacros-arm64") { + inputs = [ "//chrome/build/lacros-arm64.pgo.txt" ] + } + + if (_pgo_target != "" && pgo_data_path == "") { + pgo_data_path = exec_script("//tools/update_pgo_profiles.py", + [ + "--target", + _pgo_target, + "get_profile_path", + ], + "value") + } + assert(pgo_data_path != "", + "Please set pgo_data_path to point at the profile data") + cflags = [ + "-fprofile-use=" + rebase_path(pgo_data_path, root_build_dir), + + # It's possible to have some profile data legitimately missing, + # and at least some profile data always ends up being considered + # out of date, so make sure we don't error for those cases. + "-Wno-profile-instr-unprofiled", + "-Wno-profile-instr-out-of-date", + + # Some hashing conflict results in a lot of warning like this when doing + # a PGO build: + # warning: foo.cc: Function control flow change detected (hash mismatch) + # [-Wbackend-plugin] + # See https://crbug.com/978401 + "-Wno-backend-plugin", + ] + } +} + +# Applies flags necessary when profile-guided optimization is used. +# Flags are only added if PGO is enabled, so that this config is safe to +# include by default. +config("default_pgo_flags") { + if (chrome_pgo_phase == 0) { + # Nothing. This config should be a no-op when chrome_pgo_phase == 0. 
+  } else if (chrome_pgo_phase == 1) {
+    configs = [ ":pgo_instrumentation_flags" ]
+  } else if (chrome_pgo_phase == 2) {
+    configs = [ ":pgo_optimization_flags" ]
+  }
+}
diff --git a/config/compiler/pgo/pgo.gni b/config/compiler/pgo/pgo.gni
new file mode 100644
index 000000000000..9e9a0c524992
--- /dev/null
+++ b/config/compiler/pgo/pgo.gni
@@ -0,0 +1,34 @@
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
+import("//build/config/chromeos/ui_mode.gni")
+import("//build/config/dcheck_always_on.gni")
+
+declare_args() {
+  # Specify the current PGO phase.
+  # Here are the different values that can be used:
+  #   0 : Means that PGO is turned off.
+  #   1 : Used during the PGI (instrumentation) phase.
+  #   2 : Used during the PGO (optimization) phase.
+  # PGO profiles are generated from `dcheck_always_on = false` builds. Mixing
+  # those profiles with `dcheck_always_on = true` builds can cause the compiler
+  # to think some code is hotter than it actually is, potentially causing very
+  # bad compile times.
+  chrome_pgo_phase = 0
+  if (!dcheck_always_on && is_official_build &&
+      # TODO(crbug.com/1052397): Remove chromeos_is_browser_only once
+      # target_os switch for lacros-chrome is completed.
+      # TODO(crbug.com/1336055): Update this now-outdated condition with regard
+      # to chromecast and determine whether chromeos_is_browser_only is
+      # obsolete.
+      (is_high_end_android || is_win || is_mac || is_fuchsia ||
+       (is_linux && !is_castos && !chromeos_is_browser_only))) {
+    chrome_pgo_phase = 2
+  }
+
+  # When using chrome_pgo_phase = 2, read profile data from this path.
+  pgo_data_path = ""
+}
diff --git a/config/compute_inputs_for_analyze.gni b/config/compute_inputs_for_analyze.gni
new file mode 100644
index 000000000000..1e322949fd13
--- /dev/null
+++ b/config/compute_inputs_for_analyze.gni
@@ -0,0 +1,14 @@
+# Copyright 2018 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Enable this flag when running "gn analyze".
+  #
+  # This causes some gn actions to compute inputs immediately (via exec_script)
+  # where they would normally compute them only when executed (and write them
+  # to a depfile).
+  #
+  # This flag will slow down GN, but is required for analyze to work properly.
+  compute_inputs_for_analyze = false
+}
diff --git a/config/coverage/BUILD.gn b/config/coverage/BUILD.gn
new file mode 100644
index 000000000000..59941c3cd15e
--- /dev/null
+++ b/config/coverage/BUILD.gn
@@ -0,0 +1,43 @@
+# Copyright 2017 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/coverage/coverage.gni")
+import("//build/config/rust.gni")
+
+config("default_coverage") {
+  if (use_clang_coverage) {
+    ldflags = []
+    if (!is_win) {
+      # Windows directly calls link.exe instead of the compiler driver when
+      # linking, and embeds the path to the profile runtime library as
+      # dependent library into each object file.
+ ldflags += [ "-fprofile-instr-generate" ] + } + + cflags = [ + "-fprofile-instr-generate", + "-fcoverage-mapping", + + # Following experimental flags removes unused header functions from the + # coverage mapping data embedded in the test binaries, and the reduction + # of binary size enables building Chrome's large unit test targets on + # MacOS. Please refer to crbug.com/796290 for more details. + "-mllvm", + "-limited-coverage-experimental=true", + ] + + # Rust coverage is gated on using the Chromium-built Rust toolchain as it + # needs to have a compatible LLVM version with the C++ compiler and the LLVM + # tools that will be used to process the coverage output. This is because + # the coverage file format is not stable. + if (use_chromium_rust_toolchain) { + rustflags = [ "-Cinstrument-coverage" ] + } + + if (is_linux || is_chromeos) { + # TODO(crbug.com/1194301): Remove this flag. + cflags += [ "-fno-use-cxa-atexit" ] + } + } +} diff --git a/config/coverage/OWNERS b/config/coverage/OWNERS new file mode 100644 index 000000000000..7b0fe275df1e --- /dev/null +++ b/config/coverage/OWNERS @@ -0,0 +1 @@ +pasthana@google.com diff --git a/config/coverage/coverage.gni b/config/coverage/coverage.gni new file mode 100644 index 000000000000..2e5b7ab741ac --- /dev/null +++ b/config/coverage/coverage.gni @@ -0,0 +1,40 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/toolchain/toolchain.gni") +if (is_fuchsia) { + import("//third_party/fuchsia-sdk/sdk/build/component.gni") +} + +# There are two ways to enable code coverage instrumentation: +# 1. When |use_clang_coverage| or |use_jacoco_coverage| is true and +# |coverage_instrumentation_input_file| is empty, all source files or +# Java class files are instrumented. +# 2. When |use_clang_coverage| or |use_jacoco_coverage| is true and +# |coverage_instrumentation_input_file| is NOT empty and points to +# a text file on the file system, ONLY source files specified in the +# input file or Java class files related to source files are instrumented. +declare_args() { + # Enable Clang's Source-based Code Coverage. + if (is_fuchsia) { + use_clang_coverage = fuchsia_code_coverage + } else { + use_clang_coverage = false + } + + # Enables JaCoCo Java code coverage. + use_jacoco_coverage = false + + # The path to the coverage instrumentation input file should be a source root + # absolute path (e.g. //out/Release/coverage_instrumentation_input.txt), and + # the file consists of multiple lines where each line represents a path to a + # source file, and the paths must be relative to the root build directory. + # e.g. ../../base/task/post_task.cc for build directory 'out/Release'. + # + # NOTE that this arg will be non-op if use_clang_coverage is false. + coverage_instrumentation_input_file = "" +} + +assert(!use_clang_coverage || is_clang, + "Clang Source-based Code Coverage requires clang.") diff --git a/config/cronet/OWNERS b/config/cronet/OWNERS new file mode 100644 index 000000000000..78c2d8081e45 --- /dev/null +++ b/config/cronet/OWNERS @@ -0,0 +1 @@ +file://components/cronet/OWNERS diff --git a/config/cronet/config.gni b/config/cronet/config.gni new file mode 100644 index 000000000000..1468ec17a05e --- /dev/null +++ b/config/cronet/config.gni @@ -0,0 +1,10 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +declare_args() { + # Control whether cronet is built (this is usually set by the script + # components/cronet/tools/cr_cronet.py as cronet requires specific + # gn args to build correctly). + is_cronet_build = false +} diff --git a/config/dcheck_always_on.gni b/config/dcheck_always_on.gni new file mode 100644 index 000000000000..cca3a547cd55 --- /dev/null +++ b/config/dcheck_always_on.gni @@ -0,0 +1,39 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# TODO(crbug.com/1233050): Until the bug is resolved we need to include +# gclient_args for the definition of build_with_chromium and build_overrides +# for client overrides of that flag. The latter should go away. +import("//build/config/gclient_args.gni") +import("//build_overrides/build.gni") +declare_args() { + # Enables DCHECKs to be built-in, but to default to being non-fatal/log-only. + # DCHECKS can then be set as fatal/non-fatal via the "DcheckIsFatal" feature. + # See https://bit.ly/dcheck-albatross for details on how this is used. + dcheck_is_configurable = false +} + +declare_args() { + # Set to false to disable DCHECK in Release builds. This is enabled by default + # for non-official builds on the below platforms. + # This default only affects Chromium as indicated by build_with_chromium. + # Other clients typically set this to false. If another client wants to use + # the same default value as Chromium, we'd need to add a separate gclient + # variable to replace build_with_chromium here. + dcheck_always_on = + (build_with_chromium && !is_official_build) || dcheck_is_configurable +} + +declare_args() { + # Set to false to disable EXPENSIVE_DCHECK()s or to true to enable them in + # official builds. These are generally used for really useful DCHECKs that are + # too expensive to be enabled in user-facing official+DCHECK builds. + enable_expensive_dchecks = + is_debug || (dcheck_always_on && !is_official_build) +} + +assert(!dcheck_is_configurable || (dcheck_always_on || is_debug), + "dcheck_is_configurable only makes sense with DCHECKs enabled") +assert(!enable_expensive_dchecks || (dcheck_always_on || is_debug), + "enable_expensive_dchecks only makes sense with DCHECKs enabled") diff --git a/config/devtools.gni b/config/devtools.gni new file mode 100644 index 000000000000..4338e25550f3 --- /dev/null +++ b/config/devtools.gni @@ -0,0 +1,37 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/chrome_build.gni") +import("//build_overrides/build.gni") + +declare_args() { + if (build_with_chromium) { + # devtools_location is used in DevTools to resolve to the correct location + # for any script/file referenced in the DevTools build scripts. Since + # DevTools supports both a standalone build and build integration with + # Chromium, we need to differentiate between the two versions. + # devtools_location points to the Chromium version in both Chrome-branded + # and not Chrome-branded builds. devtools_root_location points to the root + # of the Chrome-branded version when is_chrome_branded is true and to the root + # of the Chromium version when is_chrome_branded is false. + # devtools_grd_location is the location of the GRD file listing all DevTools + # resources. 
+ if (is_chrome_branded) { + devtools_root_location = "third_party/devtools-frontend-internal" + devtools_location = "$devtools_root_location/devtools-frontend/" + devtools_grd_location = + "$devtools_root_location/chrome_devtools_resources.grd" + } else { + devtools_root_location = "third_party/devtools-frontend/src" + devtools_location = "third_party/devtools-frontend/src/" + devtools_grd_location = + "$devtools_root_location/front_end/devtools_resources.grd" + } + } else { + # DevTools is building a standalone version + devtools_location = "" + devtools_root_location = "" + devtools_grd_location = "" + } +} diff --git a/config/features.gni b/config/features.gni new file mode 100644 index 000000000000..852ac56a850e --- /dev/null +++ b/config/features.gni @@ -0,0 +1,47 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# ============================================= +# PLEASE DO NOT ADD MORE FLAGS TO THIS FILE +# ============================================= +# +# These flags are effectively global. Your feature flag should go near the +# code it controls. Most of these items are here now because they control +# legacy global #defines passed to the compiler (now replaced with generated +# buildflag headers -- see //build/buildflag_header.gni). +# +# There is more advice on where to put build flags in the "Build flag" section +# of //build/config/BUILDCONFIG.gn. + +import("//build/config/chrome_build.gni") +import("//build/config/chromecast_build.gni") + +declare_args() { + # Enables proprietary codecs and demuxers; e.g. H264, AAC, MP3, and MP4. + # We always build Google Chrome and Chromecast with proprietary codecs. + # + # Note: this flag is used by WebRTC which is DEPSed into Chrome. Moving it + # out of //build will require using the build_overrides directory. + # + # Do not add any other conditions to the following line. + # + # TODO(crbug.com/1314528): Remove chromecast-related conditions and force + # builds to explicitly specify this. + proprietary_codecs = is_chrome_branded || is_castos || is_cast_android + + # libudev usage. This currently only affects the content layer. + use_udev = (is_linux && !is_castos) || is_chromeos + + use_dbus = is_linux || is_chromeos + + use_gio = is_linux && !is_castos + + use_blink = !is_ios +} +# +# ============================================= +# PLEASE DO NOT ADD MORE FLAGS TO THIS FILE +# ============================================= +# +# See comment at the top. diff --git a/config/freetype/BUILD.gn b/config/freetype/BUILD.gn new file mode 100644 index 000000000000..88a9c59f0a31 --- /dev/null +++ b/config/freetype/BUILD.gn @@ -0,0 +1,14 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
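+
+# Dependents get either the copy of FreeType bundled with Chromium or the
+# system library, depending on use_system_freetype (see freetype.gni).
+# A typical dependent would write (illustrative):
+#
+#   deps = [ "//build/config/freetype" ]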
+
+import("//build/config/features.gni")
+import("//build/config/freetype/freetype.gni")
+
+group("freetype") {
+  if (use_system_freetype) {
+    public_configs = [ "//build/linux:freetype_from_pkgconfig" ]
+  } else {
+    public_deps = [ "//third_party:freetype_harfbuzz" ]
+  }
+}
diff --git a/config/freetype/OWNERS b/config/freetype/OWNERS
new file mode 100644
index 000000000000..3277f87312e5
--- /dev/null
+++ b/config/freetype/OWNERS
@@ -0,0 +1,2 @@
+bungeman@chromium.org
+drott@chromium.org
diff --git a/config/freetype/freetype.gni b/config/freetype/freetype.gni
new file mode 100644
index 000000000000..60aeb0452d70
--- /dev/null
+++ b/config/freetype/freetype.gni
@@ -0,0 +1,14 @@
+# Copyright 2017 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Blink needs a recent and properly build-configured FreeType version to
+  # support OpenType variations, color emoji and avoid security bugs. By
+  # default we ship and link such a version as part of Chrome. For
+  # distributions that prefer to keep linking to the version the system
+  # provides, FreeType must be newer than version 2.7.1 and have color bitmap
+  # support compiled in. WARNING: System FreeType configurations other than as
+  # described WILL INTRODUCE TEXT RENDERING AND SECURITY REGRESSIONS.
+  use_system_freetype = false
+}
diff --git a/config/fuchsia/BUILD.gn b/config/fuchsia/BUILD.gn
new file mode 100644
index 000000000000..bbcd70886fe9
--- /dev/null
+++ b/config/fuchsia/BUILD.gn
@@ -0,0 +1,100 @@
+# Copyright 2017 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chromecast_build.gni")
+import("//build/config/clang/clang.gni")
+import("//build/config/fuchsia/generate_runner_scripts.gni")
+import("//third_party/fuchsia-sdk/sdk/build/config/config.gni")
+
+assert(is_fuchsia)
+assert(!is_posix, "Fuchsia is not POSIX.")
+
+config("compiler") {
+  configs = [ "//third_party/fuchsia-sdk/sdk/build/config:compiler" ]
+
+  # TODO(https://crbug.com/706592): The stack defaults to 256k on Fuchsia (see
+  # https://fuchsia.googlesource.com/zircon/+/master/system/private/zircon/stack.h#9),
+  # but on other platforms it's much higher, so a variety of code assumes more
+  # will be available. Raise to 8M which matches e.g. macOS.
+  ldflags = [
+    "-Wl,-z,stack-size=0x800000",
+    "-fexperimental-relative-c++-abi-vtables",
+  ]
+  cflags_cc = [ "-fexperimental-relative-c++-abi-vtables" ]
+}
+
+# Files required to run on Fuchsia on isolated swarming clients.
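+#
+# Runner scripts depend on this group via data_deps;
+# generate_runner_scripts.gni adds
+# "//build/config/fuchsia:deployment_resources" to every generated wrapper so
+# the SDK tools and boot images listed below travel with the test isolate.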
+group("deployment_resources") { + data = [ + "//build/fuchsia/", + "//build/util/lib/", + "//third_party/fuchsia-sdk/sdk/.build-id/", + "//third_party/fuchsia-sdk/sdk/bin/fuchsia-common.sh", + "//third_party/fuchsia-sdk/sdk/meta/manifest.json", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/ffx", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/ffx-meta.json", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/fvm", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/fvm-meta.json", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/merkleroot", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/merkleroot-meta.json", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/pm", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/pm-meta.json", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/symbolizer", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/symbolizer-meta.json", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/zbi", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/zbi-meta.json", + ] + + if (fuchsia_additional_boot_images == []) { + data += [ "${boot_image_root}" ] + } + + foreach(fuchsia_additional_boot_image, fuchsia_additional_boot_images) { + data += [ "${fuchsia_additional_boot_image}/" ] + } + + if (test_isolate_uses_emulator) { + data += [ + "//third_party/fuchsia-sdk/sdk/bin/device_launcher.version", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/fvdl", + ] + if (test_host_cpu == "x64") { + data += [ + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/aemu_internal", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/aemu_internal-meta.json", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/qemu_internal", + "//third_party/fuchsia-sdk/sdk/tools/${test_host_cpu}/qemu_internal-meta.json", + ] + } else if (test_host_cpu == "arm64") { + data += [ + "//third_party/qemu-${host_os}-${test_host_cpu}/", + + # TODO(https://crbug.com/1336776): remove when ffx has native support + # for starting emulator on arm64 host. + "//third_party/fuchsia-sdk/sdk/tools/x64/qemu_internal-meta.json", + ] + } + } +} + +# Copy the loader to place it at the expected path in the final package. +copy("sysroot_asan_libs") { + sources = + [ "${fuchsia_sdk}/arch/${target_cpu}/sysroot/dist/lib/asan/ld.so.1" ] + outputs = [ "${root_out_dir}/lib/asan/{{source_file_part}}" ] +} + +# Copy the loader to place it at the expected path in the final package. +copy("sysroot_asan_runtime_libs") { + sources = [ "$clang_base_path/lib/clang/$clang_version/lib/x86_64-unknown-fuchsia/libclang_rt.asan.so" ] + outputs = [ "${root_out_dir}/lib/{{source_file_part}}" ] +} + +# This adds the runtime deps for Fuchsia ASAN builds. 
+group("asan_runtime_library") { + data_deps = [ + ":sysroot_asan_libs", + ":sysroot_asan_runtime_libs", + ] +} diff --git a/config/fuchsia/DIR_METADATA b/config/fuchsia/DIR_METADATA new file mode 100644 index 000000000000..210aa6a954b8 --- /dev/null +++ b/config/fuchsia/DIR_METADATA @@ -0,0 +1 @@ +mixins: "//build/fuchsia/COMMON_METADATA" diff --git a/config/fuchsia/OWNERS b/config/fuchsia/OWNERS new file mode 100644 index 000000000000..565fda1e097d --- /dev/null +++ b/config/fuchsia/OWNERS @@ -0,0 +1,5 @@ +file://build/fuchsia/OWNERS + +chonggu@google.com +rohpavone@chromium.org +zijiehe@google.com diff --git a/config/fuchsia/build_symbol_archive.py b/config/fuchsia/build_symbol_archive.py new file mode 100755 index 000000000000..a595ed8a7a4f --- /dev/null +++ b/config/fuchsia/build_symbol_archive.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python3 +# +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Creates a compressed archive of unstripped binaries cataloged by +"ids.txt".""" + +import argparse +import os +import subprocess +import sys +import tarfile + + +def main(args): + parser = argparse.ArgumentParser() + parser.add_argument('ids_txt', type=str, nargs=1, + help='Path to ids.txt files.') + parser.add_argument('-o', '--output_tarball', nargs=1, type=str, + help='Path which the tarball will be written to.') + parser.add_argument('--fuchsia-build-id-dir', type=str, required=True, + help='Directory containing symbols for SDK prebuilts.') + args = parser.parse_args(args) + + ids_txt = args.ids_txt[0] + build_ids_archive = tarfile.open(args.output_tarball[0], 'w:bz2') + for line in open(ids_txt, 'r'): + build_id, binary_path = line.strip().split(' ') + + # Look for prebuilt symbols in the SDK first. + symbol_source_path = os.path.join(args.fuchsia_build_id_dir, + build_id[:2], + build_id[2:] + '.debug') + if not os.path.exists(symbol_source_path): + symbol_source_path = os.path.abspath( + os.path.join(os.path.dirname(ids_txt), binary_path)) + + if os.path.getsize(symbol_source_path) == 0: + # This is a prebuilt which wasn't accompanied by SDK symbols. + continue + + # Exclude stripped binaries (indicated by their lack of symbol tables). + readelf_output = subprocess.check_output( + ['readelf', '-S', symbol_source_path], universal_newlines=True) + if not '.symtab' in readelf_output: + continue + + # Archive the unstripped ELF binary, placing it in a hierarchy keyed to the + # GNU build ID. The binary resides in a directory whose name is the first + # two characters of the build ID, with the binary file itself named after + # the remaining characters of the build ID. So, a binary file with the build + # ID "deadbeef" would be located at the path 'de/adbeef.debug'. + build_ids_archive.add(symbol_source_path, + '%s/%s.debug' % (build_id[:2], build_id[2:])) + + return 0 + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/config/fuchsia/config.gni b/config/fuchsia/config.gni new file mode 100644 index 000000000000..1efe24cc70f7 --- /dev/null +++ b/config/fuchsia/config.gni @@ -0,0 +1,8 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +assert(is_fuchsia) + +# Compute the path to the arch-specific boot image directory. 
+boot_image_root = "//third_party/fuchsia-sdk/images/" diff --git a/config/fuchsia/extend_fvm.py b/config/fuchsia/extend_fvm.py new file mode 100644 index 000000000000..ae95f6736c0a --- /dev/null +++ b/config/fuchsia/extend_fvm.py @@ -0,0 +1,26 @@ +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Copies a FVM file and extends it by a specified amount. + +Arg #1: path to 'fvm'. + #2: the path to the source fvm.blk. + #3: the path that the extended FVM file will be written to. + #4: the additional number of bytes to grow fvm.blk by.""" + +import os +import shutil +import subprocess +import sys + +def ExtendFVM(fvm_tool_path, src_path, dest_path, delta): + old_size = os.path.getsize(src_path) + new_size = old_size + int(delta) + shutil.copyfile(src_path, dest_path) + subprocess.check_call([fvm_tool_path, dest_path, 'extend', '--length', + str(new_size)]) + return 0 + +if __name__ == '__main__': + sys.exit(ExtendFVM(*sys.argv[1:])) diff --git a/config/fuchsia/fuchsia_package_metadata.gni b/config/fuchsia/fuchsia_package_metadata.gni new file mode 100644 index 000000000000..fb33bb2bd40e --- /dev/null +++ b/config/fuchsia/fuchsia_package_metadata.gni @@ -0,0 +1,38 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +assert(is_fuchsia) + +# Generates a metadata file under root_gen_dir which provides information about +# a Fuchsia package. +# Parameters: +# package_deps: An array of package_paths which specify the location of all +# .far files that the package depends on. +template("fuchsia_package_metadata") { + _pkg_dir = "$root_out_dir/gen/" + get_label_info(invoker.package, "dir") + + "/" + target_name + _pkg_path = "$_pkg_dir/${target_name}.far" + pkg_dep_paths = [ rebase_path(_pkg_path, root_build_dir) ] + if (defined(invoker.package_deps)) { + foreach(package_dep, invoker.package_deps) { + _pkg_dep_target = package_dep[0] + _pkg_dep_name = package_dep[1] + pkg_dep_path = + rebase_path(get_label_info(_pkg_dep_target, "target_gen_dir") + "/" + + _pkg_dep_name + "/" + _pkg_dep_name + ".far", + root_build_dir) + pkg_dep_paths += [ pkg_dep_path ] + } + } + + pkg_metadata = "${target_name}_script_meta" + generated_file(pkg_metadata) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + contents = { + packages = pkg_dep_paths + } + output_conversion = "json" + outputs = [ "$root_gen_dir/package_metadata/${invoker.target_name}.meta" ] + } +} diff --git a/config/fuchsia/generate_runner_scripts.gni b/config/fuchsia/generate_runner_scripts.gni new file mode 100644 index 000000000000..cf01659fdd5d --- /dev/null +++ b/config/fuchsia/generate_runner_scripts.gni @@ -0,0 +1,250 @@ +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/chromecast_build.gni") +import("//build/config/fuchsia/config.gni") +import("//build/config/fuchsia/fuchsia_package_metadata.gni") +import("//build/config/gclient_args.gni") +import("//build/config/sysroot.gni") +import("//build/util/generate_wrapper.gni") + +assert(is_fuchsia) + +declare_args() { + # Sets the Fuchsia Amber repository which will be used by default by the + # generated installation scripts. If not specified, then no default directory + # will be used. 
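+  # Example (illustrative):
+  #   default_fuchsia_out_dir = "/path/to/fuchsia/out/default"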
+  default_fuchsia_out_dir = ""
+
+  # Sets the Fuchsia device node name which will be used by default by the
+  # generated runner scripts. If not specified, then no default node name will
+  # be used.
+  default_fuchsia_device_node_name = ""
+
+  # CPU architecture of the host used to run the tests.
+  test_host_cpu = host_cpu
+
+  # Sets whether emulators need to be included in the test isolates.
+  test_isolate_uses_emulator = true
+
+  # A list of additional Fuchsia boot images to include in the test isolates.
+  fuchsia_additional_boot_images = []
+
+  # This variable controls the browser included in the Telemetry based test
+  # targets.
+  fuchsia_browser_type = "web_engine_shell"
+}
+
+# Generates a wrapper script under root_build_dir/bin that performs an
+# operation, such as deployment or execution, using a package and its
+# dependencies.
+#
+# Parameters:
+#   output_name_format: The format string for the generated script's filename.
+#                       The placeholder string %package% will be substituted
+#                       with |package| (or |package_name|, if set).
+#                       Examples: "run_%package%", "install_%package%"
+#   package: The package() target to run.
+#   package_name: Specifies the name of the generated package, if its
+#       filename is different than the |package| target name. This value must
+#       match package_name in the |package| target.
+#   package_deps: An array of [package, package_name] array pairs
+#       which specify additional dependency packages to be installed
+#       prior to execution.
+#   executable: The underlying script to be invoked by the generated wrapper
+#       script.
+#   executable_args: The list of arguments to pass to |executable|.
+#                    Runtime commandline arguments can be passed to
+#                    |executable| using the placeholder %args%.
+#
+#                    In addition, the script is passed the following
+#                    executable_args:
+#                      --package - the path to a .FAR package to install.
+#                      --package_name - the name of the package to use as an
+#                                       entry point.
+#   include_fuchsia_out_dir: If true, adds |default_fuchsia_out_dir|
+#                            to executable_args (when set in GN args).
+template("fuchsia_run_script_with_packages") {
+  if (defined(invoker.package_name)) {
+    _pkg_shortname = invoker.package_name
+  } else {
+    _pkg_shortname = get_label_info(invoker.package, "name")
+  }
+
+  _generated_script_path =
+      "$root_build_dir/bin/" +
+      string_replace(invoker.output_name_format, "%package%", _pkg_shortname)
+
+  generate_wrapper(target_name) {
+    forward_variables_from(invoker,
+                           TESTONLY_AND_VISIBILITY + [
+                                 "executable",
+                                 "executable_args",
+                                 "data",
+                                 "include_fuchsia_out_dir",
+                                 "target",
+                               ])
+
+    wrapper_script = _generated_script_path
+    deps = [ invoker.package ]
+
+    if (!defined(data_deps)) {
+      data_deps = []
+    }
+    data_deps += [ "//build/config/fuchsia:deployment_resources" ]
+
+    _combined_package_list = [ invoker.package ]
+
+    if (defined(invoker.package_deps)) {
+      foreach(package_dep, invoker.package_deps) {
+        _combined_package_list += [ package_dep[0] ]
+      }
+    }
+    foreach(package_dep, _combined_package_list) {
+      data_deps += [
+        package_dep,
+        package_dep + "__archive-manifest",
+        package_dep + "__archive-metadata",
+      ]
+    }
+
+    if (defined(invoker.data_deps)) {
+      data_deps += invoker.data_deps
+    }
+
+    # Compute the list of full paths to package files, including dependencies.
+    if (defined(invoker.package_deps)) {
+      foreach(package_dep, invoker.package_deps) {
+        package_dep_target = package_dep[0]
+        deps += [ package_dep_target ]
+        data_deps += [ package_dep_target ]
+      }
+    }
+
+    # Include package information inside the wrapper script.
+ if (!defined(executable_args)) { + executable_args = [] + } + + if (defined(include_fuchsia_out_dir) && include_fuchsia_out_dir && + default_fuchsia_out_dir != "") { + executable_args += [ + "--fuchsia-out-dir", + default_fuchsia_out_dir, + ] + } + } + + # Create a wrapper script rather than using a group() in order to ensure + # "ninja $target_name" always works. + if (defined(invoker.executable_wrapper)) { + generate_wrapper(invoker.executable_wrapper) { + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + executable = _generated_script_path + wrapper_script = "$root_build_dir/${invoker.executable_wrapper}" + deps = [ ":${invoker._run_target}" ] + } + } +} + +# Generates a script which deploys a package to the TUF repo of a Fuchsia +# build output directory. +template("fuchsia_package_installer") { + if (defined(invoker.package_name)) { + pkg_shortname = invoker.package_name + } else { + pkg_shortname = get_label_info(invoker.package, "name") + } + fuchsia_package_metadata(pkg_shortname) { + forward_variables_from(invoker, + TESTONLY_AND_VISIBILITY + [ + "package", + "package_deps", + ]) + } + fuchsia_run_script_with_packages(target_name) { + forward_variables_from(invoker, + "*", + TESTONLY_AND_VISIBILITY + [ "executable_args" ]) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + executable = rebase_path("//build/fuchsia/test/deploy_to_fuchsia.py") + executable_args = [ + "--out-dir", + "@WrappedPath(.)", + pkg_shortname, + ] + output_name_format = "deploy_%package%" + include_fuchsia_out_dir = true + } +} + +# Generates scripts for installing and running test packages. +# See fuchsia_run_script_with_packages() for the full list of parameters. +template("fuchsia_test_runner") { + _run_target = "${target_name}__runner" + _install_target = "${target_name}__installer" + + fuchsia_run_script_with_packages(_run_target) { + forward_variables_from(invoker, + TESTONLY_AND_VISIBILITY + [ + "data", + "data_deps", + "package", + "package_name", + "package_deps", + ]) + + _test_runner_py = "//build/fuchsia/test/run_test.py" + + executable = rebase_path(_test_runner_py) + + if (defined(invoker.is_test_exe) && invoker.is_test_exe) { + data += [ "//.vpython3" ] + } + output_name_format = "run_%package%" + executable_wrapper = invoker.target_name + + # Populate the arguments used by the test runner, defined at build-time. + executable_args = [ + "--out-dir", + "@WrappedPath(.)", + ] + + executable_args += [ package_name ] + + if (defined(invoker.use_test_server) && invoker.use_test_server) { + executable_args += [ "--enable-test-server" ] + } + + if (default_fuchsia_device_node_name != "") { + executable_args += [ + "--target-id", + default_fuchsia_device_node_name, + ] + } + + # Declare the files that are needed for test execution on LUCI swarming + # test clients, both directly (via data) or indirectly (via data_deps). + if (!defined(data)) { + data = [] + } + data += [ + _test_runner_py, + "$root_gen_dir/package_metadata/${invoker.package_name}.meta", + ] + + # TODO(crbug.com/1256870): Remove this once all out-of-tree references + # to "package_name_override" are migrated to "package_name". 
+    if (defined(invoker.package_name_override)) {
+      package_name = invoker.package_name_override
+    }
+  }
+  fuchsia_package_installer(_install_target) {
+    forward_variables_from(invoker,
+                           TESTONLY_AND_VISIBILITY + [
+                                 "package",
+                                 "package_name",
+                                 "package_deps",
+                               ])
+  }
+}
diff --git a/config/fuchsia/packaged_content_embedder_excluded_dirs.gni b/config/fuchsia/packaged_content_embedder_excluded_dirs.gni
new file mode 100644
index 000000000000..f179a66d792e
--- /dev/null
+++ b/config/fuchsia/packaged_content_embedder_excluded_dirs.gni
@@ -0,0 +1,16 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/devtools.gni")
+
+assert(is_fuchsia)
+
+# List of transitively included directories that should be stripped from
+# released packages for size reasons. For use with the |excluded_dirs| variable
+# of fuchsia_package().
+FUCHSIA_PACKAGED_CONTENT_EMBEDDER_EXCLUDED_DIRS = [
+  # These are mistakenly being shipped in both PAK form and runtime data deps.
+  # TODO(crbug.com/1265660): Remove when DevTools stops leaking its source list.
+  devtools_root_location,
+]
diff --git a/config/fuchsia/size_optimized_cast_receiver_args.gn b/config/fuchsia/size_optimized_cast_receiver_args.gn
new file mode 100644
index 000000000000..9a366c7bfead
--- /dev/null
+++ b/config/fuchsia/size_optimized_cast_receiver_args.gn
@@ -0,0 +1,43 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains feature and optimization overrides that are commonly
+# required or useful for Cast Receiver implementations.
+# It prioritizes size and disables unneeded features that may add size.
+#
+# To use it do one of the following:
+# * Add the following to your `gn args`:
+#   import("//build/config/fuchsia/size_optimized_cast_receiver_args.gn")
+# * Add the following to `gn_args` in a bot recipe:
+#   'args_file': '//build/config/fuchsia/size_optimized_cast_receiver_args.gn'
+
+# There is no reason these values couldn't be used on other platforms, but this
+# file is in a fuchsia/ directory and some refactoring would probably be
+# appropriate before reusing this file.
+# It is not possible to assert the platform because `target_os` is not defined
+# when this file is imported.
+
+enable_printing = false
+enable_cast_receiver = true
+cast_streaming_enable_remoting = true
+enable_dav1d_decoder = false
+enable_v8_compile_hints = false
+
+# //chrome makes many assumptions that Extensions are enabled.
+# TODO(crbug.com/1363742): Fix these assumptions or avoid building it.
+# enable_extensions = false
+
+enable_hidpi = false
+enable_libaom = false
+enable_library_cdms = false
+enable_logging_override = true
+enable_pdf = false
+enable_plugins = false
+optimize_for_size = true
+optional_trace_events_enabled = false
+
+# Ensure PGO and ThinLTO are disabled, as these optimizations increase the
+# binary size (see crbug.com/1322959).
+chrome_pgo_phase = 0
+use_thin_lto = false
diff --git a/config/fuchsia/size_optimized_cast_receiver_args_internal.gn b/config/fuchsia/size_optimized_cast_receiver_args_internal.gn
new file mode 100644
index 000000000000..b59ce96a6ef0
--- /dev/null
+++ b/config/fuchsia/size_optimized_cast_receiver_args_internal.gn
@@ -0,0 +1,18 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+ +# This is a version of size_optimized_cast_receiver_args.gn that is intended for +# internal builds and requires src-internal. +# +# To use it do one of the following: +# * Add the following to your `gn args`: +# import("build/config/fuchsia/size_optimized_cast_receiver_args_internal.gn") +# * Add the following to `gn_args` in a bot recipe: +# 'args_file': '//build/config/fuchsia/size_optimized_cast_receiver_args_internal.gn' + +import("//build/config/fuchsia/size_optimized_cast_receiver_args.gn") + +enable_widevine = true +use_internal_isolated_origins = true +use_official_google_api_keys = false diff --git a/config/fuchsia/sizes.gni b/config/fuchsia/sizes.gni new file mode 100644 index 000000000000..fc9767622355 --- /dev/null +++ b/config/fuchsia/sizes.gni @@ -0,0 +1,51 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +assert(is_fuchsia) + +import("//build/util/generate_wrapper.gni") + +template("compute_fuchsia_package_sizes") { + generate_wrapper(target_name) { + forward_variables_from(invoker, + [ + "data", + "data_deps", + ]) + testonly = true + executable = "//build/fuchsia/binary_sizes.py" + wrapper_script = "$root_out_dir/bin/run_${target_name}" + + assert(target_cpu == "arm64" || target_cpu == "x64", + "target_cpu must be arm64 or x64") + + if (!defined(data)) { + data = [] + } + + if (!defined(data_deps)) { + data_deps = [] + } + + # Declares the files that are needed for test execution on the + # swarming test client. + # TODO(crbug.com/1347172): Remove arm64 once the execution of fuchsia_sizes + # has been migrated to x64 machines. + data += [ + "//build/fuchsia/", + "//tools/fuchsia/size_tests/", + "//third_party/fuchsia-sdk/sdk/arch/", + "//third_party/fuchsia-sdk/sdk/tools/arm64/", + "//third_party/fuchsia-sdk/sdk/tools/x64/", + ] + + executable_args = [ + "--output-directory", + "@WrappedPath(.)", + ] + if (defined(invoker.executable_args)) { + executable_args += invoker.executable_args + } + } +} diff --git a/config/fuchsia/symbol_archive.gni b/config/fuchsia/symbol_archive.gni new file mode 100644 index 000000000000..e05af1155c0b --- /dev/null +++ b/config/fuchsia/symbol_archive.gni @@ -0,0 +1,47 @@ +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +assert(is_fuchsia) + +# Creates a tarball of unstripped binaries, structured according to the +# ".build_ids" convention used by the symbolizer and GNU GDB. +# +# Parameters: +# deps: Must all be fuchsia_package() targets. +# ids_txt: The "ids.txt" file which lists the relative paths to unstripped +# executables and libraries, along with their build IDs. +# archive_name: The path to the compressed tarball that will be generated. +template("symbol_archive") { + assert(!is_debug) + + action(target_name) { + _ids_txt = invoker.ids_txt + _build_ids = invoker.archive_name + + script = "//build/config/fuchsia/build_symbol_archive.py" + + inputs = [ _ids_txt ] + + outputs = [ _build_ids ] + + # For each package in |deps| it is necessary to additionally depend upon + # the corresponding archive-manifest target, which is what creates the + # ids.txt file. 
+ deps = [] + foreach(package, invoker.deps) { + deps += [ + package, + package + "__archive-manifest", + ] + } + + args = [ + rebase_path(_ids_txt), + "-o", + rebase_path(_build_ids), + "--fuchsia-build-id-dir", + rebase_path("//third_party/fuchsia-sdk/sdk/.build-id"), + ] + } +} diff --git a/config/fuchsia/test/OWNERS b/config/fuchsia/test/OWNERS new file mode 100644 index 000000000000..ac711c0605b7 --- /dev/null +++ b/config/fuchsia/test/OWNERS @@ -0,0 +1,7 @@ +file://build/fuchsia/OWNERS + +per-file *.test-cml=set noparent +per-file *.test-cml=ddorwin@chromium.org +per-file *.test-cml=wez@chromium.org +# Please prefer the above when possible. +per-file *.test-cml=file://build/fuchsia/SECURITY_OWNERS diff --git a/config/fuchsia/test/README.md b/config/fuchsia/test/README.md new file mode 100644 index 000000000000..d21cdb79a412 --- /dev/null +++ b/config/fuchsia/test/README.md @@ -0,0 +1,112 @@ +## Manifest Fragments + +This directory contains the manifest fragments that are required for running +Fuchsia tests hermetically. Tests start from `minimum.shard.test-cml` and add +additional capabilities as necessary by providing the +`additional_manifest_fragments` argument. Some fragments are explained in detail +below: + +### General Purpose Fragments + +#### archivist.shard.test-cml +Runs an `archivist-without-attribution` with custom protocol routing for tests +that want to intercept events written to a `LogSink` by a component. + +#### chromium_test_facet.shard.test-cml +Runs tests in the `chromium` test realm, which is mostly hermetic but has access +to specific system services that cannot (currently) be faked. For more +information, see https://fxbug.dev/91934. This is generally required for all +Chromium tests not using the +[`chromium_system_test_facet`](#chromium_system_test_facetshardtest-cml). + +#### fonts.shard.test-cml +For tests that test fonts by providing `fuchsia.fonts.Provider`. This shard +runs an isolated font provider, but serves the fonts present on the system. + +#### test_fonts.shard.test-cml +For tests that use the fonts in `//third_party/test_fonts` by way of +`//skia:test_fonts_cfv2`. + +#### mark_vmo_executable.shard.test-cml +Required by tests that execute JavaScript. Should only be required in a small +number of tests. + +#### minimum.shard.test-cml +Capabilities required by anything that uses `//base/test` when running in the +(default) `chromium` test realm. It is the default base fragment for most +`test()` Components. + +The system-wide `config-data` directory capability is routed to tests running in +the realm so that individual tests may route subdirectories as needed. +TODO(crbug.com/1360077): Remove this after migrating to the new mechanism. + +#### logger.shard.test-cml +For tests that test logging functionality by providing `fuchsia.logger.Log`. + +#### sysmem.shard.test-cml +For tests that depend on the sysmem service (e.g. to allocate image buffers to +share with Vulkan and Scenic). + +#### system_test_minimum.shard.test-cml +Capabilities required by anything that uses `//base/test` when running as a +system test in the `chromium-system` test realm. It is the base fragment for +`test()` Components that use the +[`chromium_system_test_facet`](#chromium_system_test_facetshardtest-cml). + +Most tests use the [`minimum`](#minimumshardtest-cml) shard. + +#### chromium_system_test_facet.shard.test-cml +Runs tests in the `chromium-system` test realm. 
This is required for Chromium
+tests that are intended to run against the actual system and its real system
+services, such as performance tests intended to measure system performance.
+Another overlapping use case is tests that need to be run in environments
+without access to the packages containing fake implementations of required
+protocols that other tests use. (https://crbug.com/1408597 should make that
+use case obsolete.)
+
+Most tests should use the
+[`chromium_test_facet`](#chromium_test_facetshardtest-cml).
+
+#### test_ui_stack.shard.test-cml
+For tests that need an isolated UI subsystem that supports the Flatland API
+set. This allows tests to e.g. run with view-focus unaffected by any other
+tests running concurrently on the device, as well as providing test-only
+functionality such as input-injection support.
+
+#### gfx_test_ui_stack.shard.test-cml
+For tests that need an isolated display subsystem supporting the legacy
+Scenic/GFX APIs.
+
+### WebEngine Fragments
+The following fragments are specific to WebEngine functionality as documented
+at
+https://fuchsia.dev/reference/fidl/fuchsia.web#CreateContextParams and
+https://fuchsia.dev/reference/fidl/fuchsia.web#ContextFeatureFlags.
+Any test-specific exceptions are documented for each file.
+
+#### audio_output.shard.test-cml
+Required by tests that need to enable audio output.
+
+#### platform_video_codecs.shard.test-cml
+Required by tests that need accelerated (e.g., hardware) video codecs. A
+private (semi-isolated) instance of codec_factory is run for tests using this
+shard in support of running on system images that don't run it.
+
+#### network.shard.test-cml
+For tests that need access to network services, including those that access a
+local HTTP server. Corresponds to the `NETWORK` flag. Note that access to the
+root SSL certificates is not needed if ContextProvider is used to launch the
+`Context`. The `fuchsia.device.NameProvider` dependency comes from fdio.
+
+#### present_view.shard.test-cml
+Services that are needed to render web content in a Scenic view and present it.
+Most services are required per the FIDL documentation.
+
+#### web_instance.shard.test-cml
+Contains services that need to be present when creating a `fuchsia.web.Context`.
+Note that the `fuchsia.scheduler.ProfileProvider` service is only used in tests
+that encounter memory pressure code.
diff --git a/config/fuchsia/test/archivist.shard.test-cml b/config/fuchsia/test/archivist.shard.test-cml
new file mode 100644
index 000000000000..b85162f6d05c
--- /dev/null
+++ b/config/fuchsia/test/archivist.shard.test-cml
@@ -0,0 +1,28 @@
+// Copyright 2022 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
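+
+// Declares an isolated archivist child and re-routes its Log/LogSink
+// protocols under ".isolated" paths, so a test can inspect what a component
+// under test writes to its LogSink without mixing in the test's own logging
+// (see README.md).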
+{ + children: [ + { + name: "isolated_archivist", + url: "fuchsia-pkg://fuchsia.com/archivist-without-attribution#meta/archivist-without-attribution.cm", + }, + ], + use: [ + { + protocol: "fuchsia.logger.Log", + path: "/svc/fuchsia.logger.Log.isolated", + from: "#isolated_archivist", + }, + { + protocol: "fuchsia.logger.LogSink", + path: "/svc/fuchsia.logger.LogSink.isolated", + from: "#isolated_archivist", + }, + ], + facets: { + "fuchsia.test": { + "deprecated-allowed-packages": [ "archivist-without-attribution" ], + }, + }, +} diff --git a/config/fuchsia/test/audio_output.shard.test-cml b/config/fuchsia/test/audio_output.shard.test-cml new file mode 100644 index 000000000000..9176f6cdf436 --- /dev/null +++ b/config/fuchsia/test/audio_output.shard.test-cml @@ -0,0 +1,16 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + use: [ + { + protocol: [ + // TODO(crbug.com/1348174): Rather than require the system to provide + // capabilities straight from audio_core, we should run Chromium tests + // against an audio stack with fake device(s). + "fuchsia.media.Audio", + "fuchsia.media.AudioDeviceEnumerator", + ] + }, + ], +} diff --git a/config/fuchsia/test/chromium_system_test_facet.shard.test-cml b/config/fuchsia/test/chromium_system_test_facet.shard.test-cml new file mode 100644 index 000000000000..cdf9ca7a0b7f --- /dev/null +++ b/config/fuchsia/test/chromium_system_test_facet.shard.test-cml @@ -0,0 +1,8 @@ +// Copyright 2023 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + facets: { + "fuchsia.test": { type: "chromium-system" }, + }, +} diff --git a/config/fuchsia/test/chromium_test_facet.shard.test-cml b/config/fuchsia/test/chromium_test_facet.shard.test-cml new file mode 100644 index 000000000000..3628cf400da2 --- /dev/null +++ b/config/fuchsia/test/chromium_test_facet.shard.test-cml @@ -0,0 +1,8 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + facets: { + "fuchsia.test": { type: "chromium" }, + }, +} diff --git a/config/fuchsia/test/context_provider.shard.test-cml b/config/fuchsia/test/context_provider.shard.test-cml new file mode 100644 index 000000000000..e5db2f1f6fdf --- /dev/null +++ b/config/fuchsia/test/context_provider.shard.test-cml @@ -0,0 +1,30 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + children: [ + { + name: "context_provider", + url: "fuchsia-pkg://fuchsia.com/web_engine#meta/context_provider.cm", + }, + ], + use: [ + { + protocol: [ + "fuchsia.web.ContextProvider", + ], + from: "#context_provider", + dependency: "weak", + }, + ], + offer: [ + { + protocol: [ + "fuchsia.feedback.ComponentDataRegister", + "fuchsia.feedback.CrashReportingProductRegister", + ], + from: "parent", + to: "#context_provider", + }, + ], +} diff --git a/config/fuchsia/test/elf_test_ambient_exec_runner.shard.test-cml b/config/fuchsia/test/elf_test_ambient_exec_runner.shard.test-cml new file mode 100644 index 000000000000..c9328c56f6cf --- /dev/null +++ b/config/fuchsia/test/elf_test_ambient_exec_runner.shard.test-cml @@ -0,0 +1,17 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
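+
+// Runs the test executable with ambient mark-VMO-executable rights (needed,
+// e.g., by tests that JIT code). A test component opts in by including this
+// shard (illustrative):
+//
+//   include: [ "//build/config/fuchsia/test/elf_test_ambient_exec_runner.shard.test-cml" ]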
+{ + program: { + runner: "elf_test_ambient_exec_runner", + }, + capabilities: [ + { protocol: "fuchsia.test.Suite" }, + ], + expose: [ + { + protocol: "fuchsia.test.Suite", + from: "self", + }, + ], +} diff --git a/config/fuchsia/test/elf_test_runner.shard.test-cml b/config/fuchsia/test/elf_test_runner.shard.test-cml new file mode 100644 index 000000000000..c97e6d7c0d33 --- /dev/null +++ b/config/fuchsia/test/elf_test_runner.shard.test-cml @@ -0,0 +1,17 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + program: { + runner: "elf_test_runner", + }, + capabilities: [ + { protocol: "fuchsia.test.Suite" }, + ], + expose: [ + { + protocol: "fuchsia.test.Suite", + from: "self", + }, + ], +} diff --git a/config/fuchsia/test/fonts.shard.test-cml b/config/fuchsia/test/fonts.shard.test-cml new file mode 100644 index 000000000000..80fb0cae12be --- /dev/null +++ b/config/fuchsia/test/fonts.shard.test-cml @@ -0,0 +1,38 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + children: [ + { + name: "isolated_font_provider", + url: "fuchsia-pkg://fuchsia.com/fonts#meta/fonts.cm", + }, + ], + use: [ + { + protocol: "fuchsia.fonts.Provider", + from: "#isolated_font_provider", + }, + ], + offer: [ + { + directory: "config-data", + from: "parent", + to: "#isolated_font_provider", + subdir: "fonts", + }, + { + protocol: [ + "fuchsia.logger.LogSink", + "fuchsia.tracing.provider.Registry", + ], + from: "parent", + to: "#isolated_font_provider", + }, + ], + facets: { + "fuchsia.test": { + "deprecated-allowed-packages": [ "fonts" ], + }, + }, +} diff --git a/config/fuchsia/test/gfx_test_ui_stack.shard.test-cml b/config/fuchsia/test/gfx_test_ui_stack.shard.test-cml new file mode 100644 index 000000000000..2e51f033fe73 --- /dev/null +++ b/config/fuchsia/test/gfx_test_ui_stack.shard.test-cml @@ -0,0 +1,49 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Used in tests which are hard-coded for the Scenic/GFX API-set. +// Use test_ui_stack.shard.test-cml when testing for Flatland, or when the +// choice of API-set is not important. 
+{ + include: [ + "//build/config/fuchsia/test/sysmem.shard.test-cml", + ], + children: [ + { + name: "test_ui_stack", + url: "fuchsia-pkg://fuchsia.com/gfx-scene-manager-test-ui-stack#meta/test-ui-stack.cm", + }, + ], + offer: [ + { + protocol: [ + "fuchsia.logger.LogSink", + "fuchsia.scheduler.ProfileProvider", + "fuchsia.sysmem.Allocator", + "fuchsia.tracing.provider.Registry", + "fuchsia.vulkan.loader.Loader", + ], + from: "parent", + to: "#test_ui_stack", + }, + ], + use: [ + { + protocol: [ + "fuchsia.accessibility.semantics.SemanticsManager", + "fuchsia.element.GraphicalPresenter", + "fuchsia.ui.composition.Allocator", + "fuchsia.ui.composition.Flatland", + "fuchsia.ui.input3.Keyboard", + "fuchsia.ui.scenic.Scenic", + ], + from: "#test_ui_stack", + }, + ], + facets: { + "fuchsia.test": { + "deprecated-allowed-packages": [ "gfx-scene-manager-test-ui-stack" ], + }, + }, +} diff --git a/config/fuchsia/test/logger.shard.test-cml b/config/fuchsia/test/logger.shard.test-cml new file mode 100644 index 000000000000..be0881dd424f --- /dev/null +++ b/config/fuchsia/test/logger.shard.test-cml @@ -0,0 +1,8 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + use: [ + { protocol: [ "fuchsia.logger.Log" ] }, + ], +} diff --git a/config/fuchsia/test/mark_vmo_executable.shard.test-cml b/config/fuchsia/test/mark_vmo_executable.shard.test-cml new file mode 100644 index 000000000000..ac07c1bde181 --- /dev/null +++ b/config/fuchsia/test/mark_vmo_executable.shard.test-cml @@ -0,0 +1,12 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + use: [ + { + protocol: [ + "fuchsia.kernel.VmexResource", + ], + }, + ], +} diff --git a/config/fuchsia/test/minimum.shard.test-cml b/config/fuchsia/test/minimum.shard.test-cml new file mode 100644 index 000000000000..17b49278ad9b --- /dev/null +++ b/config/fuchsia/test/minimum.shard.test-cml @@ -0,0 +1,78 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + include: [ + "syslog/client.shard.cml", + ], + // Add capability providers. 
+ children: [ + { + name: "build-info-service", + url: "fuchsia-pkg://fuchsia.com/fake-build-info#meta/fake_build_info.cm", + }, + { + name: "intl_property_manager", + url: "fuchsia-pkg://fuchsia.com/intl_property_manager#meta/intl_property_manager.cm", + }, + ], + offer: [ + { + protocol: "fuchsia.logger.LogSink", + from: "parent", + to: [ "#intl_property_manager" ], + } + ], + use: [ + { + directory: "config-data", + rights: [ "r*" ], + path: "/config/data", + }, + { + storage: "cache", + path: "/cache", + }, + { + storage: "custom_artifacts", + path: "/custom_artifacts", + }, + { + storage: "data", + path: "/data", + }, + { + storage: "tmp", + path: "/tmp", + }, + { + protocol: [ "fuchsia.buildinfo.Provider" ], + from: "#build-info-service", + }, + { + protocol: [ "fuchsia.intl.PropertyProvider" ], + from: "#intl_property_manager", + }, + { + protocol: [ + "fuchsia.hwinfo.Product", + "fuchsia.media.ProfileProvider", + "fuchsia.process.Launcher", + ], + }, + { + protocol: [ + "fuchsia.tracing.perfetto.ProducerConnector", + ], + availability: "optional", + }, + ], + facets: { + "fuchsia.test": { + "deprecated-allowed-packages": [ + "fake-build-info", + "intl_property_manager", + ], + }, + }, +} diff --git a/config/fuchsia/test/network.shard.test-cml b/config/fuchsia/test/network.shard.test-cml new file mode 100644 index 000000000000..1fd4fa7cf4ac --- /dev/null +++ b/config/fuchsia/test/network.shard.test-cml @@ -0,0 +1,20 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + use: [ + { + directory: "root-ssl-certificates", + rights: [ "r*" ], + path: "/config/ssl", + }, + { + protocol: [ + "fuchsia.device.NameProvider", // Required by FDIO. + "fuchsia.net.interfaces.State", + "fuchsia.net.name.Lookup", + "fuchsia.posix.socket.Provider", + ], + }, + ], +} diff --git a/config/fuchsia/test/platform_video_codecs.shard.test-cml b/config/fuchsia/test/platform_video_codecs.shard.test-cml new file mode 100644 index 000000000000..13b5a1b7947f --- /dev/null +++ b/config/fuchsia/test/platform_video_codecs.shard.test-cml @@ -0,0 +1,48 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + include: [ + "//build/config/fuchsia/test/sysmem.shard.test-cml", + ], + children: [ + { + // Run an isolated instance of codec_factory so that tests can run on + // system images that don't run it. 
+ name: "isolated_codec_factory", + url: "fuchsia-pkg://fuchsia.com/codec_factory#meta/codec_factory.cm", + }, + ], + offer: [ + { + protocol: [ + "fuchsia.logger.LogSink", + "fuchsia.sysinfo.SysInfo", + "fuchsia.sysmem.Allocator", + ], + from: "parent", + to: "#isolated_codec_factory", + }, + { + directory: "dev-mediacodec", + from: "parent", + to: "#isolated_codec_factory", + }, + { + directory: "dev-gpu", + from: "parent", + to: "#isolated_codec_factory", + }, + ], + use: [ + { + protocol: "fuchsia.mediacodec.CodecFactory", + from: "#isolated_codec_factory", + }, + ], + facets: { + "fuchsia.test": { + "deprecated-allowed-packages": [ "codec_factory" ], + }, + }, +} diff --git a/config/fuchsia/test/present_view.shard.test-cml b/config/fuchsia/test/present_view.shard.test-cml new file mode 100644 index 000000000000..4e15ad50b4b2 --- /dev/null +++ b/config/fuchsia/test/present_view.shard.test-cml @@ -0,0 +1,42 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + children: [ + { + name: "isolated_a11y_manager", + url: "fuchsia-pkg://fuchsia.com/a11y-manager#meta/a11y-manager.cm", + }, + { + name: "isolated_text_manager", + url: "fuchsia-pkg://fuchsia.com/text_manager#meta/text_manager.cm", + }, + ], + offer: [ + { + protocol: "fuchsia.logger.LogSink", + from: "parent", + to: [ + "#isolated_a11y_manager", + "#isolated_text_manager", + ], + }, + ], + use: [ + { + protocol: [ + "fuchsia.ui.composition.Allocator", + "fuchsia.ui.composition.Flatland", + "fuchsia.ui.scenic.Scenic", + ], + }, + { + protocol: "fuchsia.accessibility.semantics.SemanticsManager", + from: "#isolated_a11y_manager", + }, + { + protocol: "fuchsia.ui.input3.Keyboard", + from: "#isolated_text_manager", + }, + ], +} diff --git a/config/fuchsia/test/sysmem.shard.test-cml b/config/fuchsia/test/sysmem.shard.test-cml new file mode 100644 index 000000000000..8bebd998b956 --- /dev/null +++ b/config/fuchsia/test/sysmem.shard.test-cml @@ -0,0 +1,10 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + use: [ + { + protocol: "fuchsia.sysmem.Allocator", + }, + ], +} diff --git a/config/fuchsia/test/system_test_minimum.shard.test-cml b/config/fuchsia/test/system_test_minimum.shard.test-cml new file mode 100644 index 000000000000..6efde20f7708 --- /dev/null +++ b/config/fuchsia/test/system_test_minimum.shard.test-cml @@ -0,0 +1,46 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+{ + include: [ + "syslog/client.shard.cml", + ], + use: [ + { + directory: "config-data", + rights: [ "r*" ], + path: "/config/data", + }, + { + storage: "cache", + path: "/cache", + }, + { + storage: "custom_artifacts", + path: "/custom_artifacts", + }, + { + storage: "data", + path: "/data", + }, + { + storage: "tmp", + path: "/tmp", + }, + { + protocol: [ + "fuchsia.buildinfo.Provider", + "fuchsia.hwinfo.Product", + "fuchsia.intl.PropertyProvider", + "fuchsia.media.ProfileProvider", + "fuchsia.process.Launcher", + ], + }, + { + protocol: [ + "fuchsia.tracing.perfetto.ProducerConnector", + ], + availability: "optional", + }, + ], +} diff --git a/config/fuchsia/test/test_fonts.shard.test-cml b/config/fuchsia/test/test_fonts.shard.test-cml new file mode 100644 index 000000000000..6610e31a2c5f --- /dev/null +++ b/config/fuchsia/test/test_fonts.shard.test-cml @@ -0,0 +1,37 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + children: [ + { + name: "test_fonts", + url: "fuchsia-pkg://fuchsia.com/fonts#meta/fonts.cm", + }, + ], + offer: [ + { + protocol: "fuchsia.logger.LogSink", + from: "parent", + to: "#test_fonts", + }, + { + directory: "pkg", + subdir: "test_fonts", + from: "framework", + to: "#test_fonts", + as: "config-data", + rights: [ "r*" ], + } + ], + use: [ + { + protocol: "fuchsia.fonts.Provider", + from: "#test_fonts", + }, + ], + facets: { + "fuchsia.test": { + "deprecated-allowed-packages": [ "fonts" ], + }, + }, +} diff --git a/config/fuchsia/test/test_ui_stack.shard.test-cml b/config/fuchsia/test/test_ui_stack.shard.test-cml new file mode 100644 index 000000000000..102867cf1ae1 --- /dev/null +++ b/config/fuchsia/test/test_ui_stack.shard.test-cml @@ -0,0 +1,48 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +{ + include: [ "//build/config/fuchsia/test/sysmem.shard.test-cml" ], + children: [ + { + name: "test_ui_stack", + url: "fuchsia-pkg://fuchsia.com/flatland-scene-manager-test-ui-stack#meta/test-ui-stack.cm", + }, + ], + use: [ + { + protocol: [ + "fuchsia.accessibility.semantics.SemanticsManager", + "fuchsia.element.GraphicalPresenter", + "fuchsia.ui.composition.Allocator", + "fuchsia.ui.composition.Flatland", + "fuchsia.ui.input3.Keyboard", + "fuchsia.ui.scenic.Scenic", + ], + from: "#test_ui_stack", + }, + ], + offer: [ + { + storage: "tmp", + from: "parent", + to: "#test_ui_stack", + }, + { + protocol: [ + "fuchsia.logger.LogSink", + "fuchsia.scheduler.ProfileProvider", + "fuchsia.sysmem.Allocator", + "fuchsia.tracing.provider.Registry", + "fuchsia.vulkan.loader.Loader", + ], + from: "parent", + to: "#test_ui_stack", + }, + ], + facets: { + "fuchsia.test": { + "deprecated-allowed-packages": [ "flatland-scene-manager-test-ui-stack" ], + }, + }, +} diff --git a/config/fuchsia/test/web_instance.shard.test-cml b/config/fuchsia/test/web_instance.shard.test-cml new file mode 100644 index 000000000000..b996f4ab6826 --- /dev/null +++ b/config/fuchsia/test/web_instance.shard.test-cml @@ -0,0 +1,21 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
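+
+// A test that creates a fuchsia.web.Context in-process typically includes
+// this shard alongside its facet shard (illustrative):
+//
+//   include: [
+//     "//build/config/fuchsia/test/chromium_test_facet.shard.test-cml",
+//     "//build/config/fuchsia/test/web_instance.shard.test-cml",
+//   ]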
+{
+  include: [
+    "//build/config/fuchsia/test/audio_output.shard.test-cml",
+    "//build/config/fuchsia/test/fonts.shard.test-cml",
+    "//build/config/fuchsia/test/mark_vmo_executable.shard.test-cml",
+    "//build/config/fuchsia/test/network.shard.test-cml",
+    "//build/config/fuchsia/test/platform_video_codecs.shard.test-cml",
+    "//build/config/fuchsia/test/test_ui_stack.shard.test-cml",
+    "vulkan/client.shard.cml",
+  ],
+  use: [
+    {
+      protocol: [
+        "fuchsia.memorypressure.Provider",
+      ],
+    },
+  ],
+}
diff --git a/config/gcc/BUILD.gn b/config/gcc/BUILD.gn
new file mode 100644
index 000000000000..147ebfc53426
--- /dev/null
+++ b/config/gcc/BUILD.gn
@@ -0,0 +1,117 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/c++/c++.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/sysroot.gni")
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+  # When non-empty, overrides the target rpath value. This allows a user to
+  # make a Chromium build where binaries and shared libraries are meant to be
+  # installed into separate directories, like /usr/bin/chromium and
+  # /usr/lib/chromium for instance. It is useful when a build system that
+  # generates a whole target root filesystem (like Yocto) is used on top of gn,
+  # especially when cross-compiling.
+  # Note: this gn arg is similar to the gyp target_rpath generator flag.
+  gcc_target_rpath = ""
+  ldso_path = ""
+}
+
+# This config causes functions not to be automatically exported from shared
+# libraries. By default, all symbols are exported, but this means there are
+# lots of exports that slow everything down. In general we explicitly mark
+# which functions we want to export from components.
+#
+# Some third_party code assumes all functions are exported, so this is
+# separated into its own config so such libraries can remove this config to
+# make symbols public again.
+#
+# See http://gcc.gnu.org/wiki/Visibility
+config("symbol_visibility_hidden") {
+  cflags = [ "-fvisibility=hidden" ]
+
+  # Visibility attribute is not supported on AIX.
+  if (current_os != "aix") {
+    cflags_cc = [ "-fvisibility-inlines-hidden" ]
+    cflags_objcc = cflags_cc
+  }
+}
+
+# This config is usually set when :symbol_visibility_hidden is removed.
+# It's often a good idea to set visibility explicitly, as there are flags
+# that would error out otherwise (e.g. -fsanitize=cfi-unrelated-cast).
+config("symbol_visibility_default") {
+  cflags = [ "-fvisibility=default" ]
+}
+
+# The rpath is the dynamic library search path. Setting this config on a link
+# step will put the directory where the build generates shared libraries into
+# the rpath.
+#
+# This is required for component builds since the build generates many shared
+# libraries in the build directory that we expect to be automatically loaded.
+# It will be automatically applied in this case by :executable_config.
+#
+# In non-component builds, certain test binaries may expect to load dynamic
+# libraries from the current directory. As long as these aren't distributed,
+# this is OK. For these cases use something like this:
+#
+#   if ((is_linux || is_chromeos) && !is_component_build) {
+#     configs += [ "//build/config/gcc:rpath_for_built_shared_libraries" ]
+#   }
config("rpath_for_built_shared_libraries") {
+  if (!is_android && current_os != "aix" && !is_castos) {
+    # Note: Android and AIX don't support rpath.
Chromecast has its own logic for + # setting the rpath in //build/config/chromecast. + if (current_toolchain != default_toolchain || gcc_target_rpath == "") { + ldflags = [ + # Want to pass "\$". GN will re-escape as required for ninja. + "-Wl,-rpath=\$ORIGIN", + ] + } else { + ldflags = [ "-Wl,-rpath=${gcc_target_rpath}" ] + } + if (current_toolchain == default_toolchain && ldso_path != "") { + ldflags += [ "-Wl,--dynamic-linker=${ldso_path}" ] + } + } +} + +if (is_component_build && !is_android) { + # See the rpath_for... config above for why this is necessary for component + # builds. + executable_and_shared_library_configs_ = + [ ":rpath_for_built_shared_libraries" ] +} else { + executable_and_shared_library_configs_ = [] +} + +# Settings for executables. +config("executable_config") { + configs = executable_and_shared_library_configs_ + ldflags = [ "-pie" ] + if (is_android) { + ldflags += [ + "-Bdynamic", + "-Wl,-z,nocopyreloc", + ] + } + + if (!is_android && current_os != "aix") { + ldflags += [ + # TODO(GYP): Do we need a check on the binutils version here? + # + # Newer binutils don't set DT_RPATH unless you disable "new" dtags + # and the new DT_RUNPATH doesn't work without --no-as-needed flag. + "-Wl,--disable-new-dtags", + ] + } +} + +# Settings for shared libraries. +config("shared_library_config") { + configs = executable_and_shared_library_configs_ +} diff --git a/config/get_host_byteorder.py b/config/get_host_byteorder.py new file mode 100755 index 000000000000..7cc0cdff804f --- /dev/null +++ b/config/get_host_byteorder.py @@ -0,0 +1,11 @@ +#!/usr/bin/env python3 +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Get Byteorder of host architecture""" + + +import sys + +print(sys.byteorder) diff --git a/config/host_byteorder.gni b/config/host_byteorder.gni new file mode 100644 index 000000000000..1c3c72dd6a24 --- /dev/null +++ b/config/host_byteorder.gni @@ -0,0 +1,27 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This header file defines the "host_byteorder" variable. +# Not that this is currently used only for building v8. +# The chromium code generally assumes little-endianness. +declare_args() { + host_byteorder = "undefined" +} + +# Detect host byteorder +# ppc64 can be either BE or LE +if (host_cpu == "ppc64") { + if (current_os == "aix") { + host_byteorder = "big" + } else { + # Only use the script when absolutely necessary + host_byteorder = + exec_script("//build/config/get_host_byteorder.py", [], "trim string") + } +} else if (host_cpu == "ppc" || host_cpu == "s390" || host_cpu == "s390x" || + host_cpu == "mips" || host_cpu == "mips64") { + host_byteorder = "big" +} else { + host_byteorder = "little" +} diff --git a/config/ios/BUILD.gn b/config/ios/BUILD.gn new file mode 100644 index 000000000000..863d1d0ec75f --- /dev/null +++ b/config/ios/BUILD.gn @@ -0,0 +1,292 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/ios/ios_sdk.gni") +import("//build/toolchain/apple/toolchain.gni") +import("//build/toolchain/goma.gni") +import("//build/toolchain/rbe.gni") +import("//build/toolchain/toolchain.gni") +import("//build_overrides/build.gni") + +# This is included by reference in the //build/config/compiler config that +# is applied to all targets. 
+config("compiler") {
+  # These flags are shared between the C compiler and linker.
+  common_flags = []
+
+  # CPU architecture.
+  if (current_cpu == "x64") {
+    triplet_cpu = "x86_64"
+  } else if (current_cpu == "x86") {
+    triplet_cpu = "i386"
+  } else if (current_cpu == "arm" || current_cpu == "armv7") {
+    triplet_cpu = "armv7"
+  } else if (current_cpu == "arm64") {
+    triplet_cpu = "arm64"
+  } else {
+    assert(false, "unsupported cpu: $current_cpu")
+  }
+
+  # Environment.
+  if (target_environment == "simulator") {
+    triplet_environment = "-simulator"
+  } else if (target_environment == "device") {
+    triplet_environment = ""
+  } else if (target_environment == "catalyst") {
+    triplet_environment = "-macabi"
+  } else {
+    assert(false, "unsupported environment: $target_environment")
+  }
+
+  # OS.
+  triplet_os = "apple-ios"
+
+  # Set target.
+  common_flags = [
+    "-target",
+    "$triplet_cpu-$triplet_os$ios_deployment_target$triplet_environment",
+  ]
+
+  # This is here so that all files get recompiled after an Xcode update.
+  # (Defines are passed via the command line, and the build system rebuilds
+  # things when their command line changes.) Nothing should ever read this
+  # define.
+  defines = [ "CR_XCODE_VERSION=$xcode_version" ]
+
+  asmflags = common_flags
+  cflags = common_flags
+  swiftflags = common_flags
+
+  swiftflags += [
+    "-swift-version",
+    "5",
+  ]
+
+  cflags_objcc = [
+    # Without this, the constructors and destructors of a C++ object inside
+    # an Objective-C struct won't be called, which is very bad.
+    "-fobjc-call-cxx-cdtors",
+
+    # When using -std=c++20 or higher, clang automatically returns true for
+    # `__has_feature(modules)` as it enables cxx modules. This is problematic
+    # because Objective-C code uses this to detect whether `@import` can be
+    # used (this feature is also named modules).
+    #
+    # Since Chromium does not yet enable cxx modules, nor clang modules,
+    # force-disable cxx modules, which causes `__has_feature(modules)` to
+    # return false unless clang modules are explicitly enabled.
+    "-Xclang",
+    "-fno-cxx-modules",
+  ]
+
+  ldflags = common_flags
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is iOS-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+  # The variable ios_sdk_path is relative to root_build_dir when using Goma
+  # RBE and system Xcode (since Goma RBE only supports paths relative to
+  # source). Rebase the value in that case since gn does not convert paths in
+  # compiler flags (since it is not aware they are paths).
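+  #
+  # For example (paths illustrative), an absolute ios_sdk_path such as
+  #   /Applications/Xcode.app/Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/iPhoneSimulator.sdk
+  # is rewritten relative to root_build_dir, i.e. to something like
+  #   ../../../Applications/Xcode.app/.../iPhoneSimulator.sdk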
+ _sdk_root = ios_sdk_path + if (use_system_xcode && (use_goma || use_remoteexec)) { + _sdk_root = rebase_path(ios_sdk_path, root_build_dir) + } + + common_flags = [ + "-isysroot", + _sdk_root, + ] + swiftflags = [ + "-sdk", + _sdk_root, + ] + + if (target_environment == "catalyst") { + common_flags += [ + "-isystem", + "$_sdk_root/System/iOSSupport/usr/include", + "-iframework", + "$_sdk_root/System/iOSSupport/System/Library/Frameworks", + ] + + swiftflags += [ + "-isystem", + "$_sdk_root/System/iOSSupport/usr/include", + "-Fsystem", + "$_sdk_root/System/iOSSupport/System/Library/Frameworks", + ] + } + + asmflags = common_flags + cflags = common_flags + ldflags = common_flags +} + +config("ios_executable_flags") { + ldflags = [] + + # On "catalyst", the bundle structure is different (uses the same structure + # as a regular macOS app), so an additional -rpath is required. + if (target_environment == "catalyst") { + ldflags += [ "-Wl,-rpath,@loader_path/../Frameworks" ] + } + + ldflags += [ "-Wl,-rpath,@executable_path/Frameworks" ] +} + +config("ios_extension_executable_flags") { + configs = default_executable_configs + + ldflags = [ + "-e", + "_NSExtensionMain", + "-fapplication-extension", + ] + + # On "catalyst", the bundle structure is different (uses the same structure + # as a regular macOS app), so an additional -rpath is required. + if (target_environment == "catalyst") { + ldflags += [ "-Wl,-rpath,@loader_path/../../../../Frameworks" ] + } + + ldflags += [ "-Wl,-rpath,@executable_path/../../Frameworks" ] +} + +config("ios_dynamic_flags") { + ldflags = [ + # Always load Objective-C categories and class. + "-Wl,-ObjC", + ] + + # The path to the Swift compatibility libraries (required to run code built + # with version N of the SDK on older version of the OS) is relative to the + # toolchains directory and changes with the environment when using the + # system toolchain. When using the hermetic swift toolchain instead, those + # libraries are relative to $swift_toolchain_path. + if (swift_toolchain_path == "") { + _swift_compatibility_libs_prefix = ios_toolchains_path + } else { + _swift_compatibility_libs_prefix = swift_toolchain_path + } + + if (target_environment == "simulator") { + _swift_compatibility_libs_suffix = "iphonesimulator" + } else if (target_environment == "device") { + _swift_compatibility_libs_suffix = "iphoneos" + } else if (target_environment == "catalyst") { + # The Swift compatibility libraries have changed location starting with + # Xcode 13.0, so check the version of Xcode when deciding which path to + # use. + if (xcode_version_int >= 1300) { + _swift_compatibility_libs_suffix = "macosx" + } else { + _swift_compatibility_libs_suffix = "maccatalyst" + } + } + + lib_dirs = [ + "$ios_sdk_path/usr/lib/swift", + "$_swift_compatibility_libs_prefix/usr/lib/swift/" + + "$_swift_compatibility_libs_suffix", + ] + + # When building for catalyst, some Swift support libraries are in a + # different directory which needs to be added to the search path. + if (target_environment == "catalyst") { + lib_dirs += [ "$ios_sdk_path/System/iOSSupport/usr/lib/swift" ] + } +} + +config("ios_shared_library_flags") { + ldflags = [ + "-Wl,-rpath,@executable_path/Frameworks", + "-Wl,-rpath,@loader_path/Frameworks", + ] +} + +config("xctest_config") { + # Add some directories to the system framework search path to make + # them available to the compiler while silencing warnings in the + # framework headers. This is required for XCTest. 
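+  #
+  # (-iframework is the system-header variant of -F: the directory is added
+  # to the framework search path, and clang suppresses warnings coming from
+  # headers found there, much as -isystem does for ordinary includes.)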
+ common_flags = [ + "-iframework", + rebase_path("$ios_sdk_platform_path/Developer/Library/Frameworks", + root_build_dir), + "-iframework", + rebase_path("$ios_sdk_path/Developer/Library/Frameworks", root_build_dir), + ] + cflags = common_flags + ldflags = common_flags + swiftflags = common_flags + + include_dirs = [ "$ios_sdk_platform_path/Developer/usr/lib" ] + lib_dirs = [ "$ios_sdk_platform_path/Developer/usr/lib" ] + frameworks = [ + "Foundation.framework", + "XCTest.framework", + ] +} + +config("enable_swift_cxx_interop") { + swiftflags = [ "-enable-experimental-cxx-interop" ] +} + +group("xctest") { + public_configs = [ ":xctest_config" ] +} + +_xctrunner_path = + "$ios_sdk_platform_path/Developer/Library/Xcode/Agents/XCTRunner.app" + +# When building with Goma RBE, $ios_sdk_platform_path corresponds to a symlink +# below $root_build_dir that points to the real SDK to use. Because the files +# are below $root_build_dir, it is not possible to list them as a target input +# without gn complaining (as it can't find a target creating those files). +# +# The symlinks are created by //build/config/apple/sdk_info.py script invoked +# via exec_script() from //build/config/{ios/ios_sdk.gni,mac/mac_sdk.gni}. +# As the invocation is done by exec_script, there is no target that can list +# those files as output. +# +# To workaround this, add a target that pretends to create those files +# (but does nothing). See https://crbug.com/1061487 for why this is needed. +if (use_system_xcode && (use_goma || use_remoteexec)) { + action("copy_xctrunner_app") { + testonly = true + script = "//build/noop.py" + outputs = [ + "$_xctrunner_path/Info.plist", + "$_xctrunner_path/PkgInfo", + "$_xctrunner_path/XCTRunner", + ] + } +} + +# When creating the test runner for an XCUITest, the arm64e slice of the binary +# must be removed (at least until the app ships with arm64e slice which is not +# yet supported by Apple). +action("xctest_runner_without_arm64e") { + testonly = true + script = "//build/config/ios/strip_arm64e.py" + sources = [ "$_xctrunner_path/XCTRunner" ] + outputs = [ "$target_out_dir/XCTRunner" ] + args = [ + "--output", + rebase_path(outputs[0], root_build_dir), + "--input", + rebase_path(sources[0], root_build_dir), + "--xcode-version", + xcode_version, + ] + + # When running under ASan, the ASan runtime library must be packaged alongside + # the test runner binary. 
+ deps = [ "//build/config/sanitizers:deps" ] + if (use_system_xcode && (use_goma || use_remoteexec)) { + deps += [ ":copy_xctrunner_app" ] + } +} diff --git a/config/ios/BuildInfo.plist b/config/ios/BuildInfo.plist new file mode 100644 index 000000000000..3595e5aefbb2 --- /dev/null +++ b/config/ios/BuildInfo.plist @@ -0,0 +1,35 @@ + + + + + BuildMachineOSBuild + ${BUILD_MACHINE_OS_BUILD} + CFBundleSupportedPlatforms + + ${IOS_SUPPORTED_PLATFORM} + + DTCompiler + ${GCC_VERSION} + DTPlatformName + ${IOS_PLATFORM_NAME} + DTPlatformVersion + ${IOS_PLATFORM_VERSION} + DTPlatformBuild + ${IOS_PLATFORM_BUILD} + DTSDKBuild + ${IOS_SDK_BUILD} + DTSDKName + ${IOS_SDK_NAME} + MinimumOSVersion + ${IOS_DEPLOYMENT_TARGET} + DTXcode + ${XCODE_VERSION} + DTXcodeBuild + ${XCODE_BUILD} + UIDeviceFamily + + 1 + 2 + + + diff --git a/config/ios/Host-Info.plist b/config/ios/Host-Info.plist new file mode 100644 index 000000000000..6898c15fa1c5 --- /dev/null +++ b/config/ios/Host-Info.plist @@ -0,0 +1,126 @@ + + + + + CFBundleDevelopmentRegion + en + CFBundleDisplayName + ${PRODUCT_NAME} + CFBundleExecutable + ${EXECUTABLE_NAME} + CFBundleIdentifier + ${BUNDLE_IDENTIFIER} + CFBundleInfoDictionaryVersion + 6.0 + CFBundleName + ${PRODUCT_NAME} + CFBundlePackageType + APPL + CFBundleShortVersionString + 1.0 + CFBundleSignature + ???? + CFBundleVersion + 1.0 + LSRequiresIPhoneOS + + NSAppTransportSecurity + + NSAllowsArbitraryLoads + + + UIRequiredDeviceCapabilities + + armv7 + + UILaunchImages + + + UILaunchImageMinimumOSVersion + 7.0 + UILaunchImageName + Default + UILaunchImageOrientation + Portrait + UILaunchImageSize + {320, 480} + + + UILaunchImageMinimumOSVersion + 7.0 + UILaunchImageName + Default + UILaunchImageOrientation + Portrait + UILaunchImageSize + {320, 568} + + + UILaunchImageMinimumOSVersion + 8.0 + UILaunchImageName + Default + UILaunchImageOrientation + Portrait + UILaunchImageSize + {375, 667} + + + UILaunchImageMinimumOSVersion + 8.0 + UILaunchImageName + Default + UILaunchImageOrientation + Portrait + UILaunchImageSize + {414, 736} + + + UILaunchImageMinimumOSVersion + 8.0 + UILaunchImageName + Default + UILaunchImageOrientation + Landscape + UILaunchImageSize + {414, 736} + + + UILaunchImages~ipad + + + UILaunchImageMinimumOSVersion + 7.0 + UILaunchImageName + Default + UILaunchImageOrientation + Portrait + UILaunchImageSize + {768, 1024} + + + UILaunchImageMinimumOSVersion + 7.0 + UILaunchImageName + Default + UILaunchImageOrientation + Landscape + UILaunchImageSize + {768, 1024} + + + UISupportedInterfaceOrientations + + UIInterfaceOrientationPortrait + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + UISupportedInterfaceOrientations~ipad + + UIInterfaceOrientationPortrait + UIInterfaceOrientationPortraitUpsideDown + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + + diff --git a/config/ios/Module-Info.plist b/config/ios/Module-Info.plist new file mode 100644 index 000000000000..e1b09841541a --- /dev/null +++ b/config/ios/Module-Info.plist @@ -0,0 +1,26 @@ + + + + + CFBundleDevelopmentRegion + en + CFBundleExecutable + ${EXECUTABLE_NAME} + CFBundleIdentifier + ${BUNDLE_IDENTIFIER} + CFBundleInfoDictionaryVersion + 6.0 + CFBundleName + ${PRODUCT_NAME} + CFBundlePackageType + BNDL + CFBundleShortVersionString + 1.0 + CFBundleSignature + ???? 
+	CFBundleVersion
+	1
+	NSPrincipalClass
+	${XCTEST_BUNDLE_PRINCIPAL_CLASS}
+
+
diff --git a/config/ios/OWNERS b/config/ios/OWNERS
new file mode 100644
index 000000000000..6f3324f07c8f
--- /dev/null
+++ b/config/ios/OWNERS
@@ -0,0 +1 @@
+file://build/apple/OWNERS
diff --git a/config/ios/asset_catalog.gni b/config/ios/asset_catalog.gni
new file mode 100644
index 000000000000..8695bf7f9e3e
--- /dev/null
+++ b/config/ios/asset_catalog.gni
@@ -0,0 +1,150 @@
+# Copyright 2017 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/ios/ios_sdk.gni")
+
+# This template declares a bundle_data target that references an asset
+# catalog so that it is compiled to the asset catalog of the generated
+# bundle.
+#
+# The create_bundle target requires that all asset catalogs are part of an
+# .xcassets bundle. This requirement comes from actool, which only receives
+# the path to the .xcassets bundle directory and not to the individual
+# asset directories.
+#
+# The requirement is a bit problematic as it prevents compiling only a
+# subset of the asset catalogs contained in a .xcassets bundle. This template
+# fixes that by instead copying the content of the asset catalog to a
+# temporary .xcassets directory (below $root_out_dir) and defining a
+# bundle_data target that refers to those copies (this is efficient as the
+# "copy" is implemented by hardlinking if possible on macOS).
+#
+# Since the create_bundle target will only refer to the .xcassets directory,
+# an additional "action" target that runs a dummy script is defined. It
+# does nothing but pretends to generate the .xcassets directory (while
+# it is really created as a side-effect of the "copy" step). This works
+# around the check in "gn" that all inputs below $root_out_dir have
+# to be outputs of another target with a public dependency path.
+#
+# This template also ensures that the files are only copied once when the
+# build targets multiple architectures at the same time (aka "fat build").
+#
+# Arguments
+#
+#     sources:
+#       required, list of strings, paths to the files contained in the
+#       asset catalog directory; this must contain the Contents.json file
+#       and all the images referenced by it (not enforced by the template).
+#
+#     asset_type:
+#       required, string, type of the asset catalog, that is the extension
+#       of the directory containing the images and the Contents.json file.
+#
+template("asset_catalog") {
+  assert(defined(invoker.sources) && invoker.sources != [],
+         "sources must be defined and not empty for $target_name")
+
+  assert(defined(invoker.asset_type) && invoker.asset_type != "",
+         "asset_type must be defined and not empty for $target_name")
+
+  _copy_target_name = target_name + "__copy"
+  _data_target_name = target_name
+
+  _sources = invoker.sources
+  _outputs = []
+
+  # The compilation of resources into Assets.car is enabled automatically
+  # by the "create_bundle" target if any of the "bundle_data" sources'
+  # paths is in a .xcassets directory and matches one of the known asset
+  # catalog types.
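+  #
+  # For example (names illustrative), imageset("foo") with sources under
+  # "resources/foo.imageset/" copies them to
+  # "$target_gen_dir/foo.xcassets/foo.imageset/", which the bundle_data
+  # target below then references.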
+  _xcassets_dir = "$target_gen_dir/${target_name}.xcassets"
+  _output_dir = "$_xcassets_dir/" +
+                get_path_info(get_path_info(_sources[0], "dir"), "file")
+
+  foreach(_source, invoker.sources) {
+    _dir = get_path_info(_source, "dir")
+    _outputs += [ "$_output_dir/" + get_path_info(_source, "file") ]
+
+    assert(get_path_info(_dir, "extension") == invoker.asset_type,
+           "$_source dirname must have .${invoker.asset_type} extension")
+  }
+
+  action(_copy_target_name) {
+    # Forward "deps", "public_deps" and "testonly" in case some of the
+    # source files are generated.
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "public_deps",
+                             "testonly",
+                           ])
+
+    script = "//build/config/ios/hardlink.py"
+
+    visibility = [ ":$_data_target_name" ]
+    sources = _sources
+    outputs = _outputs + [ _xcassets_dir ]
+
+    args = [
+      rebase_path(get_path_info(_sources[0], "dir"), root_build_dir),
+      rebase_path(_output_dir, root_build_dir),
+    ]
+  }
+
+  bundle_data(_data_target_name) {
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "deps",
+                             "outputs",
+                             "public_deps",
+                             "sources",
+                           ])
+
+    sources = _outputs
+    outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
+    public_deps = [ ":$_copy_target_name" ]
+  }
+}
+
+# These templates are specialisations of the asset_catalog template for the
+# known asset catalog types (appiconset, colorset, imageset, launchimage,
+# symbolset).
+#
+# Arguments
+#
+#     sources:
+#       required, list of strings, paths to the files contained in the
+#       asset catalog directory; this must contain the Contents.json file
+#       and all the images referenced by it (not enforced by the template).
+#
+template("appiconset") {
+  asset_catalog(target_name) {
+    forward_variables_from(invoker, "*", [ "asset_type" ])
+    asset_type = "appiconset"
+  }
+}
+template("colorset") {
+  asset_catalog(target_name) {
+    forward_variables_from(invoker, "*", [ "asset_type" ])
+    asset_type = "colorset"
+  }
+}
+template("imageset") {
+  asset_catalog(target_name) {
+    forward_variables_from(invoker, "*", [ "asset_type" ])
+    asset_type = "imageset"
+  }
+}
+template("launchimage") {
+  asset_catalog(target_name) {
+    forward_variables_from(invoker, "*", [ "asset_type" ])
+    asset_type = "launchimage"
+  }
+}
+template("symbolset") {
+  asset_catalog(target_name) {
+    forward_variables_from(invoker, "*", [ "asset_type" ])
+    asset_type = "symbolset"
+  }
+}
diff --git a/config/ios/bundle_data_from_filelist.gni b/config/ios/bundle_data_from_filelist.gni
new file mode 100644
index 000000000000..763dc8673620
--- /dev/null
+++ b/config/ios/bundle_data_from_filelist.gni
@@ -0,0 +1,24 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(current_os == "ios")
+
+template("bundle_data_from_filelist") {
+  assert(defined(invoker.filelist_name), "Requires setting filelist_name")
+
+  _filelist_content = read_file(invoker.filelist_name, "list lines")
+  bundle_data(target_name) {
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "filelist_name",
+                             "sources",
+                           ])
+    sources = filter_exclude(_filelist_content, [ "#*" ])
+    if (!defined(outputs)) {
+      outputs = [ "{{bundle_resources_dir}}/" +
+                  "{{source_root_relative_dir}}/{{source_file_part}}" ]
+    }
+  }
+}
diff --git a/config/ios/codesign.py b/config/ios/codesign.py
new file mode 100644
index 000000000000..fd96f312d658
--- /dev/null
+++ b/config/ios/codesign.py
@@ -0,0 +1,722 @@
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
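+
+"""Code-signs Apple bundles for the build.
+
+Illustrative invocation (values made up; see the Action subclasses below
+for the complete argument lists):
+
+  python3 codesign.py code-sign-bundle out/Default/Foo.app \
+      --identity=- --binary=out/Default/Foo --platform=iphonesimulator
+"""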
+ + +import argparse +import codecs +import datetime +import fnmatch +import glob +import json +import os +import plistlib +import shutil +import subprocess +import stat +import sys +import tempfile + +if sys.version_info.major < 3: + basestring_compat = basestring +else: + basestring_compat = str + + +def GetProvisioningProfilesDir(): + """Returns the location of the installed mobile provisioning profiles. + + Returns: + The path to the directory containing the installed mobile provisioning + profiles as a string. + """ + return os.path.join( + os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') + + +def ReadPlistFromString(plist_bytes): + """Parse property list from given |plist_bytes|. + + Args: + plist_bytes: contents of property list to load. Must be bytes in python 3. + + Returns: + The contents of property list as a python object. + """ + if sys.version_info.major == 2: + return plistlib.readPlistFromString(plist_bytes) + else: + return plistlib.loads(plist_bytes) + + +def LoadPlistFile(plist_path): + """Loads property list file at |plist_path|. + + Args: + plist_path: path to the property list file to load. + + Returns: + The content of the property list file as a python object. + """ + if sys.version_info.major == 2: + return plistlib.readPlistFromString( + subprocess.check_output( + ['xcrun', 'plutil', '-convert', 'xml1', '-o', '-', plist_path])) + else: + with open(plist_path, 'rb') as fp: + return plistlib.load(fp) + + +def CreateSymlink(value, location): + """Creates symlink with value at location if the target exists.""" + target = os.path.join(os.path.dirname(location), value) + if os.path.exists(location): + os.unlink(location) + os.symlink(value, location) + + +class Bundle(object): + """Wraps a bundle.""" + + def __init__(self, bundle_path, platform): + """Initializes the Bundle object with data from bundle Info.plist file.""" + self._path = bundle_path + self._kind = Bundle.Kind(platform, os.path.splitext(bundle_path)[-1]) + self._data = None + + def Load(self): + self._data = LoadPlistFile(self.info_plist_path) + + @staticmethod + def Kind(platform, extension): + if platform == 'iphonesimulator' or platform == 'iphoneos': + return 'ios' + if platform == 'macosx': + if extension == '.framework': + return 'mac_framework' + return 'mac' + raise ValueError('unknown bundle type %s for %s' % (extension, platform)) + + @property + def kind(self): + return self._kind + + @property + def path(self): + return self._path + + @property + def contents_dir(self): + if self._kind == 'mac': + return os.path.join(self.path, 'Contents') + if self._kind == 'mac_framework': + return os.path.join(self.path, 'Versions/A') + return self.path + + @property + def executable_dir(self): + if self._kind == 'mac': + return os.path.join(self.contents_dir, 'MacOS') + return self.contents_dir + + @property + def resources_dir(self): + if self._kind == 'mac' or self._kind == 'mac_framework': + return os.path.join(self.contents_dir, 'Resources') + return self.path + + @property + def info_plist_path(self): + if self._kind == 'mac_framework': + return os.path.join(self.resources_dir, 'Info.plist') + return os.path.join(self.contents_dir, 'Info.plist') + + @property + def signature_dir(self): + return os.path.join(self.contents_dir, '_CodeSignature') + + @property + def identifier(self): + return self._data['CFBundleIdentifier'] + + @property + def binary_name(self): + return self._data['CFBundleExecutable'] + + @property + def binary_path(self): + return 
os.path.join(self.executable_dir, self.binary_name) + + def Validate(self, expected_mappings): + """Checks that keys in the bundle have the expected value. + + Args: + expected_mappings: a dictionary of string to object, each mapping will + be looked up in the bundle data to check it has the same value (missing + values will be ignored) + + Returns: + A dictionary of the key with a different value between expected_mappings + and the content of the bundle (i.e. errors) so that caller can format the + error message. The dictionary will be empty if there are no errors. + """ + errors = {} + for key, expected_value in expected_mappings.items(): + if key in self._data: + value = self._data[key] + if value != expected_value: + errors[key] = (value, expected_value) + return errors + + +class ProvisioningProfile(object): + """Wraps a mobile provisioning profile file.""" + + def __init__(self, provisioning_profile_path): + """Initializes the ProvisioningProfile with data from profile file.""" + self._path = provisioning_profile_path + self._data = ReadPlistFromString( + subprocess.check_output([ + 'xcrun', 'security', 'cms', '-D', '-u', 'certUsageAnyCA', '-i', + provisioning_profile_path + ])) + + @property + def path(self): + return self._path + + @property + def team_identifier(self): + return self._data.get('TeamIdentifier', [''])[0] + + @property + def name(self): + return self._data.get('Name', '') + + @property + def application_identifier_pattern(self): + return self._data.get('Entitlements', {}).get('application-identifier', '') + + @property + def application_identifier_prefix(self): + return self._data.get('ApplicationIdentifierPrefix', [''])[0] + + @property + def entitlements(self): + return self._data.get('Entitlements', {}) + + @property + def expiration_date(self): + return self._data.get('ExpirationDate', datetime.datetime.now()) + + def ValidToSignBundle(self, bundle_identifier): + """Checks whether the provisioning profile can sign bundle_identifier. + + Args: + bundle_identifier: the identifier of the bundle that needs to be signed. + + Returns: + True if the mobile provisioning profile can be used to sign a bundle + with the corresponding bundle_identifier, False otherwise. 
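+
+    Example (hypothetical values): a profile whose application-identifier
+    entitlement is "ABCDE12345.org.chromium.*" can sign a bundle whose
+    identifier is "org.chromium.gtest.foo", since fnmatch expands the
+    trailing "*".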
+ """ + return fnmatch.fnmatch( + '%s.%s' % (self.application_identifier_prefix, bundle_identifier), + self.application_identifier_pattern) + + def Install(self, installation_path): + """Copies mobile provisioning profile info to |installation_path|.""" + shutil.copy2(self.path, installation_path) + st = os.stat(installation_path) + os.chmod(installation_path, st.st_mode | stat.S_IWUSR) + + +class Entitlements(object): + """Wraps an Entitlement plist file.""" + + def __init__(self, entitlements_path): + """Initializes Entitlements object from entitlement file.""" + self._path = entitlements_path + self._data = LoadPlistFile(self._path) + + @property + def path(self): + return self._path + + def ExpandVariables(self, substitutions): + self._data = self._ExpandVariables(self._data, substitutions) + + def _ExpandVariables(self, data, substitutions): + if isinstance(data, basestring_compat): + for key, substitution in substitutions.items(): + data = data.replace('$(%s)' % (key,), substitution) + return data + + if isinstance(data, dict): + for key, value in data.items(): + data[key] = self._ExpandVariables(value, substitutions) + return data + + if isinstance(data, list): + for i, value in enumerate(data): + data[i] = self._ExpandVariables(value, substitutions) + + return data + + def LoadDefaults(self, defaults): + for key, value in defaults.items(): + if key not in self._data: + self._data[key] = value + + def WriteTo(self, target_path): + with open(target_path, 'wb') as fp: + if sys.version_info.major == 2: + plistlib.writePlist(self._data, fp) + else: + plistlib.dump(self._data, fp) + + +def FindProvisioningProfile(provisioning_profile_paths, bundle_identifier, + required): + """Finds mobile provisioning profile to use to sign bundle. + + Args: + bundle_identifier: the identifier of the bundle to sign. + + Returns: + The ProvisioningProfile object that can be used to sign the Bundle + object or None if no matching provisioning profile was found. + """ + if not provisioning_profile_paths: + provisioning_profile_paths = glob.glob( + os.path.join(GetProvisioningProfilesDir(), '*.mobileprovision')) + + # Iterate over all installed mobile provisioning profiles and filter those + # that can be used to sign the bundle, ignoring expired ones. + now = datetime.datetime.now() + valid_provisioning_profiles = [] + one_hour = datetime.timedelta(0, 3600) + for provisioning_profile_path in provisioning_profile_paths: + provisioning_profile = ProvisioningProfile(provisioning_profile_path) + if provisioning_profile.expiration_date - now < one_hour: + sys.stderr.write( + 'Warning: ignoring expired provisioning profile: %s.\n' % + provisioning_profile_path) + continue + if provisioning_profile.ValidToSignBundle(bundle_identifier): + valid_provisioning_profiles.append(provisioning_profile) + + if not valid_provisioning_profiles: + if required: + sys.stderr.write( + 'Error: no mobile provisioning profile found for "%s" in %s.\n' % + (bundle_identifier, provisioning_profile_paths)) + sys.exit(1) + return None + + # Select the most specific mobile provisioning profile, i.e. the one with + # the longest application identifier pattern (prefer the one with the latest + # expiration date as a secondary criteria). 
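+  # For example (hypothetical patterns), "T.org.chromium.*" is preferred
+  # over "T.*" because its pattern is longer; equal-length patterns fall
+  # back to whichever profile expires later.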
+ selected_provisioning_profile = max( + valid_provisioning_profiles, + key=lambda p: (len(p.application_identifier_pattern), p.expiration_date)) + + one_week = datetime.timedelta(7) + if selected_provisioning_profile.expiration_date - now < 2 * one_week: + sys.stderr.write( + 'Warning: selected provisioning profile will expire soon: %s' % + selected_provisioning_profile.path) + return selected_provisioning_profile + + +def CodeSignBundle(bundle_path, identity, extra_args): + process = subprocess.Popen( + ['xcrun', 'codesign', '--force', '--sign', identity, '--timestamp=none'] + + list(extra_args) + [bundle_path], + stderr=subprocess.PIPE, + universal_newlines=True) + _, stderr = process.communicate() + if process.returncode: + sys.stderr.write(stderr) + sys.exit(process.returncode) + for line in stderr.splitlines(): + if line.endswith(': replacing existing signature'): + # Ignore warning about replacing existing signature as this should only + # happen when re-signing system frameworks (and then it is expected). + continue + sys.stderr.write(line) + sys.stderr.write('\n') + + +def InstallSystemFramework(framework_path, bundle_path, args): + """Install framework from |framework_path| to |bundle| and code-re-sign it.""" + installed_framework_path = os.path.join( + bundle_path, 'Frameworks', os.path.basename(framework_path)) + + if os.path.isfile(framework_path): + shutil.copy(framework_path, installed_framework_path) + elif os.path.isdir(framework_path): + if os.path.exists(installed_framework_path): + shutil.rmtree(installed_framework_path) + shutil.copytree(framework_path, installed_framework_path) + + CodeSignBundle(installed_framework_path, args.identity, + ['--deep', '--preserve-metadata=identifier,entitlements,flags']) + + +def GenerateEntitlements(path, provisioning_profile, bundle_identifier): + """Generates an entitlements file. + + Args: + path: path to the entitlements template file + provisioning_profile: ProvisioningProfile object to use, may be None + bundle_identifier: identifier of the bundle to sign. + """ + entitlements = Entitlements(path) + if provisioning_profile: + entitlements.LoadDefaults(provisioning_profile.entitlements) + app_identifier_prefix = \ + provisioning_profile.application_identifier_prefix + '.' + else: + app_identifier_prefix = '*.' + entitlements.ExpandVariables({ + 'CFBundleIdentifier': bundle_identifier, + 'AppIdentifierPrefix': app_identifier_prefix, + }) + return entitlements + + +def GenerateBundleInfoPlist(bundle, plist_compiler, partial_plist): + """Generates the bundle Info.plist for a list of partial .plist files. + + Args: + bundle: a Bundle instance + plist_compiler: string, path to the Info.plist compiler + partial_plist: list of path to partial .plist files to merge + """ + + # Filter empty partial .plist files (this happens if an application + # does not compile any asset catalog, in which case the partial .plist + # file from the asset catalog compilation step is just a stamp file). + filtered_partial_plist = [] + for plist in partial_plist: + plist_size = os.stat(plist).st_size + if plist_size: + filtered_partial_plist.append(plist) + + # Invoke the plist_compiler script. It needs to be a python script. 
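+  # The assembled command then looks like (paths illustrative):
+  #   python3 <plist_compiler> merge -f binary1 -o Foo.app/Info.plist \
+  #       partial_0.plist partial_1.plist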
+ subprocess.check_call([ + 'python3', + plist_compiler, + 'merge', + '-f', + 'binary1', + '-o', + bundle.info_plist_path, + ] + filtered_partial_plist) + + +class Action(object): + """Class implementing one action supported by the script.""" + + @classmethod + def Register(cls, subparsers): + parser = subparsers.add_parser(cls.name, help=cls.help) + parser.set_defaults(func=cls._Execute) + cls._Register(parser) + + +class CodeSignBundleAction(Action): + """Class implementing the code-sign-bundle action.""" + + name = 'code-sign-bundle' + help = 'perform code signature for a bundle' + + @staticmethod + def _Register(parser): + parser.add_argument( + '--entitlements', '-e', dest='entitlements_path', + help='path to the entitlements file to use') + parser.add_argument( + 'path', help='path to the iOS bundle to codesign') + parser.add_argument( + '--identity', '-i', required=True, + help='identity to use to codesign') + parser.add_argument( + '--binary', '-b', required=True, + help='path to the iOS bundle binary') + parser.add_argument( + '--framework', '-F', action='append', default=[], dest='frameworks', + help='install and resign system framework') + parser.add_argument( + '--disable-code-signature', action='store_true', dest='no_signature', + help='disable code signature') + parser.add_argument( + '--disable-embedded-mobileprovision', action='store_false', + default=True, dest='embedded_mobileprovision', + help='disable finding and embedding mobileprovision') + parser.add_argument( + '--platform', '-t', required=True, + help='platform the signed bundle is targeting') + parser.add_argument( + '--partial-info-plist', '-p', action='append', default=[], + help='path to partial Info.plist to merge to create bundle Info.plist') + parser.add_argument( + '--plist-compiler-path', '-P', action='store', + help='path to the plist compiler script (for --partial-info-plist)') + parser.add_argument( + '--mobileprovision', + '-m', + action='append', + default=[], + dest='mobileprovision_files', + help='list of mobileprovision files to use. If empty, uses the files ' + + 'in $HOME/Library/MobileDevice/Provisioning Profiles') + parser.set_defaults(no_signature=False) + + @staticmethod + def _Execute(args): + if not args.identity: + args.identity = '-' + + bundle = Bundle(args.path, args.platform) + + if args.partial_info_plist: + GenerateBundleInfoPlist(bundle, args.plist_compiler_path, + args.partial_info_plist) + + # The bundle Info.plist may have been updated by GenerateBundleInfoPlist() + # above. Load the bundle information from Info.plist after the modification + # have been written to disk. + bundle.Load() + + # According to Apple documentation, the application binary must be the same + # as the bundle name without the .app suffix. See crbug.com/740476 for more + # information on what problem this can cause. + # + # To prevent this class of error, fail with an error if the binary name is + # incorrect in the Info.plist as it is not possible to update the value in + # Info.plist at this point (the file has been copied by a different target + # and ninja would consider the build dirty if it was updated). + # + # Also checks that the name of the bundle is correct too (does not cause the + # build to be considered dirty, but still terminate the script in case of an + # incorrect bundle name). 
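+    # For example, for a bundle named "Foo.app", both CFBundleName and
+    # CFBundleExecutable must be "Foo", and the binary must be Foo.app/Foo.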
+    #
+    # Apple documentation is available at:
+    # https://developer.apple.com/library/content/documentation/CoreFoundation/Conceptual/CFBundles/BundleTypes/BundleTypes.html
+    bundle_name = os.path.splitext(os.path.basename(bundle.path))[0]
+    errors = bundle.Validate({
+        'CFBundleName': bundle_name,
+        'CFBundleExecutable': bundle_name,
+    })
+    if errors:
+      for key in sorted(errors):
+        value, expected_value = errors[key]
+        sys.stderr.write('%s: error: %s value incorrect: %s != %s\n' % (
+            bundle.path, key, value, expected_value))
+      sys.stderr.flush()
+      sys.exit(1)
+
+    # Delete existing embedded mobile provisioning.
+    embedded_provisioning_profile = os.path.join(
+        bundle.path, 'embedded.mobileprovision')
+    if os.path.isfile(embedded_provisioning_profile):
+      os.unlink(embedded_provisioning_profile)
+
+    # Delete existing code signature.
+    if os.path.exists(bundle.signature_dir):
+      shutil.rmtree(bundle.signature_dir)
+
+    # Install system frameworks if requested.
+    for framework_path in args.frameworks:
+      InstallSystemFramework(framework_path, args.path, args)
+
+    # Copy main binary into bundle.
+    if not os.path.isdir(bundle.executable_dir):
+      os.makedirs(bundle.executable_dir)
+    shutil.copy(args.binary, bundle.binary_path)
+
+    if bundle.kind == 'mac_framework':
+      # Create Versions/Current -> Versions/A symlink
+      CreateSymlink('A', os.path.join(bundle.path, 'Versions/Current'))
+
+      # Create $binary_name -> Versions/Current/$binary_name symlink
+      CreateSymlink(os.path.join('Versions/Current', bundle.binary_name),
+                    os.path.join(bundle.path, bundle.binary_name))
+
+      # Create optional symlinks.
+      for name in ('Headers', 'Resources', 'Modules'):
+        target = os.path.join(bundle.path, 'Versions/A', name)
+        if os.path.exists(target):
+          CreateSymlink(os.path.join('Versions/Current', name),
+                        os.path.join(bundle.path, name))
+        else:
+          obsolete_path = os.path.join(bundle.path, name)
+          if os.path.exists(obsolete_path):
+            os.unlink(obsolete_path)
+
+    if args.no_signature:
+      return
+
+    codesign_extra_args = []
+
+    if args.embedded_mobileprovision:
+      # Find the mobile provisioning profile and embed it into the bundle (if
+      # a code signing identity has been provided, this fails when no valid
+      # mobile provisioning profile is found).
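+      # (An identity of "-" requests ad-hoc signing, in which case no
+      # provisioning profile is required.)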
+ provisioning_profile_required = args.identity != '-' + provisioning_profile = FindProvisioningProfile( + args.mobileprovision_files, bundle.identifier, + provisioning_profile_required) + if provisioning_profile and args.platform != 'iphonesimulator': + provisioning_profile.Install(embedded_provisioning_profile) + + if args.entitlements_path is not None: + temporary_entitlements_file = \ + tempfile.NamedTemporaryFile(suffix='.xcent') + codesign_extra_args.extend( + ['--entitlements', temporary_entitlements_file.name]) + + entitlements = GenerateEntitlements( + args.entitlements_path, provisioning_profile, bundle.identifier) + entitlements.WriteTo(temporary_entitlements_file.name) + + CodeSignBundle(bundle.path, args.identity, codesign_extra_args) + + +class CodeSignFileAction(Action): + """Class implementing code signature for a single file.""" + + name = 'code-sign-file' + help = 'code-sign a single file' + + @staticmethod + def _Register(parser): + parser.add_argument( + 'path', help='path to the file to codesign') + parser.add_argument( + '--identity', '-i', required=True, + help='identity to use to codesign') + parser.add_argument( + '--output', '-o', + help='if specified copy the file to that location before signing it') + parser.set_defaults(sign=True) + + @staticmethod + def _Execute(args): + if not args.identity: + args.identity = '-' + + install_path = args.path + if args.output: + + if os.path.isfile(args.output): + os.unlink(args.output) + elif os.path.isdir(args.output): + shutil.rmtree(args.output) + + if os.path.isfile(args.path): + shutil.copy(args.path, args.output) + elif os.path.isdir(args.path): + shutil.copytree(args.path, args.output) + + install_path = args.output + + CodeSignBundle(install_path, args.identity, + ['--deep', '--preserve-metadata=identifier,entitlements']) + + +class GenerateEntitlementsAction(Action): + """Class implementing the generate-entitlements action.""" + + name = 'generate-entitlements' + help = 'generate entitlements file' + + @staticmethod + def _Register(parser): + parser.add_argument( + '--entitlements', '-e', dest='entitlements_path', + help='path to the entitlements file to use') + parser.add_argument( + 'path', help='path to the entitlements file to generate') + parser.add_argument( + '--info-plist', '-p', required=True, + help='path to the bundle Info.plist') + parser.add_argument( + '--mobileprovision', + '-m', + action='append', + default=[], + dest='mobileprovision_files', + help='set of mobileprovision files to use. If empty, uses the files ' + + 'in $HOME/Library/MobileDevice/Provisioning Profiles') + + @staticmethod + def _Execute(args): + info_plist = LoadPlistFile(args.info_plist) + bundle_identifier = info_plist['CFBundleIdentifier'] + provisioning_profile = FindProvisioningProfile(args.mobileprovision_files, + bundle_identifier, False) + entitlements = GenerateEntitlements( + args.entitlements_path, provisioning_profile, bundle_identifier) + entitlements.WriteTo(args.path) + + +class FindProvisioningProfileAction(Action): + """Class implementing the find-codesign-identity action.""" + + name = 'find-provisioning-profile' + help = 'find provisioning profile for use by Xcode project generator' + + @staticmethod + def _Register(parser): + parser.add_argument('--bundle-id', + '-b', + required=True, + help='bundle identifier') + parser.add_argument( + '--mobileprovision', + '-m', + action='append', + default=[], + dest='mobileprovision_files', + help='set of mobileprovision files to use. 
If empty, uses the files ' + + 'in $HOME/Library/MobileDevice/Provisioning Profiles') + + @staticmethod + def _Execute(args): + provisioning_profile_info = {} + provisioning_profile = FindProvisioningProfile(args.mobileprovision_files, + args.bundle_id, False) + for key in ('team_identifier', 'name'): + if provisioning_profile: + provisioning_profile_info[key] = getattr(provisioning_profile, key) + else: + provisioning_profile_info[key] = '' + print(json.dumps(provisioning_profile_info)) + + +def Main(): + # Cache this codec so that plistlib can find it. See + # https://crbug.com/999461#c12 for more details. + codecs.lookup('utf-8') + + parser = argparse.ArgumentParser('codesign iOS bundles') + subparsers = parser.add_subparsers() + + actions = [ + CodeSignBundleAction, + CodeSignFileAction, + GenerateEntitlementsAction, + FindProvisioningProfileAction, + ] + + for action in actions: + action.Register(subparsers) + + args = parser.parse_args() + args.func(args) + + +if __name__ == '__main__': + sys.exit(Main()) diff --git a/config/ios/compile_ib_files.py b/config/ios/compile_ib_files.py new file mode 100644 index 000000000000..e42001601278 --- /dev/null +++ b/config/ios/compile_ib_files.py @@ -0,0 +1,56 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +import argparse +import logging +import os +import re +import subprocess +import sys + + +def main(): + parser = argparse.ArgumentParser( + description='A script to compile xib and storyboard.', + fromfile_prefix_chars='@') + parser.add_argument('-o', '--output', required=True, + help='Path to output bundle.') + parser.add_argument('-i', '--input', required=True, + help='Path to input xib or storyboard.') + args, unknown_args = parser.parse_known_args() + + ibtool_args = [ + 'xcrun', 'ibtool', + '--errors', '--warnings', '--notices', + '--output-format', 'human-readable-text' + ] + ibtool_args += unknown_args + ibtool_args += [ + '--compile', + os.path.abspath(args.output), + os.path.abspath(args.input) + ] + + ibtool_section_re = re.compile(r'/\*.*\*/') + ibtool_re = re.compile(r'.*note:.*is clipping its content') + try: + stdout = subprocess.check_output(ibtool_args) + except subprocess.CalledProcessError as e: + print(e.output) + raise + current_section_header = None + for line in stdout.splitlines(): + if ibtool_section_re.match(line): + current_section_header = line + elif not ibtool_re.match(line): + if current_section_header: + print(current_section_header) + current_section_header = None + print(line) + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/config/ios/compile_xcassets_unittests.py b/config/ios/compile_xcassets_unittests.py new file mode 100644 index 000000000000..8537e4ec2567 --- /dev/null +++ b/config/ios/compile_xcassets_unittests.py @@ -0,0 +1,141 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
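+
+"""Unit tests for compile_xcassets.FilterCompilerOutput.
+
+These can be run directly, from the directory containing compile_xcassets.py
+(so the import below resolves), e.g.: python3 compile_xcassets_unittests.py
+"""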
+ +import unittest +import compile_xcassets + + +class TestFilterCompilerOutput(unittest.TestCase): + + relative_paths = { + '/Users/janedoe/chromium/src/Chromium.xcassets': + '../../Chromium.xcassets', + '/Users/janedoe/chromium/src/out/Default/Chromium.app/Assets.car': + 'Chromium.app/Assets.car', + } + + def testNoError(self): + self.assertEquals( + '', + compile_xcassets.FilterCompilerOutput( + '/* com.apple.actool.compilation-results */\n' + '/Users/janedoe/chromium/src/out/Default/Chromium.app/Assets.car\n', + self.relative_paths)) + + def testNoErrorRandomMessages(self): + self.assertEquals( + '', + compile_xcassets.FilterCompilerOutput( + '2017-07-04 04:59:19.460 ibtoold[23487:41214] CoreSimulator is att' + 'empting to unload a stale CoreSimulatorService job. Existing' + ' job (com.apple.CoreSimulator.CoreSimulatorService.179.1.E8tt' + 'yeDeVgWK) is from an older version and is being removed to pr' + 'event problems.\n' + '/* com.apple.actool.compilation-results */\n' + '/Users/janedoe/chromium/src/out/Default/Chromium.app/Assets.car\n', + self.relative_paths)) + + def testWarning(self): + self.assertEquals( + '/* com.apple.actool.document.warnings */\n' + '../../Chromium.xcassets:./image1.imageset/[universal][][][1x][][][][' + '][][]: warning: The file "image1.png" for the image set "image1"' + ' does not exist.\n', + compile_xcassets.FilterCompilerOutput( + '/* com.apple.actool.document.warnings */\n' + '/Users/janedoe/chromium/src/Chromium.xcassets:./image1.imageset/[' + 'universal][][][1x][][][][][][]: warning: The file "image1.png' + '" for the image set "image1" does not exist.\n' + '/* com.apple.actool.compilation-results */\n' + '/Users/janedoe/chromium/src/out/Default/Chromium.app/Assets.car\n', + self.relative_paths)) + + def testError(self): + self.assertEquals( + '/* com.apple.actool.errors */\n' + '../../Chromium.xcassets: error: The output directory "/Users/janedoe/' + 'chromium/src/out/Default/Chromium.app" does not exist.\n', + compile_xcassets.FilterCompilerOutput( + '/* com.apple.actool.errors */\n' + '/Users/janedoe/chromium/src/Chromium.xcassets: error: The output ' + 'directory "/Users/janedoe/chromium/src/out/Default/Chromium.a' + 'pp" does not exist.\n' + '/* com.apple.actool.compilation-results */\n', + self.relative_paths)) + + def testSpurious(self): + self.assertEquals( + '/* com.apple.actool.document.warnings */\n' + '../../Chromium.xcassets:./AppIcon.appiconset: warning: A 1024x1024 ap' + 'p store icon is required for iOS apps\n', + compile_xcassets.FilterCompilerOutput( + '/* com.apple.actool.document.warnings */\n' + '/Users/janedoe/chromium/src/Chromium.xcassets:./AppIcon.appiconse' + 't: warning: A 1024x1024 app store icon is required for iOS ap' + 'ps\n' + '/* com.apple.actool.document.notices */\n' + '/Users/janedoe/chromium/src/Chromium.xcassets:./AppIcon.appiconse' + 't/[][ipad][76x76][][][1x][][]: notice: (null)\n', + self.relative_paths)) + + def testComplexError(self): + self.assertEquals( + '/* com.apple.actool.errors */\n' + ': error: Failed to find a suitable device for the type SimDeviceType ' + ': com.apple.dt.Xcode.IBSimDeviceType.iPad-2x with runtime SimRunt' + 'ime : 10.3.1 (14E8301) - com.apple.CoreSimulator.SimRuntime.iOS-1' + '0-3\n' + ' Failure Reason: Failed to create SimDeviceSet at path /Users/jane' + 'doe/Library/Developer/Xcode/UserData/IB Support/Simulator Devices' + '. 
You\'ll want to check the logs in ~/Library/Logs/CoreSimulator ' + 'to see why creating the SimDeviceSet failed.\n' + ' Underlying Errors:\n' + ' Description: Failed to initialize simulator device set.\n' + ' Failure Reason: Failed to subscribe to notifications from Cor' + 'eSimulatorService.\n' + ' Underlying Errors:\n' + ' Description: Error returned in reply to notification requ' + 'est: Connection invalid\n' + ' Failure Reason: Software caused connection abort\n', + compile_xcassets.FilterCompilerOutput( + '2017-07-07 10:37:27.367 ibtoold[88538:12553239] CoreSimulator det' + 'ected Xcode.app relocation or CoreSimulatorService version ch' + 'ange. Framework path (/Applications/Xcode.app/Contents/Devel' + 'oper/Library/PrivateFrameworks/CoreSimulator.framework) and v' + 'ersion (375.21) does not match existing job path (/Library/De' + 'veloper/PrivateFrameworks/CoreSimulator.framework/Versions/A/' + 'XPCServices/com.apple.CoreSimulator.CoreSimulatorService.xpc)' + ' and version (459.13). Attempting to remove the stale servic' + 'e in order to add the expected version.\n' + '2017-07-07 10:37:27.625 ibtoold[88538:12553256] CoreSimulatorServ' + 'ice connection interrupted. Resubscribing to notifications.\n' + '2017-07-07 10:37:27.632 ibtoold[88538:12553264] CoreSimulatorServ' + 'ice connection became invalid. Simulator services will no lo' + 'nger be available.\n' + '2017-07-07 10:37:27.642 ibtoold[88538:12553274] CoreSimulatorServ' + 'ice connection became invalid. Simulator services will no lo' + 'nger be available.\n' + '/* com.apple.actool.errors */\n' + ': error: Failed to find a suitable device for the type SimDeviceT' + 'ype : com.apple.dt.Xcode.IBSimDeviceType.iPad-2x with runtime' + ' SimRuntime : 10.3.1 (14E8301) - com.apple.CoreSimulator.SimR' + 'untime.iOS-10-3\n' + ' Failure Reason: Failed to create SimDeviceSet at path /Users/' + 'janedoe/Library/Developer/Xcode/UserData/IB Support/Simulator' + ' Devices. You\'ll want to check the logs in ~/Library/Logs/Co' + 'reSimulator to see why creating the SimDeviceSet failed.\n' + ' Underlying Errors:\n' + ' Description: Failed to initialize simulator device set.\n' + ' Failure Reason: Failed to subscribe to notifications from' + ' CoreSimulatorService.\n' + ' Underlying Errors:\n' + ' Description: Error returned in reply to notification ' + 'request: Connection invalid\n' + ' Failure Reason: Software caused connection abort\n' + '/* com.apple.actool.compilation-results */\n', + self.relative_paths)) + + +if __name__ == '__main__': + unittest.main() diff --git a/config/ios/config.gni b/config/ios/config.gni new file mode 100644 index 000000000000..c5c10c3f7e5c --- /dev/null +++ b/config/ios/config.gni @@ -0,0 +1,23 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +declare_args() { + # Configure the environment for which to build. Could be either "device", + # "simulator" or "catalyst". If unspecified, then it will be assumed to be + # "simulator" if the target_cpu is "x68" or "x64", "device" otherwise. The + # default is only there for compatibility reasons and will be removed (see + # crbug.com/1138425 for more details). 
+  target_environment = ""
+}
+
+if (target_environment == "") {
+  if (current_cpu == "x86" || current_cpu == "x64") {
+    target_environment = "simulator"
+  } else {
+    target_environment = "device"
+  }
+}
+
+assert(target_environment == "simulator" || target_environment == "device" ||
+       target_environment == "catalyst")
diff --git a/config/ios/dummy.py b/config/ios/dummy.py
new file mode 100644
index 000000000000..e88c7888e226
--- /dev/null
+++ b/config/ios/dummy.py
@@ -0,0 +1,15 @@
+# Copyright 2017 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Empty script that does nothing and returns a success exit code.
+
+This script is used by some gn targets that pretend to create some output
+but instead depend on another target creating the output indirectly (in
+general this output is a directory that is used as input by a bundle_data
+target).
+
+It ignores all parameters and terminates with a success exit code. It
+does the same thing as the unix command "true", but gn can only invoke
+python scripts.
+"""
diff --git a/config/ios/entitlements.plist b/config/ios/entitlements.plist
new file mode 100644
index 000000000000..429762e3a3f5
--- /dev/null
+++ b/config/ios/entitlements.plist
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+	<key>application-identifier</key>
+	<string>$(AppIdentifierPrefix)$(CFBundleIdentifier)</string>
+	<key>keychain-access-groups</key>
+	<array>
+		<string>$(AppIdentifierPrefix)$(CFBundleIdentifier)</string>
+	</array>
+</dict>
+</plist>
diff --git a/config/ios/find_signing_identity.py b/config/ios/find_signing_identity.py
new file mode 100644
index 000000000000..37b3284e9792
--- /dev/null
+++ b/config/ios/find_signing_identity.py
@@ -0,0 +1,90 @@
+# Copyright 2015 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
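+
+"""Prints the codesigning identity matching a given pattern.
+
+The identity is parsed out of `xcrun security find-identity -v -p codesigning`
+output, whose lines look like (values made up):
+
+  1) 0123456789ABCDEF0123456789ABCDEF01234567 "Apple Development: Jane Doe (ABCDE12345)"
+"""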
+ + +import argparse +import os +import subprocess +import sys +import re + + +def Redact(value, from_nth_char=5): + """Redact value past the N-th character.""" + return value[:from_nth_char] + '*' * (len(value) - from_nth_char) + + +class Identity(object): + """Represents a valid identity.""" + + def __init__(self, identifier, name, team): + self.identifier = identifier + self.name = name + self.team = team + + def redacted(self): + return Identity(Redact(self.identifier), self.name, Redact(self.team)) + + def format(self): + return '%s: "%s (%s)"' % (self.identifier, self.name, self.team) + + +def ListIdentities(): + return subprocess.check_output([ + 'xcrun', + 'security', + 'find-identity', + '-v', + '-p', + 'codesigning', + ]).decode('utf8') + + +def FindValidIdentity(pattern): + """Find all identities matching the pattern.""" + lines = list(l.strip() for l in ListIdentities().splitlines()) + # Look for something like + # 1) 123ABC123ABC123ABC****** "iPhone Developer: DeveloperName (Team)" + regex = re.compile('[0-9]+\) ([A-F0-9]+) "([^"(]*) \(([^)"]*)\)"') + + result = [] + for line in lines: + res = regex.match(line) + if res is None: + continue + identifier, developer_name, team = res.groups() + if pattern is None or pattern in '%s (%s)' % (developer_name, team): + result.append(Identity(identifier, developer_name, team)) + return result + + +def Main(args): + parser = argparse.ArgumentParser('codesign iOS bundles') + parser.add_argument('--matching-pattern', + dest='pattern', + help='Pattern used to select the code signing identity.') + parsed = parser.parse_args(args) + + identities = FindValidIdentity(parsed.pattern) + if len(identities) == 1: + print(identities[0].identifier, end='') + return 0 + + all_identities = FindValidIdentity(None) + + print('Automatic code signing identity selection was enabled but could not') + print('find exactly one codesigning identity matching "%s".' % parsed.pattern) + print('') + print('Check that the keychain is accessible and that there is exactly one') + print('valid codesigning identity matching the pattern. Here is the parsed') + print('output of `xcrun security find-identity -v -p codesigning`:') + print() + for i, identity in enumerate(all_identities): + print(' %d) %s' % (i + 1, identity.redacted().format())) + print(' %d valid identities found' % (len(all_identities))) + return 1 + + +if __name__ == '__main__': + sys.exit(Main(sys.argv[1:])) diff --git a/config/ios/generate_umbrella_header.py b/config/ios/generate_umbrella_header.py new file mode 100644 index 000000000000..943c49c4df27 --- /dev/null +++ b/config/ios/generate_umbrella_header.py @@ -0,0 +1,75 @@ +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Generates an umbrella header for an iOS framework.""" + +import argparse +import datetime +import os +import re +import string + + +HEADER_TEMPLATE = string.Template('''\ +// Copyright $year The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// +// This file is auto-generated by //build/ios/config/generate_umbrella_header.py + +#ifndef $header_guard +#define $header_guard + +$imports + +#endif // $header_guard +''') + + +def ComputeHeaderGuard(file_path): + """Computes the header guard for a file path. + + Args: + file_path: The path to convert into an header guard. + Returns: + The header guard string for the file_path. 
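+
+  For example, "ios/chrome/foo.h" becomes "IOS_CHROME_FOO_H_".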
+  """
+  return re.sub(r'[.+/\\]', r'_', file_path.upper()) + '_'
+
+
+def WriteUmbrellaHeader(output_path, imported_headers):
+  """Writes the umbrella header.
+
+  Args:
+    output_path: The path to the umbrella header.
+    imported_headers: A list of headers to #import in the umbrella header.
+  """
+  year = datetime.date.today().year
+  header_guard = ComputeHeaderGuard(output_path)
+  imports = '\n'.join([
+      '#import "%s"' % os.path.basename(header)
+      for header in sorted(imported_headers)
+  ])
+  with open(output_path, 'w') as output_file:
+    output_file.write(
+        HEADER_TEMPLATE.safe_substitute({
+            'year': year,
+            'header_guard': header_guard,
+            'imports': imports,
+        }))
+
+
+def Main():
+  parser = argparse.ArgumentParser(description=__doc__)
+  parser.add_argument('--output-path', required=True, type=str,
+                      help='Path to the generated umbrella header.')
+  parser.add_argument('imported_headers', type=str, nargs='+',
+                      help='Headers to #import in the umbrella header.')
+  options = parser.parse_args()
+
+  return WriteUmbrellaHeader(options.output_path, options.imported_headers)
+
+
+if __name__ == '__main__':
+  Main()
diff --git a/config/ios/hardlink.py b/config/ios/hardlink.py
new file mode 100644
index 000000000000..7f1be597808e
--- /dev/null
+++ b/config/ios/hardlink.py
@@ -0,0 +1,71 @@
+# Copyright 2017 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Recursively create a hardlink to target named output."""
+
+
+import argparse
+import os
+import shutil
+
+
+def CreateHardlinkHelper(target, output):
+  """Recursively create a hardlink named output pointing to target.
+
+  Args:
+    target: path to an existing file or directory
+    output: path to the newly created hardlink
+
+  This function assumes that output does not exist but that the parent
+  directory containing output does. If those conditions are false, then
+  the function will fail with an exception corresponding to an OS error.
+  """
+  if os.path.islink(target):
+    os.symlink(os.readlink(target), output)
+  elif not os.path.isdir(target):
+    try:
+      os.link(target, output)
+    except OSError:
+      shutil.copy(target, output)
+  else:
+    os.mkdir(output)
+    for name in os.listdir(target):
+      CreateHardlinkHelper(
+          os.path.join(target, name),
+          os.path.join(output, name))
+
+
+def CreateHardlink(target, output):
+  """Recursively create a hardlink named output pointing to target.
+
+  Args:
+    target: path to an existing file or directory
+    output: path to the newly created hardlink
+
+  If output already exists, it is first removed. In all cases, the
+  parent directory containing output is created.
+  """
+  if os.path.isdir(output):
+    shutil.rmtree(output)
+  elif os.path.exists(output):
+    os.unlink(output)
+
+  parent_dir = os.path.dirname(os.path.abspath(output))
+  if not os.path.isdir(parent_dir):
+    os.makedirs(parent_dir)
+
+  CreateHardlinkHelper(target, output)
+
+
+def Main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('target', help='path to the file or directory to link to')
+  parser.add_argument('output', help='name of the hardlink to create')
+  args = parser.parse_args()
+
+  CreateHardlink(args.target, args.output)
+
+
+if __name__ == '__main__':
+  Main()
diff --git a/config/ios/ios_sdk.gni b/config/ios/ios_sdk.gni
new file mode 100644
index 000000000000..14174696849c
--- /dev/null
+++ b/config/ios/ios_sdk.gni
@@ -0,0 +1,147 @@
+# Copyright 2015 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/ios/config.gni")
+import("//build/config/ios/ios_sdk_overrides.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/rbe.gni")
+import("//build/toolchain/toolchain.gni")
+import("//build_overrides/build.gni")
+
+assert(current_os == "ios")
+assert(use_system_xcode, "Hermetic xcode doesn't work for ios.")
+
+declare_args() {
+  # SDK path to use. When empty this will use the default SDK based on the
+  # value of target_environment.
+  ios_bin_path = ""
+  ios_sdk_path = ""
+  ios_sdk_name = ""
+  ios_sdk_version = ""
+  ios_sdk_platform = ""
+  ios_sdk_platform_path = ""
+  ios_toolchains_path = ""
+  xcode_version = ""
+  xcode_version_int = 0
+  xcode_build = ""
+  machine_os_build = ""
+
+  # Set DEVELOPER_DIR while running sdk_info.py.
+  ios_sdk_developer_dir = ""
+
+  # Control whether codesigning is enabled (ignored for simulator builds).
+  ios_enable_code_signing = true
+
+  # Explicitly select the identity to use for codesigning. If defined, must
+  # be set to a non-empty string that will be passed to codesigning. Can be
+  # left unspecified if ios_code_signing_identity_description is used instead.
+  ios_code_signing_identity = ""
+
+  # Pattern used to select the identity to use for codesigning. If defined,
+  # must be a substring of the description of exactly one of the identities
+  # reported by `security find-identity -v -p codesigning`.
+  ios_code_signing_identity_description = "Apple Development"
+
+  # Prefix for CFBundleIdentifier property of iOS bundles (corresponds to the
+  # "Organization Identifier" in Xcode). Code signing will fail if no mobile
+  # provisioning profile for the selected identity supports that prefix.
+  ios_app_bundle_id_prefix = "org.chromium.ost"
+
+  # Paths to the mobileprovision files for the chosen code signing
+  # identity description and app bundle id prefix.
+  ios_mobileprovision_files = []
+
+  # Set to true if all test apps should use the same bundle id.
+  ios_use_shared_bundle_id_for_test_apps = true
+}
+
+# If codesigning is enabled, you must configure either a codesigning identity
+# or a filter to automatically select the codesigning identity.
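The automatic selection mentioned here is performed later in this file via exec_script. As a rough standalone sketch of what that call amounts to (the pattern value is only an example; paths are relative to this directory):

    import subprocess
    import sys

    # Roughly what exec_script("find_signing_identity.py", ..., "trim string")
    # does: on success the script prints a single identity hash to stdout.
    output = subprocess.check_output([
        sys.executable, 'find_signing_identity.py',
        '--matching-pattern', 'Apple Development',
    ]).decode('utf8').strip()
    print(output)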
+if (target_environment == "device" && ios_enable_code_signing) { + assert(ios_code_signing_identity == "" || + ios_code_signing_identity_description == "", + "You should either specify the precise identity to use with " + + "ios_code_signing_identity or let the code select an identity " + + "automatically (via find_signing_identity.py which use the " + + "variable ios_code_signing_identity_description to set the " + + "pattern to match the identity to use).") +} + +if (ios_sdk_path == "") { + # Compute default target. + if (target_environment == "simulator") { + ios_sdk_name = "iphonesimulator" + ios_sdk_platform = "iPhoneSimulator" + } else if (target_environment == "device") { + ios_sdk_name = "iphoneos" + ios_sdk_platform = "iPhoneOS" + } else if (target_environment == "catalyst") { + ios_sdk_name = "macosx" + ios_sdk_platform = "MacOSX" + } else { + assert(false, "unsupported environment: $target_environment") + } + + ios_sdk_info_args = [ + "--get_sdk_info", + "--get_machine_info", + ] + ios_sdk_info_args += [ ios_sdk_name ] + if (ios_sdk_developer_dir != "") { + ios_sdk_info_args += [ + "--developer_dir", + ios_sdk_developer_dir, + ] + } + if (use_system_xcode && (use_goma || use_remoteexec)) { + ios_sdk_info_args += [ + "--create_symlink_at", + "sdk/xcode_links", + "--root_build_dir", + root_build_dir, + ] + } + script_name = "//build/config/apple/sdk_info.py" + _ios_sdk_result = exec_script(script_name, ios_sdk_info_args, "scope") + ios_bin_path = + rebase_path("${_ios_sdk_result.toolchains_path}/usr/bin/", root_build_dir) + ios_sdk_path = _ios_sdk_result.sdk_path + ios_sdk_platform_path = _ios_sdk_result.sdk_platform_path + ios_sdk_version = _ios_sdk_result.sdk_version + ios_sdk_build = _ios_sdk_result.sdk_build + ios_toolchains_path = _ios_sdk_result.toolchains_path + xcode_version = _ios_sdk_result.xcode_version + xcode_version_int = _ios_sdk_result.xcode_version_int + xcode_build = _ios_sdk_result.xcode_build + machine_os_build = _ios_sdk_result.machine_os_build + if (target_environment == "simulator") { + # This is weird, but Xcode sets DTPlatformBuild to an empty field for + # simulator builds. + ios_platform_build = "" + } else { + ios_platform_build = ios_sdk_build + } +} + +if (target_environment == "device" && ios_enable_code_signing) { + # Automatically select a codesigning identity if no identity is configured. + # This only applies to device build as simulator builds are not signed. + if (ios_code_signing_identity == "") { + find_signing_identity_args = [] + if (ios_code_signing_identity_description != "") { + find_signing_identity_args = [ + "--matching-pattern", + ios_code_signing_identity_description, + ] + } + ios_code_signing_identity = exec_script("find_signing_identity.py", + find_signing_identity_args, + "trim string") + } +} + +if (ios_use_shared_bundle_id_for_test_apps) { + shared_bundle_id_for_test_apps = + "$ios_app_bundle_id_prefix.chrome.unittests.dev" +} diff --git a/config/ios/ios_sdk_overrides.gni b/config/ios/ios_sdk_overrides.gni new file mode 100644 index 000000000000..a2373c6c9d23 --- /dev/null +++ b/config/ios/ios_sdk_overrides.gni @@ -0,0 +1,17 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file contains arguments that subprojects may choose to override. It +# asserts that those overrides are used, to prevent unused args warnings. + +declare_args() { + # Version of iOS that we're targeting. 
+ ios_deployment_target = "15.0" +} + +# Always assert that ios_deployment_target is used on non-iOS platforms to +# prevent unused args warnings. +if (!is_ios) { + assert(ios_deployment_target == "15.0" || true) +} diff --git a/config/ios/ios_test_runner_wrapper.gni b/config/ios/ios_test_runner_wrapper.gni new file mode 100644 index 000000000000..378323c4f5c8 --- /dev/null +++ b/config/ios/ios_test_runner_wrapper.gni @@ -0,0 +1,152 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/coverage/coverage.gni") +import("//build/config/ios/ios_sdk.gni") +import("//build/util/generate_wrapper.gni") + +# Invokes generate_wrapper to create an executable script wrapping iOS' +# run.py with baked in arguments. Only takes effect when test entry in +# gn_isolate_map.pyl is updated to type="generated_script" with script +# set to the wrapper output path. +# +# Arguments: +# +# data +# (optional, default [ "//ios/build/bots/scripts/" ]) list of files or +# directories required to run target +# +# data_deps +# (optional) list of target non-linked labels +# +# deps +# (optional) list of files or directories required to run target +# +# executable_args +# (optional) a list of string arguments to pass to run.py +# +# retries +# (optional, default 3) number of retry attempts +# +# shards +# (optional, default 1) number of shards to execute tests in parallel. not +# the same as swarming shards. +# +# wrapper_output_name +# (optional, default "run_${target_name}") name of the wrapper script +# +template("ios_test_runner_wrapper") { + generate_wrapper(target_name) { + forward_variables_from(invoker, + [ + "deps", + "retries", + "shards", + "wrapper_output_name", + ]) + testonly = true + executable = "//testing/test_env.py" + + # iOS main test runner + _runner_path = + rebase_path("//ios/build/bots/scripts/run.py", root_build_dir) + + executable_args = [ "@WrappedPath(${_runner_path})" ] + + # arguments passed to run.py + if (defined(invoker.executable_args)) { + executable_args += invoker.executable_args + } + + _rebased_mac_toolchain = rebase_path("//mac_toolchain", root_build_dir) + _rebased_xcode_path = rebase_path("//Xcode.app", root_build_dir) + _rebased_ios_runtime_cache_prefix = + rebase_path("//Runtime-ios-", root_build_dir) + + # --out-dir argument is specified in gn_isolate_map.pyl because + # ${ISOLATED_OUTDIR} doesn't get resolved through this wrapper. + executable_args += [ + "--xcode-path", + "@WrappedPath(${_rebased_xcode_path})", + "--mac-toolchain-cmd", + "@WrappedPath(${_rebased_mac_toolchain})", + "--runtime-cache-prefix", + "@WrappedPath(${_rebased_ios_runtime_cache_prefix})", + ] + + # Default retries to 3 + if (!defined(retries)) { + retries = 3 + } + executable_args += [ + "--retries", + "${retries}", + ] + + # Default shards to 1 + if (!defined(shards)) { + shards = 1 + } + executable_args += [ + "--shards", + "${shards}", + ] + + if (xcode_version_int >= 1400) { + executable_args += [ + "--readline-timeout", + "600", + ] + } + + data_deps = [ "//testing:test_scripts_shared" ] + if (defined(invoker.data_deps)) { + data_deps += invoker.data_deps + } + + # test runner relies on iossim for simulator builds. 
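To make the flag plumbing above concrete, here is a rough sketch of the kind of command line the generated wrapper ends up running, with placeholder paths and the default retries/shards values (none of these exact paths are guaranteed by this template):

    # Hypothetical expansion of the wrapper produced by ios_test_runner_wrapper.
    cmd = [
        'testing/test_env.py',
        'ios/build/bots/scripts/run.py',
        '--xcode-path', 'Xcode.app',
        '--mac-toolchain-cmd', 'mac_toolchain',
        '--runtime-cache-prefix', 'Runtime-ios-',
        '--retries', '3',
        '--shards', '1',
    ]
    print(' '.join(cmd))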
+    if (target_environment == "simulator") {
+      _rebased_root_build_dir = rebase_path("${root_build_dir}", root_build_dir)
+      data_deps += [ "//testing/iossim" ]
+
+      executable_args += [
+        "--iossim",
+        "@WrappedPath(${_rebased_root_build_dir}/iossim)",
+      ]
+    }
+
+    if (use_clang_coverage) {
+      executable_args += [ "--use-clang-coverage" ]
+    }
+
+    if (!is_debug) {
+      executable_args += [ "--release" ]
+    }
+
+    # wrapper script output name and path
+    if (!defined(wrapper_output_name)) {
+      _wrapper_output_name = "run_${target_name}"
+    } else {
+      _wrapper_output_name = wrapper_output_name
+    }
+
+    wrapper_script = "${root_build_dir}/bin/${_wrapper_output_name}"
+
+    data = []
+    if (defined(invoker.data)) {
+      data += invoker.data
+    }
+    data += [
+      "//ios/build/bots/scripts/",
+      "//ios/build/bots/scripts/plugin",
+
+      # gRPC interface for iOS test plugin
+      "//ios/testing/plugin",
+
+      # Variations test utilities used by variations_runner script.
+      "//testing/scripts/variations_seed_access_helper.py",
+      "//testing/test_env.py",
+    ]
+  }
+}
diff --git a/config/ios/ios_test_runner_xcuitest.gni b/config/ios/ios_test_runner_xcuitest.gni
new file mode 100644
index 000000000000..6aeb08b1fb34
--- /dev/null
+++ b/config/ios/ios_test_runner_xcuitest.gni
@@ -0,0 +1,72 @@
+# Copyright 2021 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/ios/ios_test_runner_wrapper.gni")
+import("//build/config/ios/rules.gni")
+
+# An ios_test_runner_xcuitest is just an ios_xcuitest_test wrapped with an
+# ios_test_runner_wrapper. It is currently used by the Crashpad tests, which
+# do not depend on EG2 (and therefore do not use ios_eg2_test).
+template("ios_test_runner_xcuitest") {
+  assert(defined(invoker.xcode_test_application_name),
+         "xcode_test_application_name must be defined for $target_name")
+  assert(
+      defined(invoker.deps),
+      "deps must be defined for $target_name to include at least one xctest" +
+          " file.")
+
+  _target_name = target_name
+  _test_target = "${target_name}_test"
+  ios_xcuitest_test(_test_target) {
+    forward_variables_from(invoker,
+                           [
+                             "xcode_test_application_name",
+                             "xctest_bundle_principal_class",
+                             "bundle_deps",
+                             "deps",
+                             "data_deps",
+                           ])
+
+    # TODO(crbug.com/1056328): Because we change the target name, the subnodes
+    # get "_test" appended to their names, which won't be backwards compatible
+    # during the migration from the iOS recipe to Chromium.
+    output_name = "${_target_name}"
+  }
+
+  ios_test_runner_wrapper(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data",
+                             "data_deps",
+                             "deps",
+                             "executable_args",
+                             "retries",
+                             "shards",
+                             "xcode_test_application_name",
+                           ])
+    _root_build_dir = rebase_path("${root_build_dir}", root_build_dir)
+
+    if (!defined(data_deps)) {
+      data_deps = []
+    }
+
+    # Include the top ios_test_runner_xcuitest target, and the host app
+    data_deps += [ ":${_test_target}" ]
+
+    if (!defined(executable_args)) {
+      executable_args = []
+    }
+
+    # The xcuitest module is bundled as *-Runner.app, while the host app is
+    # bundled as *.app.
+ executable_args += [ + "--app", + "@WrappedPath(${_root_build_dir}/${target_name}-Runner.app)", + ] + executable_args += [ + "--host-app", + "@WrappedPath(${_root_build_dir}/${xcode_test_application_name}.app)", + ] + } +} diff --git a/config/ios/resources/XCTRunnerAddition+Info.plist b/config/ios/resources/XCTRunnerAddition+Info.plist new file mode 100644 index 000000000000..ed26f55d1636 --- /dev/null +++ b/config/ios/resources/XCTRunnerAddition+Info.plist @@ -0,0 +1,12 @@ + + + + + CFBundleIdentifier + ${BUNDLE_IDENTIFIER} + CFBundleName + ${PRODUCT_NAME} + CFBundleExecutable + ${EXECUTABLE_NAME} + + diff --git a/config/ios/rules.gni b/config/ios/rules.gni new file mode 100644 index 000000000000..c6d40925c4c9 --- /dev/null +++ b/config/ios/rules.gni @@ -0,0 +1,1870 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/apple/apple_info_plist.gni") +import("//build/config/apple/symbols.gni") +import("//build/config/compiler/compiler.gni") +import("//build/config/ios/ios_sdk.gni") +import("//build/toolchain/goma.gni") +import("//build/toolchain/rbe.gni") +import("//build/toolchain/toolchain.gni") +import("//build_overrides/build.gni") + +# Constants corresponding to the bundle type identifiers use application, +# application extension, XCTest and XCUITest targets respectively. +_ios_xcode_app_bundle_id = "com.apple.product-type.application" +_ios_xcode_appex_bundle_id = "com.apple.product-type.app-extension" +_ios_xcode_xctest_bundle_id = "com.apple.product-type.bundle.unit-test" +_ios_xcode_xcuitest_bundle_id = "com.apple.product-type.bundle.ui-testing" + +# Wrapper around create_bundle taking care of code signature settings. +# +# Arguments +# +# product_type +# string, product type for the generated Xcode project. +# +# bundle_gen_dir +# (optional) directory where the bundle is generated; must be below +# root_out_dir and defaults to root_out_dir if omitted. +# +# bundle_deps +# (optional) list of additional dependencies. +# +# bundle_deps_filter +# (optional) list of dependencies to filter (for more information +# see "gn help bundle_deps_filter"). +# +# bundle_extension +# string, extension of the bundle, used to generate bundle name. +# +# bundle_binary_target +# (optional) string, label of the target generating the bundle main +# binary. This target and bundle_binary_path are mutually exclusive. +# +# bundle_binary_output +# (optional) string, base name of the binary generated by the +# bundle_binary_target target, defaults to the target name. +# +# bundle_binary_path +# (optional) string, path to the bundle main binary. This target and +# bundle_binary_target are mutually exclusive. +# +# output_name: +# (optional) string, name of the generated application, if omitted, +# defaults to the target_name. +# +# extra_system_frameworks +# (optional) list of system framework to copy to the bundle. +# +# enable_code_signing +# (optional) boolean, control whether code signing is enabled or not, +# default to ios_enable_code_signing if not defined. +# +# entitlements_path: +# (optional) path to the template to use to generate the application +# entitlements by performing variable substitutions, defaults to +# //build/config/ios/entitlements.plist. +# +# entitlements_target: +# (optional) label of the target generating the application +# entitlements (must generate a single file as output); cannot be +# defined if entitlements_path is set. 
+#
+#   has_public_headers:
+#       (optional) boolean, defaults to false; only meaningful if the bundle
+#       is a framework bundle; if true, then the framework includes public
+#       headers
+#
+#   disable_entitlements
+#       (optional, defaults to false) boolean, controls whether entitlements
+#       will be embedded in the application during signing. If false and no
+#       entitlements are provided, default empty entitlements will be used.
+#
+#   disable_embedded_mobileprovision
+#       (optional, defaults to false) boolean, controls whether mobile
+#       provisions will be embedded in the bundle. If true, the existing
+#       embedded.mobileprovision will be deleted.
+#
+#   xcode_extra_attributes
+#       (optional) scope, extra attributes for Xcode projects.
+#
+#   xcode_test_application_name:
+#       (optional) string, name of the test application for Xcode unit or ui
+#       test target.
+#
+#   xcode_product_bundle_id:
+#       (optional) string, the bundle ID that will be added in the Xcode
+#       attributes to enable some features when debugging (e.g. MetricKit).
+#
+#   primary_info_plist:
+#       (optional) path to Info.plist to merge with the $partial_info_plist
+#       generated by the compilation of the asset catalog.
+#
+#   partial_info_plist:
+#       (optional) path to the partial Info.plist generated by the asset
+#       catalog compiler; if defined $primary_info_plist must also be defined.
+#
+template("create_signed_bundle") {
+  assert(defined(invoker.product_type),
+         "product_type must be defined for $target_name")
+  assert(defined(invoker.bundle_extension),
+         "bundle_extension must be defined for $target_name")
+  assert(defined(invoker.bundle_binary_target) !=
+             defined(invoker.bundle_binary_path),
+         "Only one of bundle_binary_target or bundle_binary_path may be " +
+             "specified for $target_name")
+  assert(!defined(invoker.partial_info_plist) ||
+             defined(invoker.primary_info_plist),
+         "primary_info_plist must be defined when partial_info_plist is " +
+             "defined for $target_name")
+
+  if (defined(invoker.xcode_test_application_name)) {
+    assert(
+        invoker.product_type == _ios_xcode_xctest_bundle_id ||
+            invoker.product_type == _ios_xcode_xcuitest_bundle_id,
+        "xcode_test_application_name can only be defined for Xcode unit or ui test target.")
+  }
+
+  _target_name = target_name
+  _output_name = target_name
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  if (defined(invoker.bundle_binary_path)) {
+    _bundle_binary_path = invoker.bundle_binary_path
+  } else {
+    _bundle_binary_target = invoker.bundle_binary_target
+    _bundle_binary_output = get_label_info(_bundle_binary_target, "name")
+    if (defined(invoker.bundle_binary_output)) {
+      _bundle_binary_output = invoker.bundle_binary_output
+    }
+    _bundle_binary_path =
+        get_label_info(_bundle_binary_target, "target_out_dir") +
+        "/$_bundle_binary_output"
+  }
+
+  _bundle_gen_dir = root_out_dir
+  if (defined(invoker.bundle_gen_dir)) {
+    _bundle_gen_dir = invoker.bundle_gen_dir
+  }
+
+  _bundle_extension = invoker.bundle_extension
+
+  _enable_embedded_mobileprovision = true
+  if (defined(invoker.disable_embedded_mobileprovision)) {
+    _enable_embedded_mobileprovision = !invoker.disable_embedded_mobileprovision
+  }
+
+  if (target_environment == "catalyst") {
+    _enable_embedded_mobileprovision = false
+  }
+
+  _enable_entitlements = true
+  if (defined(invoker.disable_entitlements)) {
+    _enable_entitlements = !invoker.disable_entitlements
+  }
+
+  if (_enable_entitlements) {
+    if (!defined(invoker.entitlements_target)) {
+      _entitlements_path = "//build/config/ios/entitlements.plist"
+      if
(defined(invoker.entitlements_path)) { + _entitlements_path = invoker.entitlements_path + } + } else { + assert(!defined(invoker.entitlements_path), + "Cannot define both entitlements_path and entitlements_target " + + "for $target_name") + + _entitlements_target_outputs = + get_target_outputs(invoker.entitlements_target) + _entitlements_path = _entitlements_target_outputs[0] + } + } + + _enable_code_signing = ios_enable_code_signing + if (defined(invoker.enable_code_signing)) { + _enable_code_signing = invoker.enable_code_signing + } + + create_bundle(_target_name) { + forward_variables_from(invoker, + [ + "bundle_deps_filter", + "data_deps", + "deps", + "partial_info_plist", + "product_type", + "public_configs", + "public_deps", + "testonly", + "visibility", + "xcode_test_application_name", + ]) + + bundle_root_dir = "$_bundle_gen_dir/$_output_name$_bundle_extension" + if (target_environment == "simulator" || target_environment == "device") { + bundle_contents_dir = bundle_root_dir + bundle_resources_dir = bundle_contents_dir + bundle_executable_dir = bundle_contents_dir + } else if (target_environment == "catalyst") { + if (_bundle_extension != ".framework") { + bundle_contents_dir = "$bundle_root_dir/Contents" + bundle_resources_dir = "$bundle_contents_dir/Resources" + bundle_executable_dir = "$bundle_contents_dir/MacOS" + } else { + bundle_contents_dir = "$bundle_root_dir/Versions/A" + bundle_resources_dir = "$bundle_contents_dir/Resources" + bundle_executable_dir = bundle_contents_dir + } + } + + if (!defined(public_deps)) { + public_deps = [] + } + + _bundle_identifier = "" + if (defined(invoker.xcode_product_bundle_id)) { + _bundle_identifier = invoker.xcode_product_bundle_id + assert(_bundle_identifier == string_replace(_bundle_identifier, "_", "-"), + "$target_name: bundle_identifier does not respect rfc1034: " + + _bundle_identifier) + } + + xcode_extra_attributes = { + IPHONEOS_DEPLOYMENT_TARGET = ios_deployment_target + PRODUCT_BUNDLE_IDENTIFIER = _bundle_identifier + CODE_SIGNING_REQUIRED = "NO" + CODE_SIGNING_ALLOWED = "NO" + CODE_SIGN_IDENTITY = "" + DONT_GENERATE_INFOPLIST_FILE = "YES" + + # If invoker has defined extra attributes, they override the defaults. 
+ if (defined(invoker.xcode_extra_attributes)) { + forward_variables_from(invoker.xcode_extra_attributes, "*") + } + } + + if (defined(invoker.bundle_binary_target)) { + public_deps += [ invoker.bundle_binary_target ] + } + + if (defined(invoker.bundle_deps)) { + if (!defined(deps)) { + deps = [] + } + deps += invoker.bundle_deps + } + if (!defined(deps)) { + deps = [] + } + + code_signing_script = "//build/config/ios/codesign.py" + code_signing_sources = [ _bundle_binary_path ] + if (_enable_entitlements) { + if (defined(invoker.entitlements_target)) { + deps += [ invoker.entitlements_target ] + } + code_signing_sources += [ _entitlements_path ] + } + code_signing_outputs = [ "$bundle_executable_dir/$_output_name" ] + if (_enable_code_signing) { + code_signing_outputs += + [ "$bundle_contents_dir/_CodeSignature/CodeResources" ] + } + if (ios_code_signing_identity != "" && target_environment == "device" && + _enable_embedded_mobileprovision) { + code_signing_outputs += + [ "$bundle_contents_dir/embedded.mobileprovision" ] + } + if (_bundle_extension == ".framework") { + if (target_environment == "catalyst") { + code_signing_outputs += [ + "$bundle_root_dir/Versions/Current", + "$bundle_root_dir/$_output_name", + ] + + if (defined(invoker.has_public_headers) && invoker.has_public_headers) { + code_signing_outputs += [ + "$bundle_root_dir/Headers", + "$bundle_root_dir/Modules", + ] + } + } else { + not_needed(invoker, [ "has_public_headers" ]) + } + } + + if (defined(invoker.extra_system_frameworks)) { + foreach(_framework, invoker.extra_system_frameworks) { + code_signing_outputs += [ "$bundle_contents_dir/Frameworks/" + + get_path_info(_framework, "file") ] + } + } + + code_signing_args = [ + "code-sign-bundle", + "-t=" + ios_sdk_name, + "-i=" + ios_code_signing_identity, + "-b=" + rebase_path(_bundle_binary_path, root_build_dir), + ] + foreach(mobileprovision, ios_mobileprovision_files) { + code_signing_args += + [ "-m=" + rebase_path(mobileprovision, root_build_dir) ] + } + code_signing_sources += ios_mobileprovision_files + if (_enable_entitlements) { + code_signing_args += + [ "-e=" + rebase_path(_entitlements_path, root_build_dir) ] + } + if (!_enable_embedded_mobileprovision) { + code_signing_args += [ "--disable-embedded-mobileprovision" ] + } + code_signing_args += [ rebase_path(bundle_root_dir, root_build_dir) ] + if (!_enable_code_signing) { + code_signing_args += [ "--disable-code-signature" ] + } + if (defined(invoker.extra_system_frameworks)) { + # All framework in extra_system_frameworks are expected to be system + # framework and the path to be already system absolute so do not use + # rebase_path here unless using Goma RBE and system Xcode (as in that + # case the system framework are found via a symlink in root_build_dir). 
+ foreach(_framework, invoker.extra_system_frameworks) { + if (use_system_xcode && (use_goma || use_remoteexec)) { + _framework_path = rebase_path(_framework, root_build_dir) + } else { + _framework_path = _framework + } + code_signing_args += [ "-F=$_framework_path" ] + } + } + if (defined(invoker.partial_info_plist)) { + _partial_info_plists = [ + invoker.primary_info_plist, + invoker.partial_info_plist, + ] + + _plist_compiler_path = "//build/apple/plist_util.py" + + code_signing_sources += _partial_info_plists + code_signing_sources += [ _plist_compiler_path ] + if (target_environment != "catalyst" || + _bundle_extension != ".framework") { + code_signing_outputs += [ "$bundle_contents_dir/Info.plist" ] + } else { + code_signing_outputs += [ "$bundle_resources_dir/Info.plist" ] + } + + code_signing_args += + [ "-P=" + rebase_path(_plist_compiler_path, root_build_dir) ] + foreach(_partial_info_plist, _partial_info_plists) { + code_signing_args += + [ "-p=" + rebase_path(_partial_info_plist, root_build_dir) ] + } + } + } +} + +# Generates Info.plist files for Mac apps and frameworks. +# +# Arguments +# +# info_plist: +# (optional) string, path to the Info.plist file that will be used for +# the bundle. +# +# info_plist_target: +# (optional) string, if the info_plist is generated from an action, +# rather than a regular source file, specify the target name in lieu +# of info_plist. The two arguments are mutually exclusive. +# +# executable_name: +# string, name of the generated target used for the product +# and executable name as specified in the output Info.plist. +# +# extra_substitutions: +# (optional) string array, 'key=value' pairs for extra fields which are +# specified in a source Info.plist template. +template("ios_info_plist") { + assert(defined(invoker.info_plist) != defined(invoker.info_plist_target), + "Only one of info_plist or info_plist_target may be specified in " + + target_name) + + if (defined(invoker.info_plist)) { + _info_plist = invoker.info_plist + } else { + _info_plist_target_output = get_target_outputs(invoker.info_plist_target) + _info_plist = _info_plist_target_output[0] + } + + apple_info_plist(target_name) { + format = "binary1" + extra_substitutions = [ + "IOS_BUNDLE_ID_PREFIX=$ios_app_bundle_id_prefix", + "IOS_PLATFORM_BUILD=$ios_platform_build", + "IOS_PLATFORM_NAME=$ios_sdk_name", + "IOS_PLATFORM_VERSION=$ios_sdk_version", + "IOS_SDK_BUILD=$ios_sdk_build", + "IOS_SDK_NAME=$ios_sdk_name$ios_sdk_version", + "IOS_SUPPORTED_PLATFORM=$ios_sdk_platform", + "BUILD_MACHINE_OS_BUILD=$machine_os_build", + "IOS_DEPLOYMENT_TARGET=$ios_deployment_target", + "XCODE_BUILD=$xcode_build", + "XCODE_VERSION=$xcode_version", + ] + if (defined(invoker.extra_substitutions)) { + extra_substitutions += invoker.extra_substitutions + } + plist_templates = [ + "//build/config/ios/BuildInfo.plist", + _info_plist, + ] + if (defined(invoker.info_plist_target)) { + deps = [ invoker.info_plist_target ] + } + forward_variables_from(invoker, + [ + "executable_name", + "output_name", + "visibility", + "testonly", + ]) + } +} + +# Template to build an application bundle for iOS. +# +# This should be used instead of "executable" built-in target type on iOS. +# As the template forward the generation of the application executable to +# an "executable" target, all arguments supported by "executable" targets +# are also supported by this template. +# +# Arguments +# +# output_name: +# (optional) string, name of the generated application, if omitted, +# defaults to the target_name. 
+# +# extra_substitutions: +# (optional) list of string in "key=value" format, each value will +# be used as an additional variable substitution rule when generating +# the application Info.plist +# +# info_plist: +# (optional) string, path to the Info.plist file that will be used for +# the bundle. +# +# info_plist_target: +# (optional) string, if the info_plist is generated from an action, +# rather than a regular source file, specify the target name in lieu +# of info_plist. The two arguments are mutually exclusive. +# +# entitlements_path: +# (optional) path to the template to use to generate the application +# entitlements by performing variable substitutions, defaults to +# //build/config/ios/entitlements.plist. +# +# entitlements_target: +# (optional) label of the target generating the application +# entitlements (must generate a single file as output); cannot be +# defined if entitlements_path is set. +# +# product_type +# (optional) string, product type for the generated Xcode project, +# default to "com.apple.product-type.application". Should only be +# overriden when building application extension. +# +# enable_code_signing +# (optional) boolean, control whether code signing is enabled or not, +# default to ios_enable_code_signing if not defined. +# +# variants +# (optional) list of scopes, each scope needs to define the attributes +# "name" and "bundle_deps"; if defined and non-empty, then one bundle +# named $target_out_dir/$variant/$output_name will be created for each +# variant with the same binary but the correct bundle_deps, the bundle +# at $target_out_dir/$output_name will be a copy of the first variant. +# +# bundle_identifier: +# (optional) string, value of CFBundleIdentifier in the application +# Info.plist, defaults to "$ios_app_bundle_id_prefix.$output_name" +# if omitted. Will be used to set BUNDLE_IDENTIFIER when generating +# the application Info.plist +# +# For more information, see "gn help executable". +template("ios_app_bundle") { + _output_name = target_name + _target_name = target_name + if (defined(invoker.output_name)) { + _output_name = invoker.output_name + } + + assert( + !defined(invoker.bundle_extension), + "bundle_extension must not be set for ios_app_bundle template for $target_name") + + if (defined(invoker.bundle_identifier)) { + _bundle_identifier = invoker.bundle_identifier + assert(_bundle_identifier == string_replace(_bundle_identifier, "_", "-"), + "$target_name: bundle_identifier does not respect rfc1034: " + + _bundle_identifier) + } else { + # Bundle identifier should respect rfc1034, so replace "_" with "-". + _bundle_identifier = + "$ios_app_bundle_id_prefix." + string_replace(_output_name, "_", "-") + } + + if (defined(invoker.variants) && invoker.variants != []) { + _variants = [] + + foreach(_variant, invoker.variants) { + assert(defined(_variant.name) && _variant.name != "", + "name must be defined for all $target_name variants") + + assert(defined(_variant.bundle_deps), + "bundle_deps must be defined for all $target_name variants") + + _variants += [ + { + name = _variant.name + bundle_deps = _variant.bundle_deps + target_name = "${_target_name}_variants_${_variant.name}" + bundle_gen_dir = "$root_out_dir/variants/${_variant.name}" + }, + ] + } + } else { + # If no variants are passed to the template, use a fake variant with + # no name to avoid duplicating code. As no variant can have an empty + # name except this fake variant, it is possible to know if a variant + # is fake or not. 
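The rfc1034 sanitization described above is a plain character substitution; in Python terms (the target name here is invented, and the prefix is the default from ios_sdk.gni):

    # GN's string_replace(_output_name, "_", "-") corresponds to str.replace.
    ios_app_bundle_id_prefix = 'org.chromium.ost'  # default from ios_sdk.gni
    output_name = 'my_app_bundle'                  # hypothetical target name
    bundle_identifier = '%s.%s' % (ios_app_bundle_id_prefix,
                                   output_name.replace('_', '-'))
    print(bundle_identifier)  # org.chromium.ost.my-app-bundle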
+ _variants = [ + { + name = "" + bundle_deps = [] + target_name = _target_name + bundle_gen_dir = root_out_dir + }, + ] + } + + _default_variant = _variants[0] + + _executable_target = _target_name + "_executable" + _generate_entitlements_target = _target_name + "_gen_entitlements" + _generate_entitlements_output = + get_label_info(":$_generate_entitlements_target", "target_out_dir") + + "/$_output_name.xcent" + + _product_type = _ios_xcode_app_bundle_id + if (defined(invoker.product_type)) { + _product_type = invoker.product_type + } + + if (_product_type == _ios_xcode_app_bundle_id) { + _bundle_extension = ".app" + } else if (_product_type == _ios_xcode_appex_bundle_id) { + _bundle_extension = ".appex" + } else { + assert(false, "unknown product_type \"$product_type\" for $_target_name") + } + + _is_app_bundle = _product_type == _ios_xcode_app_bundle_id + + executable(_executable_target) { + forward_variables_from(invoker, + "*", + [ + "bundle_deps", + "bundle_deps_filter", + "bundle_extension", + "enable_code_signing", + "entitlements_path", + "entitlements_target", + "extra_substitutions", + "extra_system_frameworks", + "info_plist", + "info_plist_target", + "output_name", + "product_type", + "visibility", + "xcode_extra_attributes", + ]) + + visibility = [] + foreach(_variant, _variants) { + visibility += [ ":${_variant.target_name}" ] + } + + if (!defined(frameworks)) { + frameworks = [] + } + frameworks += [ "UIKit.framework" ] + + if (target_environment == "simulator") { + if (!defined(deps)) { + deps = [] + } + deps += [ ":$_generate_entitlements_target" ] + + if (!defined(inputs)) { + inputs = [] + } + inputs += [ _generate_entitlements_output ] + + if (!defined(ldflags)) { + ldflags = [] + } + ldflags += [ "-Wl,-sectcreate,__TEXT,__entitlements," + + rebase_path(_generate_entitlements_output, root_build_dir) ] + } + + output_name = _output_name + output_prefix_override = true + output_dir = target_out_dir + } + + _generate_info_plist = target_name + "_generate_info_plist" + ios_info_plist(_generate_info_plist) { + forward_variables_from(invoker, + [ + "info_plist", + "info_plist_target", + ]) + + executable_name = _output_name + + extra_substitutions = [ "BUNDLE_IDENTIFIER=$_bundle_identifier" ] + if (defined(invoker.extra_substitutions)) { + extra_substitutions += invoker.extra_substitutions + } + } + + if (!defined(invoker.entitlements_target)) { + _entitlements_path = "//build/config/ios/entitlements.plist" + if (defined(invoker.entitlements_path)) { + _entitlements_path = invoker.entitlements_path + } + } else { + assert(!defined(invoker.entitlements_path), + "Cannot define both entitlements_path and entitlements_target" + + "for $_target_name") + + _entitlements_target_outputs = + get_target_outputs(invoker.entitlements_target) + _entitlements_path = _entitlements_target_outputs[0] + } + + action(_generate_entitlements_target) { + _gen_info_plist_outputs = get_target_outputs(":$_generate_info_plist") + _info_plist_path = _gen_info_plist_outputs[0] + + script = "//build/config/ios/codesign.py" + deps = [ ":$_generate_info_plist" ] + if (defined(invoker.entitlements_target)) { + deps += [ invoker.entitlements_target ] + } + sources = [ + _entitlements_path, + _info_plist_path, + ] + sources += ios_mobileprovision_files + + outputs = [ _generate_entitlements_output ] + + args = [ + "generate-entitlements", + "-e=" + rebase_path(_entitlements_path, root_build_dir), + "-p=" + rebase_path(_info_plist_path, root_build_dir), + ] + foreach(mobileprovision, 
ios_mobileprovision_files) { + args += [ "-m=" + rebase_path(mobileprovision, root_build_dir) ] + } + args += rebase_path(outputs, root_build_dir) + } + + # Only write PkgInfo for real application, not application extension. + if (_is_app_bundle) { + _create_pkg_info = target_name + "_pkg_info" + action(_create_pkg_info) { + forward_variables_from(invoker, [ "testonly" ]) + script = "//build/apple/write_pkg_info.py" + inputs = [ "//build/apple/plist_util.py" ] + sources = get_target_outputs(":$_generate_info_plist") + outputs = [ + # Cannot name the output PkgInfo as the name will not be unique if + # multiple ios_app_bundle are defined in the same BUILD.gn file. The + # file is renamed in the bundle_data outputs to the correct name. + "$target_gen_dir/$target_name", + ] + args = [ "--plist" ] + rebase_path(sources, root_build_dir) + + [ "--output" ] + rebase_path(outputs, root_build_dir) + deps = [ ":$_generate_info_plist" ] + } + + _bundle_data_pkg_info = target_name + "_bundle_data_pkg_info" + bundle_data(_bundle_data_pkg_info) { + forward_variables_from(invoker, [ "testonly" ]) + sources = get_target_outputs(":$_create_pkg_info") + outputs = [ "{{bundle_resources_dir}}/PkgInfo" ] + public_deps = [ ":$_create_pkg_info" ] + } + } + + foreach(_variant, _variants) { + create_signed_bundle(_variant.target_name) { + forward_variables_from(invoker, + [ + "bundle_deps", + "bundle_deps_filter", + "data_deps", + "deps", + "enable_code_signing", + "entitlements_path", + "entitlements_target", + "extra_system_frameworks", + "public_configs", + "public_deps", + "testonly", + "visibility", + "xcode_extra_attributes", + ]) + + output_name = _output_name + bundle_gen_dir = _variant.bundle_gen_dir + bundle_binary_target = ":$_executable_target" + bundle_binary_output = _output_name + bundle_extension = _bundle_extension + product_type = _product_type + xcode_product_bundle_id = _bundle_identifier + + _generate_info_plist_outputs = + get_target_outputs(":$_generate_info_plist") + primary_info_plist = _generate_info_plist_outputs[0] + partial_info_plist = + "$target_gen_dir/${_variant.target_name}_partial_info.plist" + + if (!defined(deps)) { + deps = [] + } + deps += [ ":$_generate_info_plist" ] + + if (!defined(bundle_deps)) { + bundle_deps = [] + } + if (_is_app_bundle) { + bundle_deps += [ ":$_bundle_data_pkg_info" ] + } + bundle_deps += _variant.bundle_deps + + if (target_environment == "simulator") { + if (!defined(data_deps)) { + data_deps = [] + } + if (build_with_chromium) { + data_deps += [ "//testing/iossim" ] + } + } + } + } + + if (_default_variant.name != "") { + _bundle_short_name = "$_output_name$_bundle_extension" + action(_target_name) { + forward_variables_from(invoker, [ "testonly" ]) + + script = "//build/config/ios/hardlink.py" + public_deps = [] + foreach(_variant, _variants) { + public_deps += [ ":${_variant.target_name}" ] + } + + sources = [ "${_default_variant.bundle_gen_dir}/$_bundle_short_name" ] + outputs = [ "$root_out_dir/$_bundle_short_name" ] + + args = rebase_path(sources, root_build_dir) + + rebase_path(outputs, root_build_dir) + } + } +} + +set_defaults("ios_app_bundle") { + configs = default_executable_configs +} + +# Template to build an application extension bundle for iOS. +# +# This should be used instead of "executable" built-in target type on iOS. +# As the template forward the generation of the application executable to +# an "executable" target, all arguments supported by "executable" targets +# are also supported by this template. 
+# +# Arguments +# +# output_name: +# (optional) string, name of the generated application, if omitted, +# defaults to the target_name. +# +# extra_substitutions: +# (optional) list of string in "key=value" format, each value will +# be used as an additional variable substitution rule when generating +# the application Info.plist +# +# info_plist: +# (optional) string, path to the Info.plist file that will be used for +# the bundle. +# +# info_plist_target: +# (optional) string, if the info_plist is generated from an action, +# rather than a regular source file, specify the target name in lieu +# of info_plist. The two arguments are mutually exclusive. +# +# For more information, see "gn help executable". +template("ios_appex_bundle") { + ios_app_bundle(target_name) { + forward_variables_from(invoker, + "*", + [ + "bundle_extension", + "product_type", + ]) + product_type = _ios_xcode_appex_bundle_id + } +} + +set_defaults("ios_appex_bundle") { + configs = [ "//build/config/ios:ios_extension_executable_flags" ] +} + +# Template to compile .xib and .storyboard files. +# +# Arguments +# +# sources: +# list of string, sources to compile +# +# ibtool_flags: +# (optional) list of string, additional flags to pass to the ibtool +template("compile_ib_files") { + action_foreach(target_name) { + forward_variables_from(invoker, + [ + "testonly", + "visibility", + ]) + assert(defined(invoker.sources), + "sources must be specified for $target_name") + assert(defined(invoker.output_extension), + "output_extension must be specified for $target_name") + + ibtool_flags = [] + if (defined(invoker.ibtool_flags)) { + ibtool_flags = invoker.ibtool_flags + } + + _output_extension = invoker.output_extension + + script = "//build/config/ios/compile_ib_files.py" + sources = invoker.sources + outputs = [ + "$target_gen_dir/$target_name/{{source_name_part}}.$_output_extension", + ] + args = [ + "--input", + "{{source}}", + "--output", + rebase_path( + "$target_gen_dir/$target_name/{{source_name_part}}.$_output_extension", + root_build_dir), + ] + args += ibtool_flags + } +} + +# Compile a xib or storyboard file and add it to a bundle_data so that it is +# available at runtime in the bundle. +# +# Arguments +# +# source: +# string, path of the xib or storyboard to compile. +# +# Forwards all variables to the bundle_data target. 
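As a concrete illustration of the compile_ib_files action above, one action_foreach iteration for a storyboard reduces to an invocation along these lines (the input and output paths are invented; the ibtool flags mirror the ones bundle_data_ib_file passes below):

    # Hypothetical expansion of a single compile_ib_files iteration.
    cmd = [
        'python3', 'build/config/ios/compile_ib_files.py',
        '--input', 'ios/chrome/app/resources/Main.storyboard',
        '--output', 'gen/Main.storyboardc',
        '--minimum-deployment-target', '15.0',
        '--auto-activate-custom-fonts',
        '--target-device', 'iphone',
        '--target-device', 'ipad',
    ]
    print(' '.join(cmd))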
+template("bundle_data_ib_file") { + assert(defined(invoker.source), "source needs to be defined for $target_name") + + _source_extension = get_path_info(invoker.source, "extension") + assert(_source_extension == "xib" || _source_extension == "storyboard", + "source must be a .xib or .storyboard for $target_name") + + _target_name = target_name + if (_source_extension == "xib") { + _compile_ib_file = target_name + "_compile_xib" + _output_extension = "nib" + } else { + _compile_ib_file = target_name + "_compile_storyboard" + _output_extension = "storyboardc" + } + + compile_ib_files(_compile_ib_file) { + sources = [ invoker.source ] + output_extension = _output_extension + visibility = [ ":$_target_name" ] + ibtool_flags = [ + "--minimum-deployment-target", + ios_deployment_target, + "--auto-activate-custom-fonts", + "--target-device", + "iphone", + "--target-device", + "ipad", + ] + } + + bundle_data(_target_name) { + forward_variables_from(invoker, "*", [ "source" ]) + + if (!defined(public_deps)) { + public_deps = [] + } + public_deps += [ ":$_compile_ib_file" ] + + sources = get_target_outputs(":$_compile_ib_file") + + outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] + } +} + +# Compile a strings file and add it to a bundle_data so that it is available +# at runtime in the bundle. +# +# Arguments +# +# source: +# string, path of the strings file to compile. +# +# output: +# string, path of the compiled file in the final bundle. +# +# Forwards all variables to the bundle_data target. +template("bundle_data_strings") { + assert(defined(invoker.source), "source needs to be defined for $target_name") + assert(defined(invoker.output), "output needs to be defined for $target_name") + + _source_extension = get_path_info(invoker.source, "extension") + assert(_source_extension == "strings", + "source must be a .strings for $target_name") + + _target_name = target_name + _convert_target = target_name + "_compile_strings" + + convert_plist(_convert_target) { + visibility = [ ":$_target_name" ] + source = invoker.source + output = + "$target_gen_dir/$_target_name/" + get_path_info(invoker.source, "file") + format = "binary1" + } + + bundle_data(_target_name) { + forward_variables_from(invoker, + "*", + [ + "source", + "output", + ]) + + if (!defined(public_deps)) { + public_deps = [] + } + public_deps += [ ":$_convert_target" ] + + sources = get_target_outputs(":$_convert_target") + + outputs = [ invoker.output ] + } +} + +# Template to package a shared library into an iOS framework bundle. +# +# By default, the bundle target this template generates does not link the +# resulting framework into anything that depends on it. If a dependency wants +# a link-time (as well as build-time) dependency on the framework bundle, +# depend against "$target_name+link". If only the build-time dependency is +# required (e.g., for copying into another bundle), then use "$target_name". +# +# Arguments +# +# output_name: +# (optional) string, name of the generated framework without the +# .framework suffix. If omitted, defaults to target_name. +# +# public_headers: +# (optional) list of paths to header file that needs to be copied +# into the framework bundle Headers subdirectory. If omitted or +# empty then the Headers subdirectory is not created. +# +# sources +# (optional) list of files. Needs to be defined and non-empty if +# public_headers is defined and non-empty. 
+# +# enable_code_signing +# (optional) boolean, control whether code signing is enabled or not, +# default to ios_enable_code_signing if not defined. +# +# This template provides two targets for the resulting framework bundle. The +# link-time behavior varies depending on which of the two targets below is +# added as a dependency: +# - $target_name only adds a build-time dependency. Targets that depend on +# it will not link against the framework. +# - $target_name+link adds a build-time and link-time dependency. Targets +# that depend on it will link against the framework. +# +# The build-time-only dependency is used for when a target needs to use the +# framework either only for resources, or because the target loads it at run- +# time, via dlopen() or NSBundle. The link-time dependency will cause the +# dependee to have the framework loaded by dyld at launch. +# +# Example of build-time only dependency: +# +# framework_bundle("CoreTeleportation") { +# sources = [ ... ] +# } +# +# bundle_data("core_teleportation_bundle_data") { +# deps = [ ":CoreTeleportation" ] +# sources = [ "$root_out_dir/CoreTeleportation.framework" ] +# outputs = [ "{{bundle_contents_dir}}/Frameworks/{{source_file_part}}" ] +# } +# +# app_bundle("GoatTeleporter") { +# sources = [ ... ] +# deps = [ +# ":core_teleportation_bundle_data", +# ] +# } +# +# The GoatTeleporter.app will not directly link against +# CoreTeleportation.framework, but it will be included in the bundle's +# Frameworks directory. +# +# Example of link-time dependency: +# +# framework_bundle("CoreTeleportation") { +# sources = [ ... ] +# ldflags = [ +# "-install_name", +# "@executable_path/../Frameworks/$target_name.framework" +# ] +# } +# +# bundle_data("core_teleportation_bundle_data") { +# deps = [ ":CoreTeleportation+link" ] +# sources = [ "$root_out_dir/CoreTeleportation.framework" ] +# outputs = [ "{{bundle_contents_dir}}/Frameworks/{{source_file_part}}" ] +# } +# +# app_bundle("GoatTeleporter") { +# sources = [ ... ] +# deps = [ +# ":core_teleportation_bundle_data", +# ] +# } +# +# Note that the framework is still copied to the app's bundle, but dyld will +# load this library when the app is launched because it uses the "+link" +# target as a dependency. This also requires that the framework set its +# install_name so that dyld can locate it. +# +# See "gn help shared_library" for more information on arguments supported +# by shared library target. 
+template("ios_framework_bundle") { + _target_name = target_name + _output_name = target_name + if (defined(invoker.output_name)) { + _output_name = invoker.output_name + } + + _has_public_headers = + defined(invoker.public_headers) && invoker.public_headers != [] + + _shared_library_target = _target_name + "_shared_library" + _link_target_name = _target_name + "+link" + + if (_has_public_headers) { + _default_toolchain_target_gen_dir = + get_label_info("$_target_name", "target_gen_dir") + + _framework_headers_target = _target_name + "_framework_headers" + + _headers_map_config = _target_name + "_headers_map" + _header_map_filename = + "$_default_toolchain_target_gen_dir/$_output_name.headers.hmap" + config(_headers_map_config) { + visibility = [ + ":${_shared_library_target}", + ":${_target_name}_signed_bundle", + ] + include_dirs = [ _header_map_filename ] + } + } + + _framework_headers_config = _target_name + "_framework_headers_config" + config(_framework_headers_config) { + framework_dirs = [ root_out_dir ] + } + + _framework_public_config = _target_name + "_public_config" + config(_framework_public_config) { + configs = [ ":$_framework_headers_config" ] + frameworks = [ "$_output_name.framework" ] + } + + shared_library(_shared_library_target) { + forward_variables_from(invoker, + "*", + [ + "bundle_deps", + "bundle_deps_filter", + "data_deps", + "enable_code_signing", + "extra_substitutions", + "info_plist", + "info_plist_target", + "output_name", + "public_configs", + "visibility", + ]) + + visibility = [ ":${_target_name}_signed_bundle" ] + + if (!defined(ldflags)) { + ldflags = [] + } + ldflags += + [ "-Wl,-install_name,@rpath/$_output_name.framework/$_output_name" ] + + if (_has_public_headers) { + configs += [ ":$_headers_map_config" ] + + if (!defined(deps)) { + deps = [] + } + deps += [ ":$_framework_headers_target" ] + } + + output_extension = "" + output_name = _output_name + output_prefix_override = true + output_dir = target_out_dir + } + + if (_has_public_headers) { + _public_headers = invoker.public_headers + + _framework_root_dir = "$root_out_dir/$_output_name.framework" + if (target_environment == "simulator" || target_environment == "device") { + _framework_contents_dir = _framework_root_dir + } else if (target_environment == "catalyst") { + _framework_contents_dir = "$_framework_root_dir/Versions/A" + } + + _compile_headers_map_target = _target_name + "_compile_headers_map" + action(_compile_headers_map_target) { + visibility = [ ":$_framework_headers_target" ] + forward_variables_from(invoker, + [ + "deps", + "public_deps", + "testonly", + ]) + script = "//build/config/ios/write_framework_hmap.py" + outputs = [ _header_map_filename ] + + # The header map generation only wants the list of headers, not all of + # sources, so filter any non-header source files from "sources". It is + # less error prone that having the developer duplicate the list of all + # headers in addition to "sources". 
+ sources = [] + foreach(_source, invoker.sources) { + if (get_path_info(_source, "extension") == "h") { + sources += [ _source ] + } + } + + args = [ + rebase_path(_header_map_filename, root_build_dir), + rebase_path(_framework_root_dir, root_build_dir), + ] + rebase_path(sources, root_build_dir) + } + + _create_module_map_target = _target_name + "_module_map" + action(_create_module_map_target) { + visibility = [ ":$_framework_headers_target" ] + script = "//build/config/ios/write_framework_modulemap.py" + outputs = [ "$_framework_contents_dir/Modules/module.modulemap" ] + args = [ + _output_name, + rebase_path("$_framework_contents_dir/Modules", root_build_dir), + ] + } + + _copy_public_headers_target = _target_name + "_copy_public_headers" + copy(_copy_public_headers_target) { + forward_variables_from(invoker, + [ + "testonly", + "deps", + ]) + visibility = [ ":$_framework_headers_target" ] + sources = _public_headers + outputs = [ "$_framework_contents_dir/Headers/{{source_file_part}}" ] + + # Do not use forward_variables_from for "public_deps" as + # we do not want to forward those dependencies. + if (defined(invoker.public_deps)) { + if (!defined(deps)) { + deps = [] + } + deps += invoker.public_deps + } + } + + group(_framework_headers_target) { + forward_variables_from(invoker, [ "testonly" ]) + deps = [ + ":$_compile_headers_map_target", + ":$_create_module_map_target", + ] + public_deps = [ ":$_copy_public_headers_target" ] + } + } + + # Bundle identifier should respect rfc1034, so replace "_" with "-". + _bundle_identifier = + "$ios_app_bundle_id_prefix." + string_replace(_output_name, "_", "-") + + _info_plist_target = _target_name + "_info_plist" + _info_plist_bundle = _target_name + "_info_plist_bundle" + ios_info_plist(_info_plist_target) { + visibility = [ ":$_info_plist_bundle" ] + executable_name = _output_name + forward_variables_from(invoker, + [ + "info_plist", + "info_plist_target", + ]) + + extra_substitutions = [ "BUNDLE_IDENTIFIER=$_bundle_identifier" ] + if (defined(invoker.extra_substitutions)) { + extra_substitutions += invoker.extra_substitutions + } + } + + bundle_data(_info_plist_bundle) { + visibility = [ ":${_target_name}_signed_bundle" ] + forward_variables_from(invoker, [ "testonly" ]) + sources = get_target_outputs(":$_info_plist_target") + public_deps = [ ":$_info_plist_target" ] + + if (target_environment != "catalyst") { + outputs = [ "{{bundle_contents_dir}}/Info.plist" ] + } else { + outputs = [ "{{bundle_resources_dir}}/Info.plist" ] + } + } + + create_signed_bundle(_target_name + "_signed_bundle") { + forward_variables_from(invoker, + [ + "bundle_deps", + "bundle_deps_filter", + "data_deps", + "deps", + "enable_code_signing", + "public_configs", + "public_deps", + "testonly", + "visibility", + ]) + + product_type = "com.apple.product-type.framework" + bundle_extension = ".framework" + + output_name = _output_name + bundle_binary_target = ":$_shared_library_target" + bundle_binary_output = _output_name + + has_public_headers = _has_public_headers + + # Framework do not have entitlements nor mobileprovision because they use + # the one from the bundle using them (.app or .appex) as they are just + # dynamic library with shared code. 
+    disable_entitlements = true
+    disable_embedded_mobileprovision = true
+
+    if (!defined(deps)) {
+      deps = []
+    }
+    deps += [ ":$_info_plist_bundle" ]
+  }
+
+  group(_target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "public_configs",
+                             "public_deps",
+                             "testonly",
+                             "visibility",
+                           ])
+    if (!defined(public_deps)) {
+      public_deps = []
+    }
+    public_deps += [ ":${_target_name}_signed_bundle" ]
+
+    if (_has_public_headers) {
+      if (!defined(public_configs)) {
+        public_configs = []
+      }
+      public_configs += [ ":$_framework_headers_config" ]
+    }
+  }
+
+  group(_link_target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "public_configs",
+                             "public_deps",
+                             "testonly",
+                             "visibility",
+                           ])
+    if (!defined(public_deps)) {
+      public_deps = []
+    }
+    public_deps += [ ":$_target_name" ]
+
+    if (!defined(all_dependent_configs)) {
+      all_dependent_configs = []
+    }
+    all_dependent_configs += [ ":$_framework_public_config" ]
+  }
+
+  bundle_data(_target_name + "+bundle") {
+    forward_variables_from(invoker,
+                           [
+                             "testonly",
+                             "visibility",
+                           ])
+    public_deps = [ ":$_target_name" ]
+    sources = [ "$root_out_dir/$_output_name.framework" ]
+    outputs = [ "{{bundle_contents_dir}}/Frameworks/$_output_name.framework" ]
+  }
+}
+
+set_defaults("ios_framework_bundle") {
+  configs = default_shared_library_configs
+}
+
+# Template to build an xctest bundle that contains a loadable module for iOS.
+#
+# Arguments
+#
+#   deps:
+#       list of labels to depend on; these values are used to create the
+#       loadable module.
+#
+#   product_type
+#       string, product type for the generated Xcode project, use
+#       "com.apple.product-type.bundle.unit-test" for unit test and
+#       "com.apple.product-type.bundle.ui-testing" for UI testing.
+#
+#   host_target:
+#       string, name of the target that depends on the generated bundle; this
+#       value is used to restrict visibility.
+#
+#   xcode_test_application_name:
+#       string, name of the test application for Xcode unit or ui test target.
+#
+#   output_name
+#       (optional) string, name of the generated application, if omitted,
+#       defaults to the target_name.
+#
+# This template defines two targets, one named "${target_name}" is the xctest
+# bundle, and the other named "${target_name}_bundle" is a bundle_data that
+# wraps the xctest bundle and that only the "${host_target}" can depend on.
+# +template("ios_xctest_bundle") { + assert(defined(invoker.deps), "deps must be defined for $target_name") + assert(defined(invoker.product_type), + "product_type must be defined for $target_name") + assert(invoker.product_type == _ios_xcode_xctest_bundle_id || + invoker.product_type == _ios_xcode_xcuitest_bundle_id, + "product_type defined for $target_name is invalid.") + assert(defined(invoker.host_target), + "host_target must be defined for $target_name") + assert(defined(invoker.xcode_test_application_name), + "xcode_test_application_name must be defined for $target_name") + + _target_name = target_name + _output_name = target_name + + if (defined(invoker.output_name)) { + _output_name = invoker.output_name + } + + _loadable_module_target = _target_name + "_loadable_module" + + loadable_module(_loadable_module_target) { + forward_variables_from(invoker, + "*", + [ + "host_target", + "output_dir", + "output_extension", + "output_name", + "output_prefix_override", + "product_type", + "testonly", + "visibility", + "xcode_test_application_name", + "xcode_test_application_output_name", + "xctest_bundle_principal_class", + "bundle_deps_filter", + ]) + + testonly = true + visibility = [ ":$_target_name" ] + + configs += [ "//build/config/ios:xctest_config" ] + + output_dir = target_out_dir + output_name = _output_name + output_prefix_override = true + output_extension = "" + } + + _info_plist_target = _target_name + "_info_plist" + _info_plist_bundle = _target_name + "_info_plist_bundle" + + # Bundle identifier should respect rfc1034, so replace "_" with "-". + _bundle_identifier = "$ios_app_bundle_id_prefix.chrome." + + string_replace(_output_name, "_", "-") + + ios_info_plist(_info_plist_target) { + testonly = true + visibility = [ ":$_info_plist_bundle" ] + + info_plist = "//build/config/ios/Module-Info.plist" + executable_name = _output_name + + if (defined(invoker.xctest_bundle_principal_class)) { + _principal_class = invoker.xctest_bundle_principal_class + } else { + # Fall back to a reasonable default value. + _principal_class = "NSObject" + } + extra_substitutions = [ + "XCTEST_BUNDLE_PRINCIPAL_CLASS=${_principal_class}", + "BUNDLE_IDENTIFIER=$_bundle_identifier", + ] + } + + bundle_data(_info_plist_bundle) { + testonly = true + visibility = [ ":$_target_name" ] + + public_deps = [ ":$_info_plist_target" ] + + sources = get_target_outputs(":$_info_plist_target") + outputs = [ "{{bundle_contents_dir}}/Info.plist" ] + } + + _xctest_bundle = _target_name + "_bundle" + create_signed_bundle(_target_name) { + forward_variables_from(invoker, + [ + "bundle_id", + "data_deps", + "bundle_deps_filter", + "enable_code_signing", + "product_type", + "xcode_test_application_name", + ]) + + testonly = true + visibility = [ ":$_xctest_bundle" ] + + bundle_extension = ".xctest" + + output_name = _output_name + bundle_binary_target = ":$_loadable_module_target" + bundle_binary_output = _output_name + + xcode_extra_attributes = { + IPHONEOS_DEPLOYMENT_TARGET = ios_deployment_target + PRODUCT_BUNDLE_IDENTIFIER = _bundle_identifier + CODE_SIGNING_REQUIRED = "NO" + CODE_SIGNING_ALLOWED = "NO" + CODE_SIGN_IDENTITY = "" + DONT_GENERATE_INFOPLIST_FILE = "YES" + + # For XCUITest, Xcode requires specifying the host application name + # via the TEST_TARGET_NAME attribute. 
+ if (invoker.product_type == _ios_xcode_xcuitest_bundle_id) { + TEST_TARGET_NAME = invoker.xcode_test_application_name + } + + # For XCTest, Xcode requires specifying the host application path via + # both BUNDLE_LOADER and TEST_HOST attributes. + if (invoker.product_type == _ios_xcode_xctest_bundle_id) { + _xcode_app_name = invoker.xcode_test_application_name + if (defined(invoker.xcode_test_application_output_name)) { + _xcode_app_name = invoker.xcode_test_application_output_name + } + + BUNDLE_LOADER = "\$(TEST_HOST)" + TEST_HOST = "\$(BUILT_PRODUCTS_DIR)/" + + "${_xcode_app_name}.app/${_xcode_app_name}" + } + } + + deps = [ ":$_info_plist_bundle" ] + } + + bundle_data(_xctest_bundle) { + forward_variables_from(invoker, [ "host_target" ]) + + testonly = true + visibility = [ ":$host_target" ] + + public_deps = [ ":$_target_name" ] + sources = [ "$root_out_dir/$_output_name.xctest" ] + outputs = [ "{{bundle_contents_dir}}/PlugIns/$_output_name.xctest" ] + } +} + +set_defaults("ios_xctest_bundle") { + configs = default_shared_library_configs +} + +# For Chrome on iOS we want to run XCTests for all our build configurations +# (Debug, Release, ...). In addition, the symbols visibility is configured to +# private by default. To simplify testing with those constraints, our tests are +# compiled in the TEST_HOST target instead of the .xctest bundle. +template("ios_xctest_test") { + _target_name = target_name + _output_name = target_name + if (defined(invoker.output_name)) { + _output_name = invoker.output_name + } + + _xctest_target = _target_name + "_module" + _xctest_output = _output_name + "_module" + + _host_target = _target_name + _host_output = _output_name + + # Allow invokers to specify their own target for the xctest module, but + # fall back to a default (empty) module otherwise. + if (defined(invoker.xctest_module_target)) { + _xctest_module_target = invoker.xctest_module_target + } else { + _xctest_module_target_name = _xctest_target + "shell_source" + _xctest_module_target = ":$_xctest_module_target_name" + source_set(_xctest_module_target_name) { + sources = [ "//build/config/ios/xctest_shell.mm" ] + + configs += [ "//build/config/ios:xctest_config" ] + } + } + + ios_xctest_bundle(_xctest_target) { + forward_variables_from(invoker, [ "data_deps" ]) + output_name = _xctest_output + product_type = _ios_xcode_xctest_bundle_id + host_target = _host_target + + # TODO(crbug.com/1056328) The change in output name results in a mismatch + # between this value and the ios_app_bundle target name. To mitigate, this + # has been modified to _host_target. output_name is set to _host_output + # to mitigate the naming. + xcode_test_application_name = _host_target + xcode_test_application_output_name = _host_output + + deps = [ _xctest_module_target ] + } + + ios_app_bundle(_host_target) { + forward_variables_from(invoker, "*", [ "testonly" ]) + + testonly = true + output_name = _host_output + configs += [ "//build/config/ios:xctest_config" ] + + if (!defined(invoker.info_plist) && !defined(invoker.info_plist_target)) { + info_plist = "//build/config/ios/Host-Info.plist" + } + + # Xcode needs the following frameworks installed in the application (and + # signed) for the XCTest to run, so install them using + # extra_system_frameworks. 
+ extra_system_frameworks = [ + "$ios_sdk_platform_path/Developer/Library/Frameworks/XCTest.framework", + "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCTAutomationSupport.framework", + "$ios_sdk_platform_path/Developer/usr/lib/libXCTestBundleInject.dylib", + ] + + # Xcode 13 now depends on XCTestCore. To keep things future proof, copy over + # everything that Xcode copies. + if (xcode_version_int >= 1300) { + extra_system_frameworks += [ + "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCTestCore.framework", + "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCUIAutomation.framework", + "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCUnit.framework", + "$ios_sdk_platform_path/Developer/usr/lib/libXCTestSwiftSupport.dylib", + ] + } + + # XCTestSupport framework is required as of Xcode 14.3 or later. + if (xcode_version_int >= 1430) { + extra_system_frameworks += [ "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCTestSupport.framework" ] + } + + _xctest_bundle = _xctest_target + "_bundle" + if (!defined(bundle_deps)) { + bundle_deps = [] + } + bundle_deps += [ ":$_xctest_bundle" ] + } +} + +set_defaults("ios_xctest_test") { + configs = default_executable_configs +} + +# Template to build a xcuitest test runner bundle. +# +# Xcode requires a test runner application with a copy of the XCTest dynamic +# library bundle in it for the XCUITest to run. The test runner bundle is created +# by copying the system bundle XCTRunner.app from Xcode SDK with the plist file +# being properly tweaked, and a xctest and it needs to be code signed in order +# to run on devices. +# +# Arguments +# +# xctest_bundle +# string, name of the dependent xctest bundle target. +# +# output_name +# (optional) string, name of the generated application, if omitted, +# defaults to the target_name. +# +template("ios_xcuitest_test_runner_bundle") { + assert(defined(invoker.xctest_bundle), + "xctest_bundle must be defined for $target_name") + + _target_name = target_name + _output_name = target_name + if (defined(invoker.output_name)) { + _output_name = invoker.output_name + } + + # Bundle identifier should respect rfc1034, so replace "_" with "-". + _bundle_identifier = "$ios_app_bundle_id_prefix.chrome." + + string_replace(_output_name, "_", "-") + + _xctrunner_path = + "$ios_sdk_platform_path/Developer/Library/Xcode/Agents/XCTRunner.app" + + _info_plist_merge_plist = _target_name + "_info_plist_merge_plist" + _info_plist_target = _target_name + "_info_plist" + _info_plist_bundle = _target_name + "_info_plist_bundle" + + action(_info_plist_merge_plist) { + testonly = true + script = "//build/apple/plist_util.py" + + sources = [ + "$_xctrunner_path/Info.plist", + + # NOTE: The XCTRunnerAddition+Info.plist must come after the Info.plist + # because it overrides the values under "CFBundleIdentifier" and + # "CFBundleName". 
+ "//build/config/ios/resources/XCTRunnerAddition+Info.plist", + ] + + _output_name = "$target_gen_dir/${_target_name}_merged.plist" + outputs = [ _output_name ] + args = [ + "merge", + "-f=xml1", + "-x=$xcode_version", + "-o=" + rebase_path(_output_name, root_build_dir), + ] + rebase_path(sources, root_build_dir) + + if (use_system_xcode && (use_goma || use_remoteexec)) { + deps = [ "//build/config/ios:copy_xctrunner_app" ] + } + } + + ios_info_plist(_info_plist_target) { + testonly = true + visibility = [ ":$_info_plist_bundle" ] + + executable_name = _output_name + info_plist_target = ":$_info_plist_merge_plist" + extra_substitutions = [ "BUNDLE_IDENTIFIER=$_bundle_identifier" ] + } + + bundle_data(_info_plist_bundle) { + testonly = true + visibility = [ ":$_target_name" ] + + public_deps = [ ":$_info_plist_target" ] + + sources = get_target_outputs(":$_info_plist_target") + outputs = [ "{{bundle_contents_dir}}/Info.plist" ] + } + + _pkginfo_bundle = _target_name + "_pkginfo_bundle" + bundle_data(_pkginfo_bundle) { + testonly = true + visibility = [ ":$_target_name" ] + + sources = [ "$_xctrunner_path/PkgInfo" ] + + outputs = [ "{{bundle_contents_dir}}/PkgInfo" ] + + if (use_system_xcode && (use_goma || use_remoteexec)) { + public_deps = [ "//build/config/ios:copy_xctrunner_app" ] + } + } + + _xctest_bundle = invoker.xctest_bundle + create_signed_bundle(_target_name) { + testonly = true + + bundle_binary_target = "//build/config/ios:xctest_runner_without_arm64e" + bundle_binary_output = "XCTRunner" + bundle_extension = ".app" + product_type = _ios_xcode_app_bundle_id + + output_name = _output_name + + # Xcode needs the following frameworks installed in the application + # (and signed) for the XCUITest to run, so install them using + # extra_system_frameworks. + extra_system_frameworks = [ + "$ios_sdk_platform_path/Developer/Library/Frameworks/XCTest.framework", + "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCTAutomationSupport.framework", + ] + + # Xcode 13 now depends on XCTestCore. To keep things future proof, copy over + # everything that Xcode copies. + if (xcode_version_int >= 1300) { + extra_system_frameworks += [ + "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCTestCore.framework", + "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCUIAutomation.framework", + "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCUnit.framework", + "$ios_sdk_platform_path/Developer/usr/lib/libXCTestSwiftSupport.dylib", + ] + } + + # XCTestSupport framework is required as of Xcode 14.3 or later. + if (xcode_version_int >= 1430) { + extra_system_frameworks += [ "$ios_sdk_platform_path/Developer/Library/PrivateFrameworks/XCTestSupport.framework" ] + } + + bundle_deps = [] + if (defined(invoker.bundle_deps)) { + bundle_deps += invoker.bundle_deps + } + bundle_deps += [ + ":$_info_plist_bundle", + ":$_pkginfo_bundle", + ":$_xctest_bundle", + ] + } +} + +# Template to build a XCUITest that consists of two parts: the test runner +# application bundle and the xctest dynamic library. +# +# Arguments +# +# deps: +# list of labels to depends on, these values are used to create the +# xctest dynamic library. +# +# xcode_test_application_name: +# string, name of the test application for the ui test target. 
+# +# runner_only_bundle_deps: +# list of labels of bundle target to include in the runner and +# exclude from the test module (the use case is a framework bundle +# that is used by the test module and thus needs to be packaged in +# the runner application bundle) +# +# This template defines two targets, one named "${target_name}_module" is the +# xctest dynamic library, and the other named "${target_name}_runner" is the +# test runner application bundle. +# +template("ios_xcuitest_test") { + assert(defined(invoker.deps), "deps must be defined for $target_name") + assert(defined(invoker.xcode_test_application_name), + "xcode_test_application_name must be defined for $target_name") + + _xcuitest_target = target_name + if (defined(invoker.output_name)) { + _xcuitest_target = invoker.output_name + } + + _xcuitest_runner_target = _xcuitest_target + "_runner" + _xcuitest_module_target = _xcuitest_target + "_module" + + group(target_name) { + testonly = true + + deps = [ ":$_xcuitest_runner_target" ] + } + + _xcuitest_module_output = _xcuitest_target + ios_xctest_bundle(_xcuitest_module_target) { + forward_variables_from(invoker, + [ + "xcode_test_application_name", + "xctest_bundle_principal_class", + "data_deps", + ]) + + product_type = _ios_xcode_xcuitest_bundle_id + host_target = _xcuitest_runner_target + output_name = _xcuitest_module_output + + deps = invoker.deps + + if (defined(invoker.runner_only_bundle_deps)) { + bundle_deps_filter = invoker.runner_only_bundle_deps + } + } + + _xcuitest_runner_output = _xcuitest_target + "-Runner" + ios_xcuitest_test_runner_bundle(_xcuitest_runner_target) { + output_name = _xcuitest_runner_output + xctest_bundle = _xcuitest_module_target + "_bundle" + forward_variables_from(invoker, [ "bundle_deps" ]) + + if (defined(invoker.runner_only_bundle_deps)) { + if (!defined(bundle_deps)) { + bundle_deps = [] + } + bundle_deps += invoker.runner_only_bundle_deps + } + } +} + +set_defaults("ios_xcuitest_test") { + configs = default_executable_configs +} diff --git a/config/ios/strip_arm64e.py b/config/ios/strip_arm64e.py new file mode 100644 index 000000000000..56e684fd58c5 --- /dev/null +++ b/config/ios/strip_arm64e.py @@ -0,0 +1,70 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
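+#
+# A hypothetical invocation (paths illustrative):
+#   python3 strip_arm64e.py --input gen/Foo --output Foo --xcode-version 1430
+# copies the binary unchanged unless `lipo -info` reports an arm64e slice,
+# in which case that slice is removed.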
+"""Strip arm64e architecture from a binary if present.""" + +import argparse +import os +import shutil +import subprocess +import sys + + +def check_output(command): + """Returns the output from |command| or propagates error, quitting script.""" + process = subprocess.Popen( + command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + outs, errs = process.communicate() + if process.returncode: + sys.stderr.write('error: command failed with retcode %d: %s\n\n' % + (process.returncode, ' '.join(map(repr, command)))) + sys.stderr.write(errs.decode('UTF-8', errors='ignore')) + sys.exit(process.returncode) + return outs.decode('UTF-8') + + +def check_call(command): + """Invokes |command| or propagates error.""" + check_output(command) + + +def parse_args(args): + """Parses the command-line.""" + parser = argparse.ArgumentParser() + parser.add_argument('--input', required=True, help='Path to input binary') + parser.add_argument('--output', required=True, help='Path to output binary') + parser.add_argument('--xcode-version', required=True, help='Version of Xcode') + return parser.parse_args(args) + + +def get_archs(path): + """Extracts the architectures present in binary at |path|.""" + outputs = check_output(["xcrun", "lipo", "-info", os.path.abspath(path)]) + return outputs.split(': ')[-1].split() + + +def main(args): + parsed = parse_args(args) + + outdir = os.path.dirname(parsed.output) + if not os.path.isdir(outdir): + os.makedirs(outdir) + + if os.path.exists(parsed.output): + os.unlink(parsed.output) + + # As "lipo" fails with an error if asked to remove an architecture that is + # not included, only use it if "arm64e" is present in the binary. Otherwise + # simply copy the file. + if 'arm64e' in get_archs(parsed.input): + check_output([ + "xcrun", "lipo", "-remove", "arm64e", "-output", + os.path.abspath(parsed.output), + os.path.abspath(parsed.input) + ]) + else: + shutil.copy(parsed.input, parsed.output) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/config/ios/swift_source_set.gni b/config/ios/swift_source_set.gni new file mode 100644 index 000000000000..0f5cc0764fb2 --- /dev/null +++ b/config/ios/swift_source_set.gni @@ -0,0 +1,25 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Defines a template for Swift source files. The default module_name +# of the target is the entire target label (without the leading //) +# with all "/" and ":" replaced with "_". +template("swift_source_set") { + _target_name = target_name + source_set(target_name) { + forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + if (!defined(module_name)) { + _target_label = get_label_info(":$_target_name", "label_no_toolchain") + + # Strip the // from the beginning of the label. + _target_label = string_replace(_target_label, "//", "", 1) + module_name = + string_replace(string_replace(_target_label, "/", "_"), ":", "_") + } + } +} +set_defaults("swift_source_set") { + configs = default_compiler_configs +} diff --git a/config/ios/write_framework_hmap.py b/config/ios/write_framework_hmap.py new file mode 100644 index 000000000000..88892534981a --- /dev/null +++ b/config/ios/write_framework_hmap.py @@ -0,0 +1,102 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ + +import os +import struct +import sys + +def Main(args): + if len(args) < 4: + print( + "Usage: %s output.hmap Foo.framework header1.h..." % args[0], + file=sys.stderr) + return 1 + + (out, framework, all_headers) = args[1], args[2], args[3:] + + framework_name = os.path.basename(framework).split('.')[0] + all_headers = map(os.path.abspath, all_headers) + filelist = {} + for header in all_headers: + filename = os.path.basename(header) + filelist[filename] = header + filelist[os.path.join(framework_name, filename)] = header + WriteHmap(out, filelist) + return 0 + + +def NextGreaterPowerOf2(x): + return 2**(x).bit_length() + + +def WriteHmap(output_name, filelist): + """Generates a header map based on |filelist|. + + Per Mark Mentovai: + A header map is structured essentially as a hash table, keyed by names used + in #includes, and providing pathnames to the actual files. + + The implementation below and the comment above comes from inspecting: + http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt + while also looking at the implementation in clang in: + https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp + """ + magic = 1751998832 + version = 1 + _reserved = 0 + count = len(filelist) + capacity = NextGreaterPowerOf2(count) + strings_offset = 24 + (12 * capacity) + max_value_length = len(max(filelist.values(), key=lambda v: len(v))) + + out = open(output_name, 'wb') + out.write(struct.pack(' +#import + +// For Chrome on iOS we want to run EarlGrey tests (that are XCTests) for all +// our build configurations (Debug, Release, ...). In addition, the symbols +// visibility is configured to private by default. To simplify testing with +// those constraints, our tests are compiled in the TEST_HOST target instead +// of the .xctest bundle that all link against this single test (just there to +// ensure that the bundle is not empty). + +@interface XCTestShellEmptyClass : NSObject +@end + +@implementation XCTestShellEmptyClass +@end diff --git a/config/linux/BUILD.gn b/config/linux/BUILD.gn new file mode 100644 index 000000000000..131bb71d1d3b --- /dev/null +++ b/config/linux/BUILD.gn @@ -0,0 +1,70 @@ +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/c++/c++.gni") +import("//build/config/chromeos/ui_mode.gni") +import("//build/config/linux/pkg_config.gni") +import("//build/config/ui.gni") + +group("linux") { + visibility = [ "//:optimize_gn_gen" ] +} + +# This is included by reference in the //build/config/compiler config that +# is applied to all targets. It is here to separate out the logic that is +# Linux-only. This is not applied to Android, but is applied to ChromeOS. +config("compiler") { + if (current_cpu == "arm64") { + import("//build/config/arm.gni") + cflags = [] + asmflags = [] + if (arm_control_flow_integrity == "standard") { + cflags += [ "-mbranch-protection=standard" ] + asmflags += [ "-mbranch-protection=standard" ] + } else if (arm_control_flow_integrity == "pac") { + cflags += [ "-mbranch-protection=pac-ret" ] + asmflags += [ "-mbranch-protection=pac-ret" ] + } + } +} + +# This is included by reference in the //build/config/compiler:runtime_library +# config that is applied to all targets. It is here to separate out the logic +# that is Linux-only. Please see that target for advice on what should go in +# :runtime_library vs. :compiler. 
+config("runtime_library") { + # Set here because OS_CHROMEOS cannot be autodetected in build_config.h like + # OS_LINUX and the like. + if (is_chromeos) { + defines = [ "OS_CHROMEOS" ] + } + + if ((!is_chromeos || default_toolchain != "//build/toolchain/cros:target") && + (!use_custom_libcxx || current_cpu == "mipsel")) { + libs = [ "atomic" ] + } +} + +config("libcap") { + libs = [ "cap" ] +} + +config("libresolv") { + libs = [ "resolv" ] +} + +if (use_glib) { + pkg_config("glib") { + packages = [ + "glib-2.0", + "gmodule-2.0", + "gobject-2.0", + "gthread-2.0", + ] + defines = [ + "GLIB_VERSION_MAX_ALLOWED=GLIB_VERSION_2_56", + "GLIB_VERSION_MIN_REQUIRED=GLIB_VERSION_2_56", + ] + } +} diff --git a/config/linux/OWNERS b/config/linux/OWNERS new file mode 100644 index 000000000000..280ba478dca6 --- /dev/null +++ b/config/linux/OWNERS @@ -0,0 +1 @@ +thomasanderson@chromium.org diff --git a/config/linux/atk/BUILD.gn b/config/linux/atk/BUILD.gn new file mode 100644 index 000000000000..239c3870a149 --- /dev/null +++ b/config/linux/atk/BUILD.gn @@ -0,0 +1,35 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/chromeos/ui_mode.gni") +import("//build/config/features.gni") +import("//build/config/linux/pkg_config.gni") +import("//build/config/ui.gni") + +# CrOS doesn't install GTK or any gnome packages. +assert(!is_chromeos) + +# These packages should _only_ be expected when building for a target. +assert(current_toolchain == default_toolchain) + +if (use_atk) { + assert(use_glib, "use_atk=true requires that use_glib=true") +} + +pkg_config("atk") { + packages = [ + "atk", + "atk-bridge-2.0", + ] + atk_lib_dir = exec_script(pkg_config_script, + common_pkg_config_args + pkg_config_args + [ + "--libdir", + "atk", + ], + "string") + defines = [ + "ATK_LIB_DIR=\"$atk_lib_dir\"", + "USE_ATK_BRIDGE", + ] +} diff --git a/config/linux/atspi2/BUILD.gn b/config/linux/atspi2/BUILD.gn new file mode 100644 index 000000000000..51b6d33aab3c --- /dev/null +++ b/config/linux/atspi2/BUILD.gn @@ -0,0 +1,29 @@ +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/linux/pkg_config.gni") +import("//build/config/ui.gni") + +# These packages should _only_ be expected when building for a target. +assert(current_toolchain == default_toolchain) + +if (use_atk) { + pkg_config("atspi2") { + packages = [ "atspi-2" ] + atspi_version = exec_script(pkg_config_script, + common_pkg_config_args + pkg_config_args + [ + "atspi-2", + "--version-as-components", + ], + "value") + atspi_major_version = atspi_version[0] + atspi_minor_version = atspi_version[1] + atspi_micro_version = atspi_version[2] + defines = [ + "ATSPI_MAJOR_VERSION=$atspi_major_version", + "ATSPI_MINOR_VERSION=$atspi_minor_version", + "ATSPI_MICRO_VERSION=$atspi_micro_version", + ] + } +} diff --git a/config/linux/dbus/BUILD.gn b/config/linux/dbus/BUILD.gn new file mode 100644 index 000000000000..2414c3416737 --- /dev/null +++ b/config/linux/dbus/BUILD.gn @@ -0,0 +1,14 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import("//build/config/features.gni") +import("//build/config/linux/pkg_config.gni") + +assert(use_dbus) + +# Note: if your target also depends on //dbus, you don't need to add this +# config (it will get added automatically if you depend on //dbus). +pkg_config("dbus") { + packages = [ "dbus-1" ] +} diff --git a/config/linux/dri/BUILD.gn b/config/linux/dri/BUILD.gn new file mode 100644 index 000000000000..e3a0a83a99fe --- /dev/null +++ b/config/linux/dri/BUILD.gn @@ -0,0 +1,18 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/linux/pkg_config.gni") + +assert(is_linux, "This file should only be referenced on Linux") + +pkg_config("dri") { + packages = [ "dri" ] + dri_driver_dir = exec_script(pkg_config_script, + common_pkg_config_args + pkg_config_args + [ + "--dridriverdir", + "dri", + ], + "string") + defines = [ "DRI_DRIVER_DIR=\"$dri_driver_dir\"" ] +} diff --git a/config/linux/gtk/BUILD.gn b/config/linux/gtk/BUILD.gn new file mode 100644 index 000000000000..355067ea178b --- /dev/null +++ b/config/linux/gtk/BUILD.gn @@ -0,0 +1,45 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/linux/gtk/gtk.gni") +import("//build/config/linux/pkg_config.gni") + +assert(is_linux, "This file should only be referenced on Linux") + +# GN doesn't check visibility for configs so we give this an obviously internal +# name to discourage random targets from accidentally depending on this and +# bypassing the GTK target's visibility. +pkg_config("gtk_internal_config") { + # Gtk requires gmodule, but it does not list it as a dependency in some + # misconfigured systems. + packages = [ + "gmodule-2.0", + "gthread-2.0", + ] + if (gtk_version == 3) { + packages += [ "gtk+-3.0" ] + } else { + assert(gtk_version == 4) + packages += [ "gtk4" ] + } +} + +group("gtk") { + visibility = [ + # These are allow-listed for WebRTC builds. Nothing in else should depend + # on GTK. + "//examples:peerconnection_client", + "//remoting/host:common", + "//remoting/host:remoting_me2me_host_static", + "//remoting/host/file_transfer", + "//remoting/host/it2me:common", + "//remoting/host/it2me:main", + "//remoting/host/linux", + "//remoting/host/remote_open_url:common", + "//remoting/test:it2me_standalone_host_main", + "//webrtc/examples:peerconnection_client", + ] + + public_configs = [ ":gtk_internal_config" ] +} diff --git a/config/linux/gtk/gtk.gni b/config/linux/gtk/gtk.gni new file mode 100644 index 000000000000..9e6131d4c0a2 --- /dev/null +++ b/config/linux/gtk/gtk.gni @@ -0,0 +1,14 @@ +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/ui.gni") + +declare_args() { + # Whether or not we should use libgtk. + use_gtk = is_linux && !is_castos + + # The (major) version of GTK to build against. A different version may be + # loaded at runtime. + gtk_version = 3 +} diff --git a/config/linux/libdrm/BUILD.gn b/config/linux/libdrm/BUILD.gn new file mode 100644 index 000000000000..31ab0d8da212 --- /dev/null +++ b/config/linux/libdrm/BUILD.gn @@ -0,0 +1,31 @@ +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
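+#
+# (Illustrative usage, not from a real target) consumers simply write
+#   deps = [ "//build/config/linux/libdrm" ]
+# and get either the system libdrm flags or the bundled //third_party/libdrm,
+# depending on use_system_libdrm below.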
+import("//build/config/chromecast_build.gni") +import("//build/config/chromeos/args.gni") +import("//build/config/linux/pkg_config.gni") + +assert(is_linux || is_chromeos) + +declare_args() { + # Controls whether the build should use the version of libdrm library shipped + # with the system. In release builds of desktop Linux and Chrome OS we use the + # system version. Some Chromecast devices use this as well. + use_system_libdrm = is_chromeos_device || (is_linux && !is_castos) +} + +if (use_system_libdrm) { + pkg_config("libdrm_config") { + packages = [ "libdrm" ] + } + group("libdrm") { + public_configs = [ ":libdrm_config" ] + } +} else { + group("libdrm") { + public_deps = [ "//third_party/libdrm" ] + } + config("libdrm_exynos_include_config") { + include_dirs = [ "//third_party/libdrm/src/exynos" ] + } +} diff --git a/config/linux/libffi/BUILD.gn b/config/linux/libffi/BUILD.gn new file mode 100644 index 000000000000..771170c3e870 --- /dev/null +++ b/config/linux/libffi/BUILD.gn @@ -0,0 +1,24 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/linux/pkg_config.gni") + +declare_args() { + # Controls whether the build should use the version of libffi library shipped + # with the system. By default, we only use the system version on Chrome OS: + # on Linux, libffi must be statically linked to prevent a situation where the + # runtime version of libffi is different from the build-time version from the + # sysroot. + use_system_libffi = default_toolchain == "//build/toolchain/cros:target" +} + +if (use_system_libffi) { + pkg_config("libffi") { + packages = [ "libffi" ] + } +} else { + config("libffi") { + libs = [ ":libffi_pic.a" ] + } +} diff --git a/config/linux/libva/BUILD.gn b/config/linux/libva/BUILD.gn new file mode 100644 index 000000000000..380da0435af2 --- /dev/null +++ b/config/linux/libva/BUILD.gn @@ -0,0 +1,17 @@ +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/linux/pkg_config.gni") + +assert(is_linux || is_chromeos, "This file should only be referenced on Linux") + +pkg_config("libva") { + packages = [ "libva" ] + + # Do not use exec_script to check the version here. It is done with a + # static_assert instead. + + # vaapi decoders use dlopen pre-sandbox anyway to improve startup times. + ignore_libs = true +} diff --git a/config/linux/nss/BUILD.gn b/config/linux/nss/BUILD.gn new file mode 100644 index 000000000000..c67cefc148b0 --- /dev/null +++ b/config/linux/nss/BUILD.gn @@ -0,0 +1,18 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/linux/pkg_config.gni") + +if (is_linux || is_chromeos) { + # This is a dependency on NSS with no libssl3. On Linux and Chrome OS, we use + # NSS for platform certificate integration. We use our own TLS library, so + # exclude the one from NSS. + pkg_config("nss") { + packages = [ "nss" ] + extra_args = [ + "-v", + "-lssl3", + ] + } +} diff --git a/config/linux/pangocairo/BUILD.gn b/config/linux/pangocairo/BUILD.gn new file mode 100644 index 000000000000..e2030b817304 --- /dev/null +++ b/config/linux/pangocairo/BUILD.gn @@ -0,0 +1,19 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+
+import("//build/config/linux/pangocairo/pangocairo.gni")
+import("//build/config/linux/pkg_config.gni")
+
+if (use_pangocairo) {
+  pkg_config("pangocairo") {
+    packages = [ "pangocairo" ]
+
+    # We don't want pkgconfig for pangocairo to explicitly request FreeType to
+    # get linked, because we control which FreeType to link to.
+    extra_args = [
+      "-v",
+      "freetype",
+    ]
+  }
+}
diff --git a/config/linux/pangocairo/pangocairo.gni b/config/linux/pangocairo/pangocairo.gni
new file mode 100644
index 000000000000..c7662ac33356
--- /dev/null
+++ b/config/linux/pangocairo/pangocairo.gni
@@ -0,0 +1,10 @@
+# Copyright 2017 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chromeos/ui_mode.gni")
+import("//build/config/ui.gni")
+
+declare_args() {
+  use_pangocairo = is_linux && !is_castos
+}
diff --git a/config/linux/pkg-config.py b/config/linux/pkg-config.py
new file mode 100755
index 000000000000..2e38c7ffbd10
--- /dev/null
+++ b/config/linux/pkg-config.py
@@ -0,0 +1,247 @@
+#!/usr/bin/env python3
+# Copyright 2013 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import json
+import os
+import subprocess
+import sys
+import re
+from optparse import OptionParser
+
+# This script runs pkg-config, optionally filtering out some results, and
+# returns the result.
+#
+# The result will be [ <includes>, <cflags>, <libs>, <lib_dirs>, <ldflags> ]
+# where each member is itself a list of strings.
+#
+# You can filter out matches using "-v <regexp>" where all results from
+# pkgconfig matching the given regular expression will be ignored. You can
+# specify more than one regular expression by specifying "-v" more than once.
+#
+# You can specify a sysroot using "-s <sysroot>" where sysroot is the absolute
+# system path to the sysroot used for compiling. This script will attempt to
+# generate correct paths for the sysroot.
+#
+# When using a sysroot, you must also specify the architecture via
+# "-a <arch>" where arch is either "x86" or "x64".
+#
+# CrOS systemroots place pkgconfig files at <systemroot>/usr/share/pkgconfig
+# and one of <systemroot>/usr/lib/pkgconfig or <systemroot>/usr/lib64/pkgconfig
+# depending on whether the systemroot is for a 32 or 64 bit architecture. They
+# specify the 'lib' or 'lib64' of the pkgconfig path by defining the
+# 'system_libdir' variable in the args.gn file. pkg_config.gni communicates
+# this variable to this script with the "--system_libdir <system_libdir>"
+# flag. If no flag is provided, then pkgconfig files are assumed to come from
+# <systemroot>/usr/lib/pkgconfig.
+#
+# Additionally, you can specify the option --atleast-version. This will skip
+# the normal outputting of a dictionary and instead print true or false,
+# depending on the return value of pkg-config for the given package.
+
+
+def SetConfigPath(options):
+  """Set the PKG_CONFIG_LIBDIR environment variable.
+
+  This takes into account any sysroot and architecture specification from the
+  options on the given command line.
+  """
+
+  sysroot = options.sysroot
+  assert sysroot
+
+  # Compute the library path name based on the architecture.
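+  # For example (hypothetical values, not from a real build): with
+  # "-s /build/amd64-sysroot -a x64 --system_libdir lib64" the code below sets
+  # PKG_CONFIG_LIBDIR to
+  # /build/amd64-sysroot/usr/lib64/pkgconfig:/build/amd64-sysroot/usr/share/pkgconfig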
+  arch = options.arch
+  if sysroot and not arch:
+    print("You must specify an architecture via -a if using a sysroot.")
+    sys.exit(1)
+
+  libdir = sysroot + '/usr/' + options.system_libdir + '/pkgconfig'
+  libdir += ':' + sysroot + '/usr/share/pkgconfig'
+  os.environ['PKG_CONFIG_LIBDIR'] = libdir
+  return libdir
+
+
+def GetPkgConfigPrefixToStrip(options, args):
+  """Returns the prefix from pkg-config where packages are installed.

+  This returned prefix is the one that should be stripped from the beginning
+  of directory names to take into account sysroots.
+  """
+  # Some sysroots, like the Chromium OS ones, may generate paths that are not
+  # relative to the sysroot. For example,
+  # /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all
+  # paths relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr)
+  # instead of relative to /path/to/chroot/build/x86-generic (i.e. prefix=/usr).
+  # To support this correctly, it's necessary to extract the prefix to strip
+  # from pkg-config's |prefix| variable.
+  prefix = subprocess.check_output([options.pkg_config, "--variable=prefix"] +
+                                   args, env=os.environ).decode('utf-8')
+  # Drop the trailing newline from pkg-config's output, then strip a trailing
+  # "/usr" so the returned value is the path component to remove from package
+  # directories.
+  prefix = prefix.strip()
+  if prefix[-4:] == '/usr':
+    return prefix[:-4]
+  return prefix
+
+
+def MatchesAnyRegexp(flag, list_of_regexps):
+  """Returns true if the first argument matches any regular expression in the
+  given list."""
+  for regexp in list_of_regexps:
+    if regexp.search(flag) != None:
+      return True
+  return False
+
+
+def RewritePath(path, strip_prefix, sysroot):
+  """Rewrites a path by stripping the prefix and prepending the sysroot."""
+  if os.path.isabs(path) and not path.startswith(sysroot):
+    if path.startswith(strip_prefix):
+      path = path[len(strip_prefix):]
+    path = path.lstrip('/')
+    return os.path.join(sysroot, path)
+  else:
+    return path
+
+
+def main():
+  # If this is run on non-Linux platforms, just return nothing and indicate
+  # success. This allows us to "kind of emulate" a Linux build from other
+  # platforms.
+  if "linux" not in sys.platform:
+    print("[[],[],[],[],[]]")
+    return 0
+
+  parser = OptionParser()
+  parser.add_option('-d', '--debug', action='store_true')
+  parser.add_option('-p', action='store', dest='pkg_config', type='string',
+                    default='pkg-config')
+  parser.add_option('-v', action='append', dest='strip_out', type='string')
+  parser.add_option('-s', action='store', dest='sysroot', type='string')
+  parser.add_option('-a', action='store', dest='arch', type='string')
+  parser.add_option('--system_libdir', action='store', dest='system_libdir',
+                    type='string', default='lib')
+  parser.add_option('--atleast-version', action='store',
+                    dest='atleast_version', type='string')
+  parser.add_option('--libdir', action='store_true', dest='libdir')
+  parser.add_option('--dridriverdir', action='store_true', dest='dridriverdir')
+  parser.add_option('--version-as-components', action='store_true',
+                    dest='version_as_components')
+  (options, args) = parser.parse_args()
+
+  # Make a list of regular expressions to strip out.
+  strip_out = []
+  if options.strip_out != None:
+    for regexp in options.strip_out:
+      strip_out.append(re.compile(regexp))
+
+  if options.sysroot:
+    libdir = SetConfigPath(options)
+    if options.debug:
+      sys.stderr.write('PKG_CONFIG_LIBDIR=%s\n' % libdir)
+    prefix = GetPkgConfigPrefixToStrip(options, args)
+  else:
+    prefix = ''
+
+  if options.atleast_version:
+    # When asking for the return value, just run pkg-config and print the
+    # return value, no need to do other work.
+    if not subprocess.call([options.pkg_config,
+                            "--atleast-version=" + options.atleast_version] +
+                           args):
+      print("true")
+    else:
+      print("false")
+    return 0
+
+  if options.version_as_components:
+    cmd = [options.pkg_config, "--modversion"] + args
+    try:
+      version_string = subprocess.check_output(cmd).decode('utf-8')
+    except:
+      sys.stderr.write('Error from pkg-config.\n')
+      return 1
+    print(json.dumps(list(map(int, version_string.strip().split(".")))))
+    return 0
+
+  if options.libdir:
+    cmd = [options.pkg_config, "--variable=libdir"] + args
+    if options.debug:
+      sys.stderr.write('Running: %s\n' % cmd)
+    try:
+      libdir = subprocess.check_output(cmd).decode('utf-8')
+    except:
+      print("Error from pkg-config.")
+      return 1
+    sys.stdout.write(libdir.strip())
+    return 0
+
+  if options.dridriverdir:
+    cmd = [options.pkg_config, "--variable=dridriverdir"] + args
+    if options.debug:
+      sys.stderr.write('Running: %s\n' % cmd)
+    try:
+      dridriverdir = subprocess.check_output(cmd).decode('utf-8')
+    except:
+      print("Error from pkg-config.")
+      return 1
+    sys.stdout.write(dridriverdir.strip())
+    return 0
+
+  cmd = [options.pkg_config, "--cflags", "--libs"] + args
+  if options.debug:
+    sys.stderr.write('Running: %s\n' % ' '.join(cmd))
+
+  try:
+    flag_string = subprocess.check_output(cmd).decode('utf-8')
+  except:
+    sys.stderr.write('Could not run pkg-config.\n')
+    return 1
+
+  # For now just split on spaces to get the args out. This will break if
+  # pkgconfig returns quoted things with spaces in them, but that doesn't seem
+  # to happen in practice.
+  all_flags = flag_string.strip().split(' ')
+
+  sysroot = options.sysroot
+  if not sysroot:
+    sysroot = ''
+
+  includes = []
+  cflags = []
+  libs = []
+  lib_dirs = []
+
+  for flag in all_flags[:]:
+    if len(flag) == 0 or MatchesAnyRegexp(flag, strip_out):
+      continue
+
+    if flag[:2] == '-l':
+      libs.append(RewritePath(flag[2:], prefix, sysroot))
+    elif flag[:2] == '-L':
+      lib_dirs.append(RewritePath(flag[2:], prefix, sysroot))
+    elif flag[:2] == '-I':
+      includes.append(RewritePath(flag[2:], prefix, sysroot))
+    elif flag[:3] == '-Wl':
+      # Don't allow libraries to control ld flags. These should be specified
+      # only in build files.
+      pass
+    elif flag == '-pthread':
+      # Many libs specify "-pthread" which we don't need since we always
+      # include this anyway. Removing it here prevents a bunch of duplicate
+      # inclusions on the command line.
+      pass
+    else:
+      cflags.append(flag)
+
+  # Output a GN value of four lists: includes, cflags, libs and lib_dirs. The
+  # JSON formatter prints GN compatible lists when everything is a list of
+  # strings.
+  print(json.dumps([includes, cflags, libs, lib_dirs]))
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/config/linux/pkg_config.gni b/config/linux/pkg_config.gni
new file mode 100644
index 000000000000..cb9b4600331f
--- /dev/null
+++ b/config/linux/pkg_config.gni
@@ -0,0 +1,129 @@
+# Copyright 2013 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+
+# Defines a config specifying the result of running pkg-config for the given
+# packages. Put the package names you want to query in the "packages" variable
+# inside the template invocation.
+#
+# You can also add defines via the "defines" variable. This can be useful for
+# passing defines that the library expects users of its headers to set.
+#
+# Example:
+#   pkg_config("mything") {
+#     packages = [ "mything1", "mything2" ]
+#     defines = [ "ENABLE_AWESOME" ]
+#   }
+#
+# You can also use "extra args" to filter out results (see pkg-config.py):
+#   extra_args = [ "-v", "foo" ]
+# To ignore libs and ldflags (only cflags/defines will be set, which is useful
+# when doing manual dynamic linking), set:
+#   ignore_libs = true

declare_args() {
+  # A pkg-config wrapper to call instead of trying to find and call the right
+  # pkg-config directly. Wrappers like this are common in cross-compilation
+  # environments.
+  # Leaving it blank defaults to searching PATH for 'pkg-config' and relying
+  # on the sysroot mechanism to find the right .pc files.
+  pkg_config = ""
+
+  # An optional pkg-config wrapper to use for tools built on the host.
+  host_pkg_config = ""
+
+  # CrOS systemroots place pkgconfig files at <systemroot>/usr/share/pkgconfig
+  # and one of <systemroot>/usr/lib/pkgconfig or
+  # <systemroot>/usr/lib64/pkgconfig depending on whether the systemroot is
+  # for a 32 or 64 bit architecture.
+  #
+  # When building under GYP, CrOS board builds specify the 'system_libdir'
+  # variable as part of the GYP_DEFINES provided by the CrOS emerge build or
+  # simple chrome build scheme. This variable permits controlling this for GN
+  # builds in similar fashion by setting the `system_libdir` variable in the
+  # build's args.gn file to 'lib' or 'lib64' as appropriate for the target
+  # architecture.
+  system_libdir = "lib"
+}
+
+pkg_config_script = "//build/config/linux/pkg-config.py"
+
+# Define the args we pass to the pkg-config script for other build files that
+# need to invoke it manually.
+pkg_config_args = []
+
+common_pkg_config_args = []
+if (sysroot != "") {
+  # Pass the sysroot if we're using one (it requires the CPU arch also).
+  common_pkg_config_args += [
+    "-s",
+    rebase_path(sysroot),
+    "-a",
+    current_cpu,
+  ]
+}
+
+if (pkg_config != "") {
+  pkg_config_args += [
+    "-p",
+    pkg_config,
+  ]
+}
+
+# Only use the custom libdir when building with the target sysroot.
+if (target_sysroot != "" && sysroot == target_sysroot) {
+  pkg_config_args += [
+    "--system_libdir",
+    system_libdir,
+  ]
+}
+
+if (host_pkg_config != "") {
+  host_pkg_config_args = [
+    "-p",
+    host_pkg_config,
+  ]
+} else {
+  host_pkg_config_args = pkg_config_args
+}
+
+template("pkg_config") {
+  assert(defined(invoker.packages),
+         "Variable |packages| must be defined to be a list in pkg_config.")
+  config(target_name) {
+    if (host_toolchain == current_toolchain) {
+      args = common_pkg_config_args + host_pkg_config_args + invoker.packages
+    } else {
+      args = common_pkg_config_args + pkg_config_args + invoker.packages
+    }
+    if (defined(invoker.extra_args)) {
+      args += invoker.extra_args
+    }
+
+    pkgresult = exec_script(pkg_config_script, args, "value")
+    cflags = pkgresult[1]
+
+    foreach(include, pkgresult[0]) {
+      if (use_sysroot) {
+        # We want the system include paths to use -isystem instead of -I to
+        # suppress warnings in those headers.
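+        # For example (paths illustrative, not from a real build): an include
+        # path "<sysroot>/usr/include/glib-2.0" rebased against an
+        # "out/Release" build directory becomes
+        # "-isystem../../<sysroot>/usr/include/glib-2.0".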
+ include_relativized = rebase_path(include, root_build_dir) + cflags += [ "-isystem$include_relativized" ] + } else { + cflags += [ "-I$include" ] + } + } + + if (!defined(invoker.ignore_libs) || !invoker.ignore_libs) { + libs = pkgresult[2] + lib_dirs = pkgresult[3] + } + + forward_variables_from(invoker, + [ + "defines", + "visibility", + ]) + } +} diff --git a/config/locales.gni b/config/locales.gni new file mode 100644 index 000000000000..ed26f3de9bed --- /dev/null +++ b/config/locales.gni @@ -0,0 +1,261 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/chromeos/ui_mode.gni") + +# This file creates |platform_pak_locales| which is the set of packed locales +# based on the current platform. Locales in this list are formatted based on +# what .pak files expect. The |platform_pak_locales| variable *may* contain +# pseudolocales, depending on the |enable_pseudolocales| flag. +# If you specifically want to have the locales variable with or without +# pseudolocales, then use |locales_with_pseudolocales| or +# |locales_without_pseudolocales|. + +# The following additional platform specific lists are created: +# - |extended_locales| list of locales not shipped on desktop builds +# - |android_bundle_locales_as_resources| locales formatted for XML output names +# - |locales_as_apple_outputs| formatted for mac output bundles + +pseudolocales = [ + "ar-XB", + "en-XA", +] + +# Superset of all locales used in Chrome with platform specific changes noted. +all_chrome_locales = + [ + "af", + "am", + "ar", + "as", + "az", + "be", + "bg", + "bn", + "bs", + "ca", + "cs", + "cy", + "da", + "de", + "el", + "en-GB", + "en-US", + "es", + "es-419", # "es-MX" in iOS (Mexico vs Latin America) "es-US" on Android + "et", + "eu", + "fa", + "fi", + "fil", # "tl" in .xml but "fil" in TC and .pak + "fr", + "fr-CA", + "gl", + "gu", + "he", # "iw" in .xml and TC but "he" in .pak + "hi", + "hr", + "hu", + "hy", + "id", # "in" in .xml but "id" in TC and .pak + "is", + "it", + "ja", + "ka", + "kk", + "km", + "kn", + "ko", + "ky", + "lo", + "lt", + "lv", + "mk", + "ml", + "mn", + "mr", + "ms", + "my", + "nb", # "no" in TC but "nb" in .xml and .pak + "ne", + "nl", + "or", + "pa", + "pl", + "pt-BR", # just "pt" in iOS + "pt-PT", + "ro", + "ru", + "si", + "sk", + "sl", + "sq", + "sr", + "sr-Latn", # -b+sr+Latn in .xml + "sv", + "sw", + "ta", + "te", + "th", + "tr", + "uk", + "ur", + "uz", + "vi", + "zh-CN", + "zh-HK", + "zh-TW", + "zu", + ] + pseudolocales + +if (is_ios) { + # Chrome on iOS uses "es-MX" and "pt" for "es-419" and "pt-BR". + all_chrome_locales -= [ + "es-419", + "pt-BR", + ] + all_chrome_locales += [ + "es-MX", + "pt", + ] +} + +# Chrome locales not on Windows, Mac, or Linux. +# This list is used for all platforms except Android. On Android, this list is +# modified to exclude locales that are not used on Android, so +# `platform_pak_locales - extended_locales` works as expected. +extended_locales = [ + "as", + "az", + "be", + "bs", + "cy", + "eu", + "fr-CA", + "gl", + "hy", + "is", + "ka", + "kk", + "km", + "ky", + "lo", + "mk", + "mn", + "my", + "ne", + "or", + "pa", + "si", + "sq", + "sr-Latn", + "uz", + "zh-HK", + "zu", +] + +# Chrome locales not on Android. +# These locales have not yet been tested yet. 
Specifically, AOSP has not been +# translated to Welsh at the time of writing (April 2022): +# https://cs.android.com/android/platform/superproject/+/master:build/make/target/product/languages_default.mk +# Due to this, the only way a user could see Welsh strings - assuming they were +# built - would be to manually switch their "Chrome language" in Chrome's +# language settings to Welsh, so Welsh usage would probably be very low. +_non_android_locales = [ "cy" ] + +# Setup |platform_pak_locales| for each platform. +platform_pak_locales = all_chrome_locales +if (is_android) { + platform_pak_locales -= _non_android_locales + extended_locales -= _non_android_locales +} else { + platform_pak_locales -= extended_locales +} + +# The base list for all platforms except Android excludes the extended locales. +# Add or subtract platform specific locales below. +if (is_chromeos) { + platform_pak_locales += [ + "cy", + "eu", + "gl", + "is", + "zu", + ] + platform_pak_locales -= [ "ur" ] +} else if (is_ios) { + platform_pak_locales -= [ + "af", + "am", + "bn", + "et", + "fil", + "gu", + "kn", + "lv", + "ml", + "mr", + "sl", + "sw", + "ta", + "te", + "ur", + ] +} + +# List for Android locale names in .xml exports. Note: needs to stay in sync +# with |ToAndroidLocaleName| in build/android/gyp/util/resource_utils.py. +if (is_android) { + # - add r: (e.g. zh-HK -> zh-rHK ) + android_bundle_locales_as_resources = [] + foreach(_locale, platform_pak_locales) { + android_bundle_locales_as_resources += + [ string_replace(_locale, "-", "-r") ] + } + + # - remove en-US + # - swap: (he, id, en-419, fil) -> (iw, in, es-rUS, tl) + # - sr-rLatn -> -b+sr+Latn + android_bundle_locales_as_resources -= [ + "en-rUS", + "es-r419", + "fil", + "he", + "id", + "sr-rLatn", + ] + android_bundle_locales_as_resources += [ + "b+sr+Latn", + "es-rUS", + "in", + "iw", + "tl", + ] +} + +locales_without_pseudolocales = platform_pak_locales - pseudolocales +locales_with_pseudolocales = platform_pak_locales + +declare_args() { + # We want to give pseudolocales to everyone except end-users (devs & QA). + # Note that this only packages the locales in, and doesn't add the ui to enable them. + enable_pseudolocales = !is_official_build +} + +if (!enable_pseudolocales) { + platform_pak_locales -= pseudolocales +} + +if (is_apple) { + # Same as the locales list but in the format Mac expects for output files: + # it uses underscores instead of hyphens, and "en" instead of "en-US". + locales_as_apple_outputs = [] + foreach(locale, platform_pak_locales) { + if (locale == "en-US") { + locales_as_apple_outputs += [ "en" ] + } else { + locales_as_apple_outputs += [ string_replace(locale, "-", "_") ] + } + } +} diff --git a/config/logging.gni b/config/logging.gni new file mode 100644 index 000000000000..a08195b7ddfb --- /dev/null +++ b/config/logging.gni @@ -0,0 +1,32 @@ +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/buildflag_header.gni") +import("//build/config/chromeos/ui_mode.gni") +import("//build/config/dcheck_always_on.gni") + +declare_args() { + # Use LogErrorNotReached() for NOTREACHED(). + enable_log_error_not_reached = + is_chromeos_ash && !(is_debug || dcheck_always_on) + enable_stack_trace_line_numbers = false + + # Use runtime vlog everywhere except for ash-chrome. + # When `use_runtime_vlog` is true, + # command line switch `--vmodule=xxx` or `--v=x` could be used to + # control vlog level at runtime. 
+  # when `use_runtime_vlog` is false,
+  # verbose log level is controlled by the `ENABLE_VLOG_LEVEL` macro. VLOG(n)
+  # is kept and generates output if `n` is less than or equal to the vlog
+  # level defined by the macro.
+  # Command line switches `--vmodule=xxx` or `--v=x` would have no effect.
+  #
+  # Runtime vlog is used everywhere except on ash-chrome.
+  # Ash-chrome has a few vmodule patterns that need to be used indefinitely
+  # to investigate problems from logs in feedback reports. These vmodule
+  # patterns use too many cpu cycles (see http://crbug/489441). Turning
+  # off runtime vlog and using build time vlog would avoid paying that cpu tax
+  # and have a nice side effect of a smaller production binary.
+  use_runtime_vlog = !is_chromeos_ash
+}
diff --git a/config/mac/BUILD.gn b/config/mac/BUILD.gn
new file mode 100644
index 000000000000..7af3124d4eba
--- /dev/null
+++ b/config/mac/BUILD.gn
@@ -0,0 +1,129 @@
+# Copyright 2013 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/apple/symbols.gni")
+import("//build/config/c++/c++.gni")
+import("//build/config/mac/mac_sdk.gni")
+import("//build/config/sysroot.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/rbe.gni")
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic.
+config("compiler") {
+  # These flags are shared between the C compiler and linker.
+  common_mac_flags = []
+
+  # CPU architecture.
+  if (current_cpu == "x64") {
+    clang_arch = "x86_64"
+  } else if (current_cpu == "x86") {
+    clang_arch = "i386"
+  } else if (current_cpu == "arm64") {
+    clang_arch = current_cpu
+  } else {
+    assert(false, "unknown current_cpu $current_cpu")
+  }
+  if (host_os == "mac") {
+    common_mac_flags += [
+      "-arch",
+      clang_arch,
+    ]
+  } else {
+    common_mac_flags += [ "--target=$clang_arch-apple-macos" ]
+  }
+
+  # This is here so that all files get recompiled after an Xcode update.
+  # (defines are passed via the command line, and build systems rebuild things
+  # when their command lines change). Nothing should ever read this define.
+  defines = [ "CR_XCODE_VERSION=$xcode_version" ]
+
+  asmflags = common_mac_flags
+  cflags = common_mac_flags
+
+  # Without this, the constructors and destructors of a C++ object inside
+  # an Objective C struct won't be called, which is very bad.
+  cflags_objcc = [ "-fobjc-call-cxx-cdtors" ]
+
+  ldflags = common_mac_flags
+
+  if (save_unstripped_output) {
+    ldflags += [ "-Wcrl,unstripped," + rebase_path(root_out_dir) ]
+  }
+
+  if (export_libcxxabi_from_executables) {
+    ldflags += [ "-Wl,-undefined,dynamic_lookup" ]
+  }
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Mac-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+  common_flags = [
+    "-isysroot",
+    rebase_path(sysroot, root_build_dir),
+    "-mmacos-version-min=$mac_deployment_target",
+  ]
+
+  asmflags = common_flags
+  cflags = common_flags
+  ldflags = common_flags
+}
+
+# On Mac, this is used for everything except static libraries.
+config("mac_dynamic_flags") {
+  ldflags = [ "-Wl,-ObjC" ]  # Always load Objective-C categories and classes.
+
+  if (is_component_build) {
+    ldflags += [
+      # Path for loading shared libraries for unbundled binaries.
+ "-Wl,-rpath,@loader_path/.", + + # Path for loading shared libraries for bundled binaries. Get back from + # Binary.app/Contents/MacOS. + "-Wl,-rpath,@loader_path/../../..", + ] + + # Path for loading shared libraries for unbundled binaries for + # the host toolchain (see https://crbug.com/1315433). Only used + # for when building for iOS. + if (target_os == "ios" && current_toolchain == host_toolchain) { + ldflags += [ "-Wl,-rpath,@loader_path/" + rebase_path( + get_label_info(":mac_dynamic_flags", "root_out_dir"), + root_build_dir) ] + } + } +} + +# When building with Goma, all inputs must be relative to the build directory. +# If using the system Xcode, which typically resides outside the build root, a +# symlink to the SDK is created in the build directory, and the path to that +# link is stored in $mac_sdk_path. If an action references a file in the SDK as +# an input, GN will complain that no target generates the file because it is +# below the $root_build_dir. The below action lists as outputs the files in the +# SDK that are referenced as inputs to actions, so that GN thinks a target has +# generated them. The list is centralized here, as multiple targets need to +# reference the same files, and an output can only be generated once. +# +# The symbolic link for $mac_sdk_path is set up by +# //build/config/apple/sdk_info.py in //build/config/mac/mac_sdk.gni. +if (use_system_xcode && (use_goma || use_remoteexec) && target_os == "mac" && + current_toolchain == default_toolchain) { + action("sdk_inputs") { + script = "//build/noop.py" + outputs = [ + "$mac_sdk_path/usr/include/mach/exc.defs", + "$mac_sdk_path/usr/include/mach/mach_exc.defs", + "$mac_sdk_path/usr/include/mach/notify.defs", + ] + } +} else { + group("sdk_inputs") { + if (current_toolchain != default_toolchain) { + public_deps = [ ":sdk_inputs($default_toolchain)" ] + } + } +} diff --git a/config/mac/BuildInfo.plist b/config/mac/BuildInfo.plist new file mode 100644 index 000000000000..bfa3b8d5732d --- /dev/null +++ b/config/mac/BuildInfo.plist @@ -0,0 +1,16 @@ + + + + + DTCompiler + ${GCC_VERSION} + DTSDKBuild + ${MAC_SDK_BUILD} + DTSDKName + ${MAC_SDK_NAME} + DTXcode + ${XCODE_VERSION} + DTXcodeBuild + ${XCODE_BUILD} + + diff --git a/config/mac/OWNERS b/config/mac/OWNERS new file mode 100644 index 000000000000..6f3324f07c8f --- /dev/null +++ b/config/mac/OWNERS @@ -0,0 +1 @@ +file://build/apple/OWNERS diff --git a/config/mac/mac_sdk.gni b/config/mac/mac_sdk.gni new file mode 100644 index 000000000000..d3c4e3c7fdd1 --- /dev/null +++ b/config/mac/mac_sdk.gni @@ -0,0 +1,136 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/chrome_build.gni") +import("//build/config/gclient_args.gni") +import("//build/config/mac/mac_sdk_overrides.gni") +import("//build/toolchain/goma.gni") +import("//build/toolchain/rbe.gni") +import("//build/toolchain/toolchain.gni") + +assert(current_os == "mac" || current_toolchain == default_toolchain) + +declare_args() { + # The `MACOSX_DEPLOYMENT_TARGET` variable used when compiling. This partially + # controls the minimum supported version of macOS for Chromium by + # affecting the symbol availability rules. This may differ from + # `mac_min_system_version` when dropping support for older macOSes but where + # additional code changes are required to be compliant with the availability + # rules. 
+ mac_deployment_target = "10.13" + + # The value of the `LSMinimumSystemVersion` in `Info.plist` files. This + # partially controls the minimum supported version of macOS for Chromium by + # affecting the `Info.plist`. This may differ from `mac_deployment_target` + # when dropping support for older macOSes. This must be greater than or equal + # to the `mac_deployment_target` version. + mac_min_system_version = "10.13" + + # Path to a specific version of the Mac SDK, not including a slash at the end. + # If empty, the path to the lowest version greater than or equal to + # `mac_sdk_min` is used. + mac_sdk_path = "" + + # The SDK name as accepted by `xcodebuild`. + mac_sdk_name = "macosx" + + # The SDK version used when making official builds. This is a single exact + # version, not a minimum. If this version isn't available official builds + # will fail. + mac_sdk_official_version = "13.3" + + # The SDK build version used when making official builds. This is a single + # exact version found at "System/Library/CoreServices/SystemVersion.plist" + # inside the SDK. + mac_sdk_official_build_version = "21E226" + + # Production builds should use hermetic Xcode. If you want to do production + # builds with system Xcode to test new SDKs, set this. + # Don't set this on any bots. + mac_allow_system_xcode_for_official_builds_for_testing = false +} + +# Check that the version of macOS SDK used is the one requested when building +# a version of Chrome shipped to the users. Disable the check if building for +# iOS as the version macOS SDK used is not relevant for the tool build for the +# host (they are not shipped) --- this is required as Chrome on iOS is usually +# build with the latest version of Xcode that may not ship with the version of +# the macOS SDK used to build Chrome on mac. +# TODO(crbug.com/635745): the check for target_os should be replaced by a +# check that current_toolchain is default_toolchain, and the file should +# assert that current_os is "mac" once this file is no longer included by +# iOS toolchains. +if (is_chrome_branded && is_official_build && target_os != "ios") { + assert(!use_system_xcode || + mac_allow_system_xcode_for_official_builds_for_testing, + "official branded builds should use hermetic xcode") +} + +# The path to the hermetic install of Xcode. Only relevant when +# use_system_xcode = false. +if (!use_system_xcode) { + _hermetic_xcode_path = "//build/mac_files/xcode_binaries" +} + +script_name = "//build/config/apple/sdk_info.py" +sdk_info_args = [] +if (!use_system_xcode) { + sdk_info_args += [ + "--developer_dir", + rebase_path(_hermetic_xcode_path, "", root_build_dir), + ] +} + +# Goma RBE requires paths relative to source directory. When using system +# Xcode, this is done by creating symbolic links in root_build_dir. +if (use_system_xcode && (use_goma || use_remoteexec)) { + sdk_info_args += [ + "--get_sdk_info", + "--create_symlink_at", + "sdk/xcode_links", + "--root_build_dir", + root_build_dir, + ] +} +sdk_info_args += [ mac_sdk_name ] + +_mac_sdk_result = exec_script(script_name, sdk_info_args, "scope") +xcode_version = _mac_sdk_result.xcode_version +xcode_build = _mac_sdk_result.xcode_build +if (mac_sdk_path == "" && use_system_xcode && (use_goma || use_remoteexec)) { + mac_sdk_path = _mac_sdk_result.sdk_path +} + +if (use_system_xcode) { + # The tool will print the SDK path on the first line, and the version on the + # second line. 
+  find_sdk_args = [
+    "--print_sdk_path",
+    "--print_bin_path",
+    "--print_sdk_build",
+    mac_sdk_min,
+  ]
+  find_sdk_lines =
+      exec_script("//build/mac/find_sdk.py", find_sdk_args, "list lines")
+  mac_sdk_version = find_sdk_lines[3]
+  mac_sdk_build_version = find_sdk_lines[2]
+  mac_bin_path = find_sdk_lines[1]
+  if (mac_sdk_path == "") {
+    mac_sdk_path = find_sdk_lines[0]
+  }
+} else {
+  mac_sdk_version = mac_sdk_official_version
+  mac_sdk_build_version = mac_sdk_official_build_version
+  _dev = _hermetic_xcode_path + "/Contents/Developer"
+  _sdk = "MacOSX${mac_sdk_version}.sdk"
+  mac_sdk_path = _dev + "/Platforms/MacOSX.platform/Developer/SDKs/$_sdk"
+  mac_bin_path = _dev + "/Toolchains/XcodeDefault.xctoolchain/usr/bin/"
+
+  # If we're using hermetic Xcode, then we want the paths to be relative so
+  # that generated ninja files are independent of the directory location.
+  # TODO(thakis): Do this at the uses of this variable instead.
+  mac_bin_path = rebase_path(mac_bin_path, root_build_dir)
+}
diff --git a/config/mac/mac_sdk_overrides.gni b/config/mac/mac_sdk_overrides.gni
new file mode 100644
index 000000000000..de58f3f72c8b
--- /dev/null
+++ b/config/mac/mac_sdk_overrides.gni
@@ -0,0 +1,16 @@
+# Copyright 2017 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains arguments that subprojects may choose to override. It
+# asserts that those overrides are used, to prevent unused args warnings.
+
+_sdk_min_from_env = getenv("FORCE_MAC_SDK_MIN")
+declare_args() {
+  # Minimum supported version of the Mac SDK.
+  if (_sdk_min_from_env == "") {
+    mac_sdk_min = "10.15"
+  } else {
+    mac_sdk_min = _sdk_min_from_env
+  }
+}
diff --git a/config/mac/package_framework.py b/config/mac/package_framework.py
new file mode 100644
index 000000000000..a9210eba65b6
--- /dev/null
+++ b/config/mac/package_framework.py
@@ -0,0 +1,60 @@
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import errno
+import os
+import shutil
+import sys
+
+def Main():
+  parser = argparse.ArgumentParser(description='Create Mac Framework symlinks')
+  parser.add_argument('--framework', action='store', type=str, required=True)
+  parser.add_argument('--version', action='store', type=str)
+  parser.add_argument('--contents', action='store', type=str, nargs='+')
+  parser.add_argument('--stamp', action='store', type=str, required=True)
+  args = parser.parse_args()
+
+  VERSIONS = 'Versions'
+  CURRENT = 'Current'
+
+  # Ensure the Foo.framework/Versions/A/ directory exists and create the
+  # Foo.framework/Versions/Current symlink to it.
+  if args.version:
+    try:
+      os.makedirs(os.path.join(args.framework, VERSIONS, args.version), 0o755)
+    except OSError as e:
+      if e.errno != errno.EEXIST:
+        raise
+    _Relink(os.path.join(args.version),
+            os.path.join(args.framework, VERSIONS, CURRENT))
+
+  # Establish the top-level symlinks in the framework bundle. The dest of
+  # the symlinks may not exist yet.
+  if args.contents:
+    for item in args.contents:
+      _Relink(os.path.join(VERSIONS, CURRENT, item),
+              os.path.join(args.framework, item))
+
+  # Write out a stamp file.
+  if args.stamp:
+    with open(args.stamp, 'w') as f:
+      f.write(str(args))
+
+  return 0
+
+
+def _Relink(dest, link):
+  """Creates a symlink to |dest| named |link|. 
If |link| already exists, + it is overwritten.""" + try: + os.remove(link) + except OSError as e: + if e.errno != errno.ENOENT: + shutil.rmtree(link) + os.symlink(dest, link) + + +if __name__ == '__main__': + sys.exit(Main()) diff --git a/config/mac/prepare_framework_version.py b/config/mac/prepare_framework_version.py new file mode 100644 index 000000000000..0e9daeba989d --- /dev/null +++ b/config/mac/prepare_framework_version.py @@ -0,0 +1,42 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import os +import shutil +import sys + +# Ensures that the current version matches the last-produced version, which is +# stored in the version_file. If it does not, then the framework_root_dir is +# obliterated. +# Usage: python prepare_framework_version.py out/obj/version_file \ +# out/Framework.framework \ +# 'A' + +def PrepareFrameworkVersion(version_file, framework_root_dir, version): + # Test what the current framework version is. Stop if it is up-to-date. + try: + with open(version_file, 'r') as f: + current_version = f.read() + if current_version == version: + return + except IOError: + pass + + # The framework version has changed, so clobber the framework. + if os.path.exists(framework_root_dir): + shutil.rmtree(framework_root_dir) + + # Write out the new framework version file, making sure its containing + # directory exists. + dirname = os.path.dirname(version_file) + if not os.path.isdir(dirname): + os.makedirs(dirname, 0o700) + + with open(version_file, 'w+') as f: + f.write(version) + + +if __name__ == '__main__': + PrepareFrameworkVersion(sys.argv[1], sys.argv[2], sys.argv[3]) + sys.exit(0) diff --git a/config/mac/rules.gni b/config/mac/rules.gni new file mode 100644 index 000000000000..f613a049bdfa --- /dev/null +++ b/config/mac/rules.gni @@ -0,0 +1,627 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/apple/apple_info_plist.gni") +import("//build/config/apple/symbols.gni") +import("//build/config/mac/mac_sdk.gni") + +# Generates Info.plist files for Mac apps and frameworks. +# +# Arguments +# +# info_plist: +# (optional) string, path to the Info.plist file that will be used for +# the bundle. +# +# info_plist_target: +# (optional) string, if the info_plist is generated from an action, +# rather than a regular source file, specify the target name in lieu +# of info_plist. The two arguments are mutually exclusive. +# +# executable_name: +# string, name of the generated target used for the product +# and executable name as specified in the output Info.plist. +# +# extra_substitutions: +# (optional) string array, 'key=value' pairs for extra fields which are +# specified in a source Info.plist template. 
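+#
+# Example (illustrative only; the target and file names here are
+# hypothetical, not part of the original file):
+#
+#   mac_info_plist("my_app_info_plist") {
+#     executable_name = "MyApp"
+#     info_plist = "//my_app/Info.plist"
+#     extra_substitutions = [ "MY_BUILD_FLAVOR=dev" ]
+#   }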
+template("mac_info_plist") { + assert(defined(invoker.info_plist) != defined(invoker.info_plist_target), + "Only one of info_plist or info_plist_target may be specified in " + + target_name) + + if (defined(invoker.info_plist)) { + _info_plist = invoker.info_plist + } else { + _info_plist_target_output = get_target_outputs(invoker.info_plist_target) + _info_plist = _info_plist_target_output[0] + } + + apple_info_plist(target_name) { + format = "xml1" + extra_substitutions = [ + "MAC_SDK_BUILD=$mac_sdk_build_version", + "MAC_SDK_NAME=$mac_sdk_name$mac_sdk_version", + "MACOSX_DEPLOYMENT_TARGET=$mac_deployment_target", + "CHROMIUM_MIN_SYSTEM_VERSION=$mac_min_system_version", + "XCODE_BUILD=$xcode_build", + "XCODE_VERSION=$xcode_version", + ] + if (defined(invoker.extra_substitutions)) { + extra_substitutions += invoker.extra_substitutions + } + plist_templates = [ + "//build/config/mac/BuildInfo.plist", + _info_plist, + ] + if (defined(invoker.info_plist_target)) { + deps = [ invoker.info_plist_target ] + } + forward_variables_from(invoker, + [ + "testonly", + "executable_name", + ]) + } +} + +# Template to package a shared library into a Mac framework bundle. +# +# By default, the bundle target this template generates does not link the +# resulting framework into anything that depends on it. If a dependency wants +# a link-time (as well as build-time) dependency on the framework bundle, +# depend against "$target_name+link". If only the build-time dependency is +# required (e.g., for copying into another bundle), then use "$target_name". +# +# Arguments +# +# framework_version: +# string, version of the framework. Typically this is a +# single letter, like "A". +# +# framework_contents: +# list of string, top-level items in the framework. This is +# the list of symlinks to create in the .framework directory that link +# into Versions/Current/. +# +# info_plist: +# (optional) string, path to the Info.plist file that will be used for +# the bundle. +# +# info_plist_target: +# (optional) string, if the info_plist is generated from an action, +# rather than a regular source file, specify the target name in lieu +# of info_plist. The two arguments are mutually exclusive. +# +# output_name: +# (optional) string, name of the generated framework without the +# .framework suffix. If omitted, defaults to target_name. +# +# extra_substitutions: +# (optional) string array, 'key=value' pairs for extra fields which are +# specified in a source Info.plist template. +# +# This template provides three targets for the resulting framework bundle. The +# link-time behavior varies depending on which of the two targets below is +# added as a dependency: +# - $target_name only adds a build-time dependency. Targets that depend on +# it will not link against the framework. +# - $target_name+link adds a build-time and link-time dependency. Targets +# that depend on it will link against the framework. +# - $target_name+link_nested adds a build-time and link-time dependency, but +# only on the shared library and not the fully-assembled framework bundle. +# This should only be used for other nested binary components of the +# framework bundle (e.g. Helpers) that themselves depend on the main shared +# library of the framework bundle. +# +# The build-time-only dependency is used for when a target needs to use the +# framework either only for resources, or because the target loads it at run- +# time, via dlopen() or NSBundle. 
The link-time dependency will cause the +# dependee to have the framework loaded by dyld at launch. +# +# Example of build-time only dependency: +# +# mac_framework_bundle("CoreTeleportation") { +# sources = [ ... ] +# } +# +# bundle_data("core_teleportation_bundle_data") { +# deps = [ ":CoreTeleportation" ] +# sources = [ "$root_out_dir/CoreTeleportation.framework" ] +# outputs = [ "{{bundle_contents_dir}}/Frameworks/{{source_file_part}}" ] +# } +# +# app_bundle("GoatTeleporter") { +# sources = [ ... ] +# deps = [ +# ":core_teleportation_bundle_data", +# ] +# } +# +# The GoatTeleporter.app will not directly link against +# CoreTeleportation.framework, but it will be included in the bundle's +# Frameworks directory. +# +# Example of link-time dependency: +# +# mac_framework_bundle("CoreTeleportation") { +# sources = [ ... ] +# ldflags = [ +# "-install_name", +# "@executable_path/../Frameworks/$target_name.framework" +# ] +# } +# +# bundle_data("core_teleportation_bundle_data") { +# deps = [ ":CoreTeleportation+link" ] +# sources = [ "$root_out_dir/CoreTeleportation.framework" ] +# outputs = [ "{{bundle_contents_dir}}/Frameworks/{{source_file_part}}" ] +# } +# +# app_bundle("GoatTeleporter") { +# sources = [ ... ] +# deps = [ +# ":core_teleportation_bundle_data", +# ] +# } +# +# Note that the framework is still copied to the app's bundle, but dyld will +# load this library when the app is launched because it uses the "+link" +# target as a dependency. This also requires that the framework set its +# install_name so that dyld can locate it. +# +# See "gn help shared_library" for more information on arguments supported +# by shared library target. +template("mac_framework_bundle") { + assert(defined(invoker.deps) || defined(invoker.public_deps), + "Dependencies must be specified for $target_name") + assert(invoker.framework_version != "", "framework_version is required") + assert(defined(invoker.framework_contents), "framework_contents is required") + + _info_plist_target = target_name + "_info_plist" + + mac_info_plist(_info_plist_target) { + executable_name = target_name + if (defined(invoker.output_name)) { + executable_name = invoker.output_name + } + forward_variables_from(invoker, + [ + "extra_substitutions", + "info_plist", + "info_plist_target", + "testonly", + ]) + } + + _info_plist_bundle_data = _info_plist_target + "_bundle_data" + + bundle_data(_info_plist_bundle_data) { + forward_variables_from(invoker, [ "testonly" ]) + sources = get_target_outputs(":$_info_plist_target") + outputs = [ "{{bundle_resources_dir}}/Info.plist" ] + public_deps = [ ":$_info_plist_target" ] + } + + _target_name = target_name + _output_name = target_name + if (defined(invoker.output_name)) { + _output_name = invoker.output_name + } + + # Create a file to track the build dependency on the framework_version and + # framework_contents variables. + _framework_toc = [ + "Version=" + invoker.framework_version, + _output_name, + ] + invoker.framework_contents + _framework_contents = [ _output_name ] + invoker.framework_contents + _framework_toc_file = "$target_out_dir/${target_name}.toc" + write_file(_framework_toc_file, _framework_toc) + + # Create local variables for referencing different parts of the bundle. + _framework_target = _target_name + _framework_name = _output_name + ".framework" + _framework_base_dir = "$root_out_dir/$_framework_name" + _framework_root_dir = + _framework_base_dir + "/Versions/${invoker.framework_version}" + + # Clean the entire framework if the framework_version changes. 
+ _version_file = "$target_out_dir/${target_name}_version" + exec_script("//build/config/mac/prepare_framework_version.py", + [ + rebase_path(_version_file), + rebase_path(_framework_base_dir), + invoker.framework_version, + ]) + + # Create the symlinks. + _framework_package_target = target_name + "_package" + action(_framework_package_target) { + script = "//build/config/mac/package_framework.py" + + # The TOC file never needs to be read, since its contents are the values + # of GN variables. It is only used to trigger this rule when the values + # change. + inputs = [ _framework_toc_file ] + + _stamp_file = "$target_out_dir/run_${_framework_package_target}.stamp" + outputs = [ _stamp_file ] + + visibility = [ ":$_framework_target" ] + + args = [ + "--framework", + rebase_path(_framework_base_dir, root_build_dir), + "--stamp", + rebase_path(_stamp_file, root_build_dir), + "--version", + invoker.framework_version, + "--contents", + ] + _framework_contents + + # It is not possible to list _framework_contents as outputs, since + # ninja does not properly stat symbolic links. + # https://github.com/ninja-build/ninja/issues/1186 + } + + _link_shared_library_target = target_name + "_shared_library" + _shared_library_bundle_data = target_name + "_shared_library_bundle_data" + + shared_library(_link_shared_library_target) { + forward_variables_from(invoker, + "*", + [ + "assert_no_deps", + "bundle_deps", + "code_signing_enabled", + "data_deps", + "info_plist", + "info_plist_target", + "output_name", + "visibility", + ]) + visibility = [ + ":$_shared_library_bundle_data", + ":${_framework_target}+link_nested", + ] + output_name = _output_name + output_prefix_override = true + output_extension = "" + output_dir = "$target_out_dir/$_link_shared_library_target" + } + + bundle_data(_shared_library_bundle_data) { + visibility = [ ":$_framework_target" ] + forward_variables_from(invoker, [ "testonly" ]) + sources = [ "$target_out_dir/$_link_shared_library_target/$_output_name" ] + outputs = [ "{{bundle_executable_dir}}/$_output_name" ] + public_deps = [ ":$_link_shared_library_target" ] + } + + _framework_public_config = _target_name + "_public_config" + config(_framework_public_config) { + visibility = [ ":$_framework_target+link" ] + framework_dirs = [ root_out_dir ] + frameworks = [ _framework_name ] + } + + create_bundle(_framework_target) { + forward_variables_from(invoker, + [ + "data_deps", + "deps", + "public_deps", + "testonly", + ]) + + if (defined(invoker.visibility)) { + visibility = invoker.visibility + visibility += [ ":$_target_name+link" ] + } + + if (!defined(deps)) { + deps = [] + } + deps += [ ":$_info_plist_bundle_data" ] + + if (defined(invoker.bundle_deps)) { + deps += invoker.bundle_deps + } + + if (!defined(public_deps)) { + public_deps = [] + } + public_deps += [ + ":$_framework_package_target", + ":$_shared_library_bundle_data", + ] + + if (enable_dsyms) { + data = [ + "$root_out_dir/$_output_name.dSYM/Contents/Info.plist", + "$root_out_dir/$_output_name.dSYM/Contents/Resources/DWARF/$_output_name", + ] + } + + bundle_root_dir = _framework_base_dir + bundle_contents_dir = _framework_root_dir + bundle_resources_dir = "$bundle_contents_dir/Resources" + bundle_executable_dir = bundle_contents_dir + } + + group(_target_name + "+link") { + forward_variables_from(invoker, + [ + "public_configs", + "testonly", + "visibility", + ]) + public_deps = [ ":$_target_name" ] + if (!defined(public_configs)) { + public_configs = [] + } + public_configs += [ ":$_framework_public_config" ] + } + 
+
+  group(_target_name + "+link_nested") {
+    forward_variables_from(invoker,
+                           [
+                             "public_configs",
+                             "testonly",
+                             "visibility",
+                           ])
+
+    # Depend only on the shared library. Nested code will be a dependency of
+    # the create_bundle target, which would be cyclic with depending on the
+    # framework itself. This is sufficient to link; for loading, a proper
+    # install_name should be set.
+    public_deps = [ ":$_link_shared_library_target" ]
+  }
+}
+
+set_defaults("mac_framework_bundle") {
+  configs = default_shared_library_configs
+}
+
+# Template to create a Mac executable application bundle.
+#
+# Arguments
+#
+#     package_type:
+#         (optional) string, the product package type to create. Options are:
+#             "app" to create a .app bundle (default)
+#             "xpc" to create an .xpc service bundle
+#             "bundle" to create a generic .bundle bundle
+#
+#     info_plist:
+#         (optional) string, path to the Info.plist file that will be used for
+#         the bundle.
+#
+#     info_plist_target:
+#         (optional) string, if the info_plist is generated from an action,
+#         rather than a regular source file, specify the target name in lieu
+#         of info_plist. The two arguments are mutually exclusive.
+#
+#     output_name:
+#         (optional) string, name of the generated app without the
+#         .app suffix. If omitted, defaults to target_name.
+#
+#     extra_configs:
+#         (optional) list of label, additional configs to apply to the
+#         executable target.
+#
+#     remove_configs:
+#         (optional) list of label, default configs to remove from the target.
+#
+#     extra_substitutions:
+#         (optional) string array, 'key=value' pairs for extra fields which are
+#         specified in a source Info.plist template.
+template("mac_app_bundle") {
+  _target_name = target_name
+  _output_name = target_name
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  _package_type = "app"
+  if (defined(invoker.package_type)) {
+    _package_type = invoker.package_type
+  }
+
+  if (_package_type == "app") {
+    _output_extension = "app"
+    _product_type = "com.apple.product-type.application"
+    _write_pkg_info = true
+  } else if (_package_type == "xpc") {
+    _output_extension = "xpc"
+    _product_type = "com.apple.product-type.xpc-service"
+    _write_pkg_info = false
+  } else if (_package_type == "bundle") {
+    _output_extension = "bundle"
+    _product_type = "com.apple.product-type.bundle"
+    _write_pkg_info = false
+  } else {
+    assert(false, "Unsupported package_type: " + _package_type)
+  }
+
+  _executable_target = target_name + "_executable"
+  _executable_bundle_data = _executable_target + "_bundle_data"
+
+  _info_plist_target = target_name + "_info_plist"
+
+  mac_info_plist(_info_plist_target) {
+    executable_name = _output_name
+    forward_variables_from(invoker,
+                           [
+                             "extra_substitutions",
+                             "info_plist",
+                             "info_plist_target",
+                             "testonly",
+                           ])
+  }
+
+  if (_write_pkg_info) {
+    _pkg_info_target = target_name + "_pkg_info"
+
+    action(_pkg_info_target) {
+      forward_variables_from(invoker, [ "testonly" ])
+      script = "//build/apple/write_pkg_info.py"
+      inputs = [ "//build/apple/plist_util.py" ]
+      sources = get_target_outputs(":$_info_plist_target")
+      outputs = [ "$target_gen_dir/$_pkg_info_target" ]
+      args = [ "--plist" ] + rebase_path(sources, root_build_dir) +
+             [ "--output" ] + rebase_path(outputs, root_build_dir)
+      deps = [ ":$_info_plist_target" ]
+    }
+  }
+
+  executable(_executable_target) {
+    visibility = [ ":$_executable_bundle_data" ]
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "assert_no_deps",
+                             "data_deps",
+                             "info_plist",
+                             "output_name",
+                             "visibility",
+                           ])
+    if (defined(extra_configs)) {
+      configs += extra_configs
+    }
+    if (defined(remove_configs)) {
+      configs -= remove_configs
+    }
+    output_name = _output_name
+    output_dir = "$target_out_dir/$_executable_target"
+  }
+
+  bundle_data(_executable_bundle_data) {
+    visibility = [ ":$_target_name" ]
+    forward_variables_from(invoker, [ "testonly" ])
+    sources = [ "$target_out_dir/$_executable_target/$_output_name" ]
+    outputs = [ "{{bundle_executable_dir}}/$_output_name" ]
+    public_deps = [ ":$_executable_target" ]
+  }
+
+  _info_plist_bundle_data = _info_plist_target + "_bundle_data"
+
+  bundle_data(_info_plist_bundle_data) {
+    forward_variables_from(invoker, [ "testonly" ])
+    visibility = [ ":$_target_name" ]
+    sources = get_target_outputs(":$_info_plist_target")
+    outputs = [ "{{bundle_contents_dir}}/Info.plist" ]
+    public_deps = [ ":$_info_plist_target" ]
+  }
+
+  if (_write_pkg_info) {
+    _pkg_info_bundle_data = _pkg_info_target + "_bundle_data"
+
+    bundle_data(_pkg_info_bundle_data) {
+      forward_variables_from(invoker, [ "testonly" ])
+      visibility = [ ":$_target_name" ]
+      sources = get_target_outputs(":$_pkg_info_target")
+      outputs = [ "{{bundle_contents_dir}}/PkgInfo" ]
+      public_deps = [ ":$_pkg_info_target" ]
+    }
+  }
+
+  create_bundle(_target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data_deps",
+                             "deps",
+                             "public_deps",
+                             "testonly",
+                           ])
+    if (!defined(deps)) {
+      deps = []
+    }
+    deps += [
+      ":$_executable_bundle_data",
+      ":$_info_plist_bundle_data",
+    ]
+    if (_write_pkg_info) {
+      deps += [ ":$_pkg_info_bundle_data" ]
+    }
+
+    if (enable_dsyms) {
+      data = [
+        "$root_out_dir/$_output_name.dSYM/Contents/Info.plist",
+        "$root_out_dir/$_output_name.dSYM/Contents/Resources/DWARF/$_output_name",
+      ]
+    }
+
+    product_type = _product_type
+    bundle_root_dir = "$root_out_dir/${_output_name}.${_output_extension}"
+    bundle_contents_dir = "$bundle_root_dir/Contents"
+    bundle_resources_dir = "$bundle_contents_dir/Resources"
+    bundle_executable_dir = "$bundle_contents_dir/MacOS"
+  }
+}
+
+# Template to package a loadable_module into a .plugin bundle.
+#
+# This takes no extra arguments that differ from a loadable_module.
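+#
+# Example (illustrative only; the names here are hypothetical):
+#
+#   mac_plugin_bundle("MyPlugin") {
+#     deps = [ ":my_plugin_sources" ]
+#   }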
+template("mac_plugin_bundle") { + assert(defined(invoker.deps), + "Dependencies must be specified for $target_name") + + _target_name = target_name + _loadable_module_target = _target_name + "_loadable_module" + _loadable_module_bundle_data = _loadable_module_target + "_bundle_data" + + _output_name = _target_name + if (defined(invoker.output_name)) { + _output_name = invoker.output_name + } + + loadable_module(_loadable_module_target) { + visibility = [ ":$_loadable_module_bundle_data" ] + forward_variables_from(invoker, + "*", + [ + "assert_no_deps", + "data_deps", + "output_name", + "visibility", + ]) + output_dir = "$target_out_dir" + output_name = _output_name + } + + bundle_data(_loadable_module_bundle_data) { + forward_variables_from(invoker, [ "testonly" ]) + visibility = [ ":$_target_name" ] + sources = [ "$target_out_dir/$_output_name.so" ] + outputs = [ "{{bundle_executable_dir}}/$_output_name" ] + public_deps = [ ":$_loadable_module_target" ] + } + + create_bundle(_target_name) { + forward_variables_from(invoker, + [ + "data_deps", + "deps", + "public_deps", + "testonly", + "visibility", + ]) + if (!defined(deps)) { + deps = [] + } + deps += [ ":$_loadable_module_bundle_data" ] + + if (enable_dsyms) { + data = [ + "$root_out_dir/$_output_name.so.dSYM/Contents/Info.plist", + "$root_out_dir/$_output_name.so.dSYM/Contents/Resources/DWARF/$_output_name.so", + ] + } + + bundle_root_dir = "$root_out_dir/$_output_name.plugin" + bundle_contents_dir = "$bundle_root_dir/Contents" + bundle_executable_dir = "$bundle_contents_dir/MacOS" + } +} diff --git a/config/mips.gni b/config/mips.gni new file mode 100644 index 000000000000..986ffcb45c49 --- /dev/null +++ b/config/mips.gni @@ -0,0 +1,67 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/v8_target_cpu.gni") + +# These are primarily relevant in current_cpu == "mips*" contexts, where +# MIPS code is being compiled. But they can also be relevant in the +# other contexts when the code will change its behavior based on the +# cpu it wants to generate code for. +declare_args() { + # MIPS MultiMedia Instruction compilation flag. + mips_use_mmi = false +} + +if (current_cpu == "mipsel" || v8_current_cpu == "mipsel" || + current_cpu == "mips" || v8_current_cpu == "mips") { + declare_args() { + # MIPS arch variant. Possible values are: + # "r1" + # "r2" + # "r6" + # "loongson3" + mips_arch_variant = "r1" + + # MIPS DSP ASE revision. Possible values are: + # 0: unavailable + # 1: revision 1 + # 2: revision 2 + mips_dsp_rev = 0 + + # MIPS SIMD Arch compilation flag. + mips_use_msa = false + + # MIPS floating-point ABI. Possible values are: + # "hard": sets the GCC -mhard-float option. + # "soft": sets the GCC -msoft-float option. + mips_float_abi = "hard" + + # MIPS32 floating-point register width. Possible values are: + # "fp32": sets the GCC -mfp32 option. + # "fp64": sets the GCC -mfp64 option. + # "fpxx": sets the GCC -mfpxx option. + mips_fpu_mode = "fp32" + } +} else if (current_cpu == "mips64el" || v8_current_cpu == "mips64el" || + current_cpu == "mips64" || v8_current_cpu == "mips64") { + # MIPS arch variant. Possible values are: + # "r2" + # "r6" + # "loongson3" + if (current_os == "android" || target_os == "android") { + declare_args() { + mips_arch_variant = "r6" + + # MIPS SIMD Arch compilation flag. 
+      mips_use_msa = true
+    }
+  } else {
+    declare_args() {
+      mips_arch_variant = "r2"
+
+      # MIPS SIMD Arch compilation flag.
+      mips_use_msa = false
+    }
+  }
+}
diff --git a/config/nacl/BUILD.gn b/config/nacl/BUILD.gn
new file mode 100644
index 000000000000..d60994291e4e
--- /dev/null
+++ b/config/nacl/BUILD.gn
@@ -0,0 +1,132 @@
+# Copyright (c) 2014 The Native Client Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/nacl/config.gni")
+
+# Native Client Definitions
+config("nacl_defines") {
+  if (is_linux || is_chromeos || is_android || is_nacl) {
+    defines = [
+      "_POSIX_C_SOURCE=199506",
+      "_XOPEN_SOURCE=600",
+      "_GNU_SOURCE=1",
+      "__STDC_LIMIT_MACROS=1",
+    ]
+  } else if (is_win) {
+    defines = [ "__STDC_LIMIT_MACROS=1" ]
+  }
+
+  if (current_cpu == "pnacl") {
+    # TODO: Remove the following definition once NACL_BUILD_ARCH and
+    # NACL_BUILD_SUBARCH are defined by the PNaCl toolchain.
+    defines += [ "NACL_BUILD_ARCH=pnacl" ]
+  }
+}
+
+config("nexe_defines") {
+  defines = [
+    "DYNAMIC_ANNOTATIONS_ENABLED=1",
+    "DYNAMIC_ANNOTATIONS_PREFIX=NACL_",
+  ]
+}
+
+config("nacl_warnings") {
+  if (is_win) {
+    # Some NaCl code uses forward declarations of static const variables,
+    # with initialized definitions later on. (The alternative would be
+    # many, many more forward declarations of everything used in that
+    # const variable's initializer before the definition.) The Windows
+    # compiler is too stupid to notice that there is an initializer later
+    # in the file, and warns about the forward declaration.
+    cflags = [ "/wd4132" ]
+  }
+}
+
+config("nacl_static_libstdc++") {
+  # The sysroot of linux x86 bots can have a different version of libstdc++
+  # than the one that is on the bots natively. Linking dynamically against
+  # libstdc++ can then lead to linking against symbols that are not found when
+  # running the executable.
+  # Therefore, link statically instead.
+  if (is_linux && current_cpu == "x86") {
+    ldflags = [ "-static-libstdc++" ]
+  }
+}
+
+# The base target that all targets in the NaCl build should depend on.
+# This allows configs to be modified for everything in the NaCl build, even
+# when the NaCl build is composed into the Chrome build. (GN has no
+# functionality to add flags to everything in //native_client, so having a
+# base target works around that limitation.)
+source_set("nacl_base") {
+  public_configs = [
+    ":nacl_defines",
+    ":nacl_warnings",
+    ":nacl_static_libstdc++",
+  ]
+  if (current_os == "nacl") {
+    public_configs += [ ":nexe_defines" ]
+  }
+}
+
+config("compiler") {
+  configs = []
+  cflags = []
+  ldflags = []
+  libs = []
+
+  if (is_clang && current_cpu != "pnacl") {
+    # -no-integrated-as is the default in nacl-clang for historical
+    # compatibility with inline assembly code and so forth. But there
+    # are no such cases in Chromium code, and -integrated-as is nicer in
+    # general. Moreover, the IRT must be built using LLVM's assembler
+    # on x86-64 to preserve sandbox base address hiding. Use it
+    # everywhere for consistency (and possibly quicker builds).
+    cflags += [ "-integrated-as" ]
+  }
+
+  asmflags = cflags
+}
+
+config("compiler_codegen") {
+  cflags = []
+
+  if (is_nacl_irt) {
+    cflags += [
+      # A debugger should be able to unwind IRT call frames. This is
+      # the default behavior on x86-64 and when compiling C++ with
+      # exceptions enabled; the change is for the benefit of x86-32 C.
+      # The frame pointer is unnecessary when unwind tables are used.
+ "-fasynchronous-unwind-tables", + "-fomit-frame-pointer", + ] + + if (current_cpu == "x86") { + # The x86-32 IRT needs to be callable with an under-aligned + # stack; so we disable SSE instructions, which can fault on + # misaligned addresses. See + # https://code.google.com/p/nativeclient/issues/detail?id=3935 + cflags += [ + "-mstackrealign", + "-mno-sse", + ] + } + } + + asmflags = cflags +} + +config("irt_optimize") { + cflags = [ + # Optimize for space, keep the IRT nexe small. + "-Os", + + # These are omitted from non-IRT libraries to keep the libraries + # themselves small. + "-ffunction-sections", + "-fdata-sections", + ] + + ldflags = [ "-Wl,--gc-sections" ] +} diff --git a/config/nacl/config.gni b/config/nacl/config.gni new file mode 100644 index 000000000000..c8062b41a818 --- /dev/null +++ b/config/nacl/config.gni @@ -0,0 +1,53 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +declare_args() { + # Native Client supports multiple toolchains: + # - nacl_glibc, based on gcc and glibc. + # - pnacl_newlib, based on llvm 3.7 and newlib (default). + # - saigo_newlib, based on llvm 12+ and newlib. + + # True if nacl_glibc is used. + is_nacl_glibc = false + + # True if saigo_newlib is used. + is_nacl_saigo = false +} + +nacl_toolchain_dir = "//native_client/toolchain/${host_os}_x86" + +if (is_nacl_glibc) { + if (current_cpu == "x86" || current_cpu == "x64") { + nacl_toolchain_package = "nacl_x86_glibc" + } else if (current_cpu == "arm") { + nacl_toolchain_package = "nacl_arm_glibc" + } +} else { + nacl_toolchain_package = "pnacl_newlib" +} + +if (current_cpu == "pnacl") { + _nacl_tuple = "pnacl" +} else if (current_cpu == "x86" || current_cpu == "x64") { + _nacl_tuple = "x86_64-nacl" +} else if (current_cpu == "arm") { + _nacl_tuple = "arm-nacl" +} else if (current_cpu == "mipsel") { + _nacl_tuple = "mipsel-nacl" +} else { + # In order to allow this file to be included unconditionally + # from build files that can't depend on //components/nacl/features.gni + # we provide a dummy value that should be harmless if nacl isn't needed. + # If nacl *is* needed this will result in a real error, indicating that + # people need to set the toolchain path correctly. + _nacl_tuple = "unknown" +} + +nacl_toolchain_bindir = "${nacl_toolchain_dir}/${nacl_toolchain_package}/bin" +nacl_toolchain_tooldir = + "${nacl_toolchain_dir}/${nacl_toolchain_package}/${_nacl_tuple}" +nacl_toolprefix = "${nacl_toolchain_bindir}/${_nacl_tuple}-" + +nacl_irt_toolchain = "//build/toolchain/nacl:irt_" + current_cpu +is_nacl_irt = current_toolchain == nacl_irt_toolchain diff --git a/config/nacl/host_toolchain.gni b/config/nacl/host_toolchain.gni new file mode 100644 index 000000000000..09b93b14a135 --- /dev/null +++ b/config/nacl/host_toolchain.gni @@ -0,0 +1,18 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# The compiler for the trusted nacl_helper_bootstrap binary. 
+nacl_bootstrap_compiler = "g++" +if (default_toolchain == "//build/toolchain/cros:target") { + import("//build/toolchain/cros_toolchain.gni") + if (target_cpu == "arm64" && current_cpu == "arm") { + nacl_bootstrap_compiler = cros_nacl_helper_arm32_cxx + } else { + nacl_bootstrap_compiler = cros_target_cxx + } +} else if (current_cpu == "arm" && !is_android) { + nacl_bootstrap_compiler = "arm-linux-gnueabihf-g++" +} else if (current_cpu == "mipsel" && !is_android) { + nacl_bootstrap_compiler = "mipsel-linux-gnu-g++" +} diff --git a/config/nacl/rules.gni b/config/nacl/rules.gni new file mode 100644 index 000000000000..a15d32622c02 --- /dev/null +++ b/config/nacl/rules.gni @@ -0,0 +1,130 @@ +# Copyright 2015 The Native Client Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/nacl/config.gni") + +# Generate a nmf file +# +# Native Client Manifest (nmf) is a JSON file that tells the browser where to +# download and load Native Client application files and libraries. +# +# Variables: +# executables: .nexe/.pexe/.bc executables to generate nmf for +# lib_prefix: path to prepend to shared libraries in the nmf +# nmf: the name and the path of the output file +# nmfflags: additional flags for the nmf generator +# stage_dependencies: directory for staging libraries +template("generate_nmf") { + assert(defined(invoker.executables), "Must define executables") + assert(defined(invoker.nmf), "Must define nmf") + + action(target_name) { + forward_variables_from(invoker, + [ + "deps", + "data_deps", + "executables", + "lib_prefix", + "nmf", + "nmfflags", + "public_deps", + "stage_dependencies", + "testonly", + "visibility", + ]) + if (!defined(nmfflags)) { + nmfflags = [] + } + + # TODO(phosek): Remove this conditional once + # https://bugs.chromium.org/p/nativeclient/issues/detail?id=4339 is + # resolved. + if (current_cpu == "pnacl") { + objdump = rebase_path("${nacl_toolchain_bindir}/x86_64-nacl-objdump") + } else { + objdump = rebase_path("${nacl_toolprefix}objdump") + } + if (host_os == "win") { + objdump += ".exe" + } + + script = "//native_client_sdk/src/tools/create_nmf.py" + inputs = [ objdump ] + sources = executables + outputs = [ nmf ] + if (is_nacl_glibc) { + if (defined(stage_dependencies)) { + nmfflags += [ "--stage-dependencies=" + + rebase_path(stage_dependencies, root_build_dir) ] + lib_path = stage_dependencies + } else { + lib_path = root_build_dir + } + if (defined(lib_prefix)) { + nmfflags += [ "--lib-prefix=" + lib_prefix ] + lib_path += "/${lib_prefix}" + } + + # Starts empty so the code below can use += everywhere. + data = [] + + nmfflags += + [ "--library-path=" + rebase_path(root_out_dir, root_build_dir) ] + + # NOTE: There is no explicit dependency for the lib directory + # (lib32 and lib64 for x86/x64) created in the product directory. + # They are created as a side-effect of nmf creation. + if (current_cpu != "x86" && current_cpu != "x64") { + nmfflags += + [ "--library-path=" + + rebase_path("${nacl_toolchain_tooldir}/lib", root_build_dir) ] + if (current_cpu == "arm") { + data += [ "${lib_path}/libarm/" ] + } else { + data += [ "${lib_path}/lib/" ] + } + } else { + # For x86-32, the lib/ directory is called lib32/ instead. 
+        if (current_cpu == "x86") {
+          nmfflags +=
+              [ "--library-path=" +
+                rebase_path("${nacl_toolchain_tooldir}/lib32", root_build_dir) ]
+          data += [ "${lib_path}/lib32/" ]
+        }
+
+        # x86-32 Windows needs to build both x86-32 and x86-64 NaCl
+        # binaries into the same nmf covering both architectures. That
+        # gets handled at a higher level (see the nacl_test_data template),
+        # so a single generate_nmf invocation gets both x86-32 and x86-64
+        # nexes listed in executables.
+        if (current_cpu == "x64" || target_os == "win") {
+          # For x86-64, the lib/ directory is called lib64/ instead
+          # when copied by create_nmf.py.
+          glibc_tc = "//build/toolchain/nacl:glibc"
+          assert(current_toolchain == "${glibc_tc}_${current_cpu}")
+          if (current_cpu == "x64") {
+            x64_out_dir = root_out_dir
+          } else {
+            x64_out_dir = get_label_info(":${target_name}(${glibc_tc}_x64)",
+                                         "root_out_dir")
+          }
+          nmfflags += [
+            "--library-path=" + rebase_path(x64_out_dir, root_build_dir),
+            "--library-path=" +
+                rebase_path("${nacl_toolchain_tooldir}/lib", root_build_dir),
+          ]
+          data += [ "${lib_path}/lib64/" ]
+        }
+      }
+    }
+    args = [
+             "--no-default-libpath",
+             "--objdump=" + rebase_path(objdump, root_build_dir),
+             "--output=" + rebase_path(nmf, root_build_dir),
+           ] + nmfflags + rebase_path(sources, root_build_dir)
+    if (is_nacl_glibc && current_cpu == "arm") {
+      deps += [ "//native_client/src/untrusted/elf_loader:elf_loader" ]
+    }
+  }
+}
diff --git a/config/ozone.gni b/config/ozone.gni
new file mode 100644
index 000000000000..8bb512ad634b
--- /dev/null
+++ b/config/ozone.gni
@@ -0,0 +1,132 @@
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chromecast_build.gni")
+import("//build/config/chromeos/ui_mode.gni")
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+  # Indicates if Ozone is enabled. Ozone is a low-level library layer for
+  # Linux that does not require X11.
+  use_ozone = is_chromeos || is_fuchsia || is_linux
+}
+
+declare_args() {
+  # Ozone extra platforms file path. Can be overridden to build out-of-tree
+  # ozone platforms.
+  ozone_extra_path = "//build/config/ozone_extra.gni"
+
+  # Select platforms automatically. Turn this off for manual control.
+  ozone_auto_platforms = use_ozone
+
+  # TODO(petermcneeley): Backwards compatibility support for VM images.
+  # Remove when deprecated. (https://crbug.com/1122009)
+  ozone_platform_gbm = -1
+
+  # Enable explicit apitrace (https://apitrace.github.io) loading.
+  # This requires an apitrace library with additional bindings.
+  # See the ChromeOS package for details:
+  # https://chromium-review.googlesource.com/c/chromiumos/overlays/chromiumos-overlay/+/2659419
+  # Chrome will not start without an apitrace.so library.
+  # The trace will be saved to /tmp/gltrace.dat by default. You can
+  # override this at run time with the TRACE_FILE= environment variable.
+  enable_opengl_apitrace = false
+}
+
+declare_args() {
+  # The platform that will be used at runtime by default. This can be
+  # overridden with the command line flag --ozone-platform=<platform>.
+  ozone_platform = ""
+
+  # Compile the 'cast' platform.
+  ozone_platform_cast = false
+
+  # Compile the 'drm' platform.
+  ozone_platform_drm = false
+
+  # Compile the 'headless' platform.
+  ozone_platform_headless = false
+
+  # Compile the 'scenic' platform.
+  ozone_platform_scenic = false
+
+  # Compile the 'flatland' platform.
+  ozone_platform_flatland = false
+
+  # Compile the 'x11' platform.
+  ozone_platform_x11 = false
+
+  # Compile the 'wayland' platform.
+  ozone_platform_wayland = false
+
+  if (ozone_auto_platforms) {
+    # Use headless as the default platform unless modified below.
+    ozone_platform = "headless"
+    ozone_platform_headless = true
+
+    if (is_cast_audio_only) {
+      # Just use headless for audio-only Cast platforms.
+    } else if (is_castos) {
+      # Enable the Cast ozone platform on all video CastOS builds.
+      ozone_platform_cast = true
+
+      # For visual desktop Chromecast builds, override the default "headless"
+      # platform with --ozone-platform=x11.
+      # NOTE: The CQ is one such case.
+      if (target_os == "linux" &&
+          (target_cpu == "x86" || target_cpu == "x64")) {
+        ozone_platform_x11 = true
+      } else {
+        ozone_platform = "cast"
+      }
+    } else if (is_chromeos_ash) {
+      ozone_platform = "x11"
+      ozone_platform_drm = true
+      ozone_platform_x11 = true
+    } else if (is_chromeos_lacros) {
+      ozone_platform = "wayland"
+      ozone_platform_wayland = true
+    } else if (is_linux) {
+      ozone_platform = "x11"
+      ozone_platform_wayland = true
+      ozone_platform_x11 = true
+    } else if (is_fuchsia) {
+      ozone_platform = "flatland"
+      ozone_platform_scenic = true
+      ozone_platform_flatland = true
+    }
+  }
+
+  # TODO(petermcneeley): Backwards compatibility support for VM images.
+  # Remove when deprecated. (https://crbug.com/1122009)
+  if (ozone_platform_gbm != -1) {
+    ozone_platform_drm = ozone_platform_gbm
+  }
+}
+
+import(ozone_extra_path)
+
+_ozone_extra_directory = get_path_info(ozone_extra_path, "dir")
+
+# Extra paths to add to targets' visibility lists.
+ozone_external_platform_visibility = [ "$_ozone_extra_directory/*" ]
+
+if (is_a_target_toolchain) {
+  assert(use_ozone || !(ozone_platform_cast || ozone_platform_drm ||
+                            ozone_platform_flatland ||
+                            ozone_platform_headless || ozone_platform_x11 ||
+                            ozone_platform_wayland || ozone_platform_scenic),
+         "Must set use_ozone to select ozone platforms")
+}
+
+# TODO(petermcneeley): Backwards compatibility support for VM images.
+# Remove when deprecated. (https://crbug.com/1122009)
+
+assert(ozone_platform_gbm == -1 || ozone_platform_drm == ozone_platform_gbm)
+
+ozone_platform_gbm = ozone_platform_drm
+
+if (ozone_platform == "gbm") {
+  ozone_platform = "drm"
+}
diff --git a/config/ozone_extra.gni b/config/ozone_extra.gni
new file mode 100644
index 000000000000..cbbca1efe223
--- /dev/null
+++ b/config/ozone_extra.gni
@@ -0,0 +1,33 @@
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This list contains the name of external platforms that are passed to the
+# --ozone-platform command line argument or used for the ozone_platform build
+# config. For example ozone_external_platforms = [ "foo1", "foo2", ... ]
+ozone_external_platforms = []
+
+# This list contains dependencies for external platforms. Typically, the
+# Ozone implementations are placed into ui/ozone/platform/ and so this will
+# look something like:
+# ozone_external_platform_deps = [ "platform/foo1", "platform/foo_2", ... ]
+ozone_external_platform_deps = []
+
+# If a platform has unit tests, the corresponding source_set can be listed
+# here so that they get included into ozone_unittests.
+# ozone_external_platform_test_deps = [ "platform/foo1:foo1_unittests", ... ]
+ozone_external_platform_test_deps = []
+
+# If a platform has integration tests, the corresponding source_set can be
+# listed here so that they get included into ozone_integration_tests.
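+# ozone_external_platform_integration_test_deps = [ "platform/foo1:foo1_integration_tests", ... ]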
+ozone_external_platform_integration_test_deps = []
+
+# If a platform has test support files for ui, the corresponding source_set
+# can be listed here so that they get included into ui_test_support.
+# ozone_external_platform_ui_test_support_deps = [ "platform/foo1:ui_test_support", ... ]
+ozone_external_platform_ui_test_support_deps = []
+
+# If a platform has test support for interactive_ui_tests, the corresponding
+# source_set can be listed here so that it can be included into
+# interactive_ui_tests.
+ozone_external_interactive_ui_tests_deps = []
diff --git a/config/pch.gni b/config/pch.gni
new file mode 100644
index 000000000000..bc4e9e6d150b
--- /dev/null
+++ b/config/pch.gni
@@ -0,0 +1,15 @@
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/rbe.gni")
+
+declare_args() {
+  # Precompiled header support is available by default, except when using a
+  # distributed build system (like goma or rbe) or when doing official
+  # builds. On Linux it slows down the build, so it is not enabled by
+  # default.
+  enable_precompiled_headers =
+      !is_official_build && !(use_goma || use_remoteexec) && !is_linux
+}
diff --git a/config/posix/BUILD.gn b/config/posix/BUILD.gn
new file mode 100644
index 000000000000..8312d7ecf62c
--- /dev/null
+++ b/config/posix/BUILD.gn
@@ -0,0 +1,59 @@
+# Copyright 2015 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+
+# This build configuration is used by both Fuchsia and POSIX systems.
+assert(is_posix || is_fuchsia)
+
+group("posix") {
+  visibility = [ "//:optimize_gn_gen" ]
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Posix-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+  asmflags = []
+  cflags = []
+  cflags_c = []
+  cflags_cc = []
+  cflags_objc = []
+  cflags_objcc = []
+  defines = []
+  ldflags = []
+
+  if (!is_apple && sysroot != "" && current_os != "zos") {
+    # Pass the sysroot to all C compiler variants, the assembler, and linker.
+    sysroot_flags = [ "--sysroot=" + rebase_path(sysroot, root_build_dir) ]
+    if (is_linux || is_chromeos) {
+      # This is here so that all files get recompiled after a sysroot roll and
+      # when turning the sysroot on or off. (Defines are passed via the
+      # command line, and the build system rebuilds things when their command
+      # line changes.) Nothing should ever read this define.
+      sysroot_key =
+          exec_script("//build/linux/sysroot_scripts/install-sysroot.py",
+                      [ "--print-key=$current_cpu" ],
+                      "trim string",
+                      [ "//build/linux/sysroot_scripts/sysroots.json" ])
+      defines += [ "CR_SYSROOT_KEY=$sysroot_key" ]
+    }
+    asmflags += sysroot_flags
+    ldflags += sysroot_flags
+
+    # When use_custom_libcxx=true, some -isystem flags get passed to
+    # cflags_cc to set up libc++ include paths. We want to make sure
+    # the sysroot includes take lower precedence than the libc++
+    # ones, so they must appear later in the command line. However,
+    # the gn reference states "These variant-specific versions of
+    # cflags* will be appended on the compiler command line after
+    # 'cflags'."
+    # Because of this, we must set the sysroot flags for all cflags variants
+    # instead of using 'cflags' directly.
+    cflags_c += sysroot_flags
+    cflags_cc += sysroot_flags
+    cflags_objc += sysroot_flags
+    cflags_objcc += sysroot_flags
+  }
+}
diff --git a/config/profiling/OWNERS b/config/profiling/OWNERS
new file mode 100644
index 000000000000..ea1bcbbb28cc
--- /dev/null
+++ b/config/profiling/OWNERS
@@ -0,0 +1 @@
+pasthana@google.com
\ No newline at end of file
diff --git a/config/profiling/profiling.gni b/config/profiling/profiling.gni
new file mode 100644
index 000000000000..d30ef7456990
--- /dev/null
+++ b/config/profiling/profiling.gni
@@ -0,0 +1,14 @@
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/compiler/pgo/pgo.gni")
+import("//build/config/coverage/coverage.gni")
+
+declare_args() {
+  use_clang_profiling =
+      is_a_target_toolchain && (use_clang_coverage || chrome_pgo_phase == 1)
+}
+
+assert(!use_clang_profiling || is_clang,
+       "Clang Source-based profiling requires clang.")
diff --git a/config/python.gni b/config/python.gni
new file mode 100644
index 000000000000..f3431fcc0dc5
--- /dev/null
+++ b/config/python.gni
@@ -0,0 +1,161 @@
+# Copyright 2018 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Creates a group() that lists Python sources as |data|.
+# Having such targets serves two purposes:
+# 1) Causes files to be included in runtime_deps, so that they are uploaded to
+#    swarming when running tests remotely.
+# 2) Causes "gn analyze" to know about all Python inputs so that tests will be
+#    re-run when relevant Python files change.
+#
+# All non-trivial Python scripts should use a "pydeps" file to track their
+# sources. To create a .pydeps file for a target in //example:
+#
+#   build/print_python_deps.py \
+#       --root example \
+#       --output example/$target_name.pydeps \
+#       path/to/your/script.py
+#
+# Keep the .pydeps file up-to-date by adding to //PRESUBMIT.py under one of:
+#   _ANDROID_SPECIFIC_PYDEPS_FILES, _GENERIC_PYDEPS_FILES
+#
+# Variables
+#   pydeps_file: Path to .pydeps file to read sources from (optional).
+#   data: Additional files to include in data. E.g. non-.py files needed by
+#       the library, or .py files that are conditionally / lazily imported.
+#
+# Example
+#   python_library("my_library_py") {
+#     pydeps_file = "my_library.pydeps"
+#     data = [ "foo.dat" ]
+#   }
+template("python_library") {
+  group(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data_deps",
+                             "deps",
+                             "testonly",
+                             "visibility",
+                           ])
+
+    if (defined(invoker.pydeps_file)) {
+      # Read and filter out comments.
+      _pydeps_lines = read_file(invoker.pydeps_file, "list lines")
+      _pydeps_entries = filter_exclude(_pydeps_lines, [ "#*" ])
+
+      # Dependencies are listed relative to the pydeps file directory, but the
+      # data parameter expects paths that are relative to the current BUILD.gn.
+      _script_dir = get_path_info(invoker.pydeps_file, "dir")
+      _rebased_pydeps_entries = rebase_path(_pydeps_entries, ".", _script_dir)
+
+      # Even though the .pydeps file is not used at runtime, it must be added
+      # so that "gn analyze" will mark the target as changed when .py files
+      # are removed but none are added or modified.
+ data = _rebased_pydeps_entries + [ invoker.pydeps_file ] + } else { + data = [] + } + if (defined(invoker.data)) { + data += invoker.data + } + } +} + +# A template used for actions that execute a Python script, which has an +# associated .pydeps file. In other words: +# +# - This is very similar to just an action(), except that |script| must point +# to a Python script (e.g. "//build/.../foo.py") that has a corresponding +# .pydeps file in the source tree (e.g. "//build/.../foo.pydeps"). +# +# - The .pydeps file contains a list of python dependencies (imports really) +# and is generated _manually_ by using a command like: +# +# build/print_python_deps.py --inplace build/android/gyp/foo.py +# +# Example +# action_with_pydeps("create_foo") { +# script = "myscript.py" +# args = [...] +# } +template("action_with_pydeps") { + action(target_name) { + # Ensure that testonly and visibility are forwarded + # explicitly, since this performs recursive scope lookups, which is + # required to ensure their definition from scopes above the caller are + # properly handled. All other variables are forwarded with "*", which + # doesn't perform recursive lookups at all. See https://crbug.com/862232 + forward_variables_from(invoker, + [ + "testonly", + "visibility", + ]) + forward_variables_from(invoker, + "*", + [ + "testonly", + "visibility", + ]) + + # Read and filter out comments. + # Happens every time the template is instantiated, but benchmarking shows no + # perceivable impact on overall 'gn gen' speed. + _pydeps_file = invoker.script + "deps" + + _pydeps_lines = + read_file(_pydeps_file, "list lines") # https://crbug.com/1102058 + _pydeps_entries = filter_exclude(_pydeps_lines, [ "#*" ]) + + if (!defined(inputs)) { + inputs = [] + } + + # Dependencies are listed relative to the script directory, but inputs + # expects paths that are relative to the current BUILD.gn + _script_dir = get_path_info(_pydeps_file, "dir") + inputs += rebase_path(_pydeps_entries, ".", _script_dir) + } +} + +template("action_foreach_with_pydeps") { + action_foreach(target_name) { + # Ensure that testonly and visibility are forwarded + # explicitly, since this performs recursive scope lookups, which is + # required to ensure their definition from scopes above the caller are + # properly handled. All other variables are forwarded with "*", which + # doesn't perform recursive lookups at all. See https://crbug.com/862232 + forward_variables_from(invoker, + [ + "testonly", + "visibility", + ]) + forward_variables_from(invoker, + "*", + [ + "testonly", + "visibility", + ]) + + # Read and filter out comments. + # Happens every time the template is instantiated, but benchmarking shows no + # perceivable impact on overall 'gn gen' speed. + if (defined(invoker.deps_file)) { + _pydeps_file = invoker.deps_file + } else { + _pydeps_file = invoker.script + "deps" + } + _pydeps_lines = read_file(_pydeps_file, "list lines") + _pydeps_entries = filter_exclude(_pydeps_lines, [ "#*" ]) + + if (!defined(inputs)) { + inputs = [] + } + + # Dependencies are listed relative to the script directory, but inputs + # expects paths that are relative to the current BUILD.gn + _script_dir = get_path_info(script, "dir") + inputs += rebase_path(_pydeps_entries, ".", _script_dir) + } +} diff --git a/config/riscv.gni b/config/riscv.gni new file mode 100644 index 000000000000..b9597a0a9d66 --- /dev/null +++ b/config/riscv.gni @@ -0,0 +1,19 @@ +# Copyright 2023 The Chromium Authors. All rights reserved. 
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/v8_target_cpu.gni")
+
+if (current_cpu == "riscv64" || v8_current_cpu == "riscv64") {
+  declare_args() {
+    # RISCV Vector extension compilation flag.
+    riscv_use_rvv = false
+
+    # RISCV Vector extension VLEN. Possible values are:
+    #   128
+    #   256
+    #   512
+    #   1024
+    riscv_rvv_vlen = 128
+  }
+}
diff --git a/config/rts.gni b/config/rts.gni
new file mode 100644
index 000000000000..677b3fa2fe46
--- /dev/null
+++ b/config/rts.gni
@@ -0,0 +1,5 @@
+declare_args() {
+  # For more info about RTS, please see
+  # //docs/testing/regression-test-selection.md
+  use_rts = false
+}
diff --git a/config/rust.gni b/config/rust.gni
new file mode 100644
index 000000000000..b05e37015287
--- /dev/null
+++ b/config/rust.gni
@@ -0,0 +1,310 @@
+# Copyright 2021 The Chromium Project. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chrome_build.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/toolchain/toolchain.gni")
+
+if (is_android) {
+  import("//build/config/android/config.gni")
+}
+
+declare_args() {
+  # Whether to allow Rust code to be part of the Chromium *build process*.
+  # This can be used to create Rust test binaries, even if the flag below
+  # is false.
+  # This only applies to Chromium itself, so the build_with_chromium check
+  # should not be removed.
+  # TODO(crbug.com/1386212): Mac
+  # TODO(crbug.com/1271215): Windows
+  # TODO(crbug.com/1426472): use_clang_coverage
+  # TODO(crbug.com/1427362): using_sanitizer
+  # TODO(crbug.com/1427364): target_cpu != "x86"
+  # There is no specific bug for !is_official_build or other platforms, since
+  # this is just a matter of rolling things out slowly and carefully and there
+  # may be no actual bugs there.
+  enable_rust = (is_linux || is_android) && !is_official_build &&
+                !using_sanitizer && target_cpu != "x86" &&
+                !use_clang_coverage && is_clang && build_with_chromium
+
+  # As we incrementally enable Rust on mainstream builders, we want to enable
+  # the toolchain (by switching 'enable_rust' to true) while still disabling
+  # almost all Rust features. Yet we still want to have some builders with
+  # all Rust features enabled.
+  enable_all_rust_features = false
+
+  # Use the Rust toolchain built in-tree. See //tools/rust.
+  use_chromium_rust_toolchain = true
+
+  # Build libstd locally with GN and use that instead of the prebuilts, where
+  # applicable. If this is false the prebuilt libstd will always be used. If
+  # true, the local build is only used with the Chromium Rust toolchain and
+  # only on supported platforms and GN targets.
+  enable_local_libstd = true
+
+  # Chromium currently has a Rust toolchain for Android and Linux, but
+  # if you wish to experiment on more platforms you can use this
+  # argument to specify an alternative toolchain.
+  # This should be an absolute path to a directory
+  # containing a 'bin' directory and others. Commonly
+  # <home dir>/.rustup/toolchains/nightly-<version>-<platform>.
+  rust_sysroot_absolute = ""
+
+  # If you're using an external Rust toolchain, set this to the output
+  # of rustc -V.
+  rustc_version = ""
+
+  # If you're using a Rust toolchain as specified by rust_sysroot_absolute,
+  # you can specify whether it supports nacl here.
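+  # For example (illustrative values only, not from the original file), an
+  # args.gn pointing at an external toolchain might set:
+  #   rust_sysroot_absolute = "/home/dev/.rustup/toolchains/nightly-x86_64-unknown-linux-gnu"
+  #   rustc_version = "rustc 1.72.0-nightly (abc123def 2023-06-20)"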
+ rust_toolchain_supports_nacl = false + + # Any extra std rlibs in your Rust toolchain, relative to the standard + # Rust toolchain. Typically used with 'rust_sysroot_absolute' + added_rust_stdlib_libs = [] + + # Any removed std rlibs in your Rust toolchain, relative to the standard + # Rust toolchain. Typically used with 'rust_sysroot_absolute' + removed_rust_stdlib_libs = [] + + # Non-rlib libs provided in the toolchain sysroot. Usually this is empty, but + # e.g. the Android Rust Toolchain provides a libunwind.a that rustc expects. + extra_sysroot_libs = [] + + # Use goma for Rust builds. Experimental. The only known problem is + # b/193072381, but then again, we don't expect a build speedup before much + # more work is done. + use_goma_rust = false + + # The host toolchain to use when you don't want sanitizers enabled. By default + # it is the regular toolchain, but when that toolchain has sanitizers, then + # this variable is changed to avoid them. + host_toolchain_no_sanitizers = host_toolchain +} + +declare_args() { + # Use a separate declare_args so these variables' defaults can depend on the + # ones above. + + # When true, uses the locally-built std in all Rust targets. + # + # As an internal implementation detail this can be overridden on specific + # targets (e.g. to run build.rs scripts while building std), but this + # generally should not be done. + use_local_std_by_default = enable_local_libstd && use_chromium_rust_toolchain + + # Individual Rust components. + + # Conversions between Rust types and C++ types. + enable_rust_base_conversions = enable_all_rust_features + + # The base::JSONReader implementation. Requires base conversions. + enable_rust_json = enable_all_rust_features + + # Support for chrome://crash-rust to check crash dump collection works. + enable_rust_crash = enable_all_rust_features + + # Support for Rust mojo bindings. + enable_rust_mojo = enable_all_rust_features + + # Support for the 'gnrt' Rust tool. + enable_rust_gnrt = enable_all_rust_features + + # Rust gtest interop + enable_rust_gtest_interop = enable_all_rust_features + + # Enable Boringssl Rust bindings generation + enable_rust_boringssl = enable_all_rust_features +} + +# Platform support for "official" toolchains (Android or Chromium) +android_toolchain_supports_platform = + (!is_nacl && + (is_android && (current_cpu == "arm" || current_cpu == "arm64" || + current_cpu == "x64" || current_cpu == "x86"))) || + (is_linux && current_cpu == "x64") +chromium_toolchain_supports_platform = !is_nacl +custom_toolchain_supports_platform = !is_nacl || rust_toolchain_supports_nacl + +toolchain_has_rust = + enable_rust && + ((use_chromium_rust_toolchain && chromium_toolchain_supports_platform) || + (!use_chromium_rust_toolchain && android_toolchain_supports_platform) || + (rust_sysroot_absolute != "" && custom_toolchain_supports_platform)) + +# The rustc_revision is used to introduce a dependency on the toolchain version +# (so e.g. rust targets are rebuilt, and the standard library is re-copied when +# the toolchain changes). It is left empty for custom toolchains. +rustc_revision = "" +if (toolchain_has_rust) { + if (use_chromium_rust_toolchain) { + update_rust_args = [ "--print-package-version" ] + rustc_revision = exec_script("//tools/rust/update_rust.py", + update_rust_args, + "trim string") + } else if (rust_sysroot_absolute != "") { + rustc_revision = rustc_version + } else { + # Android toolchain version. 
+ rustc_revision = "rustc 1.64.0-dev (Android Rust Toolchain version 9099361)" + } +} + +# TODO(crbug.com/1278030): To build unit tests for Android we need to build +# them as a dylib and put them into an APK. We should reuse all the same logic +# for gtests from the `//testing/test:test` template. +can_build_rust_unit_tests = toolchain_has_rust && !is_android + +# Whether to build chrome://crash/rust support. +build_rust_crash = toolchain_has_rust && enable_rust_crash + +# We want to store rust_sysroot as a source-relative variable for ninja +# portability. In practice if an external toolchain was specified, it might +# be an absolute path, but we'll do our best. +if (enable_rust) { + if (rust_sysroot_absolute != "") { + rust_sysroot = get_path_info(rust_sysroot_absolute, "abspath") + use_unverified_rust_toolchain = true + } else if (use_chromium_rust_toolchain) { + rust_sysroot = "//third_party/rust-toolchain" + use_unverified_rust_toolchain = false + } else { + if (host_os != "linux") { + assert(false, + "Attempt to use Android Rust toolchain on an unsupported platform") + } + + rust_sysroot = "//third_party/android_rust_toolchain/toolchain" + use_unverified_rust_toolchain = false + extra_sysroot_libs += [ "libunwind.a" ] + } +} + +# Figure out the Rust target triple (aka 'rust_abi_target') +# +# This is here rather than in the toolchain files because it's used also by +# //build/rust/std to find the Rust standard library and construct a sysroot for +# rustc invocations. +# +# The list of architectures supported by Rust is here: +# https://doc.rust-lang.org/nightly/rustc/platform-support.html. We map Chromium +# targets to Rust targets comprehensively despite not having official support +# (see '*_toolchain_supports_platform above') to enable experimentation with +# other toolchains. +rust_abi_target = "" +if (is_linux || is_chromeos) { + cpu = current_cpu + if (cpu == "arm64") { + cpu = "aarch64" + } else if (cpu == "x64") { + cpu = "x86_64" + } + rust_abi_target = cpu + "-unknown-linux-gnu" +} else if (is_android) { + import("//build/config/android/abi.gni") + rust_abi_target = android_abi_target + if (rust_abi_target == "arm-linux-androideabi") { + # Android clang target specifications mostly match Rust, but this + # is an exception + rust_abi_target = "armv7-linux-androideabi" + } +} else if (is_fuchsia) { + if (current_cpu == "arm64") { + rust_abi_target = "aarch64-fuchsia" + } else if (current_cpu == "x64") { + rust_abi_target = "x86_64-fuchsia" + } else { + assert(false, "Architecture not supported") + } +} else if (is_ios) { + if (current_cpu == "arm64") { + rust_abi_target = "aarch64-apple-ios" + } else if (current_cpu == "arm") { + # There's also an armv7s-apple-ios, which targets a more recent ARMv7 + # generation CPU found in later iPhones. We'll go with the older one for + # maximal compatibility. As we come to support all the different platforms + # with Rust, we might want to be more precise here. 
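As a worked check of the triple mapping above, assuming a Linux arm64 configuration (an illustrative local assertion, not tree code): `current_cpu == "arm64"` becomes `cpu == "aarch64"`, so:

```gn
# Illustrative only: on is_linux with current_cpu == "arm64" the mapping
# above yields the aarch64 GNU triple.
if (is_linux && current_cpu == "arm64") {
  assert(rust_abi_target == "aarch64-unknown-linux-gnu")
}
```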
+    rust_abi_target = "armv7-apple-ios"
+  } else if (current_cpu == "x64") {
+    rust_abi_target = "x86_64-apple-ios"
+  } else if (current_cpu == "x86") {
+    rust_abi_target = "i386-apple-ios"
+  } else {
+    assert(false, "Architecture not supported")
+  }
+} else if (is_mac) {
+  if (current_cpu == "arm64") {
+    rust_abi_target = "aarch64-apple-darwin"
+  } else if (current_cpu == "x64") {
+    rust_abi_target = "x86_64-apple-darwin"
+  } else {
+    assert(false, "Architecture not supported")
+  }
+} else if (is_win) {
+  if (current_cpu == "arm64") {
+    rust_abi_target = "aarch64-pc-windows-msvc"
+  } else if (current_cpu == "x64") {
+    rust_abi_target = "x86_64-pc-windows-msvc"
+  } else if (current_cpu == "x86") {
+    # A 32-bit Windows target needs the i686 MSVC triple; reusing the 64-bit
+    # triple here would produce a mismatched ABI.
+    rust_abi_target = "i686-pc-windows-msvc"
+  } else {
+    assert(false, "Architecture not supported")
+  }
+}
+
+assert(!toolchain_has_rust || rust_abi_target != "")
+
+# This variable is passed to the Rust libstd build.
+rust_target_arch = ""
+if (current_cpu == "x86") {
+  rust_target_arch = "x86"
+} else if (current_cpu == "x64") {
+  rust_target_arch = "x86_64"
+} else if (current_cpu == "arm") {
+  rust_target_arch = "arm"
+} else if (current_cpu == "arm64") {
+  rust_target_arch = "aarch64"
+} else if (current_cpu == "mipsel") {
+  rust_target_arch = "mips"
+} else if (current_cpu == "mips64el") {
+  rust_target_arch = "mips64"
+} else if (current_cpu == "s390x") {
+  rust_target_arch = "s390x"
+} else if (current_cpu == "ppc64") {
+  rust_target_arch = "powerpc64"
+} else if (current_cpu == "riscv64") {
+  rust_target_arch = "riscv64"
+}
+
+assert(!toolchain_has_rust || rust_target_arch != "")
+
+# Must use the Chromium Rust toolchain to get precisely matching LLVM versions
+# in order to enable LTO. Some say that LTO probably works if LLVM is "close
+# enough", but we don't want to take that risk.
+assert(!use_thin_lto || !enable_rust || use_chromium_rust_toolchain ||
+           use_unverified_rust_toolchain,
+       "Must use Chromium Rust toolchain for LTO")
+
+# Determine whether the local libstd can and should be built.
+local_libstd_supported = enable_local_libstd && use_chromium_rust_toolchain
+
+# Determine whether the prebuilt libstd can be used.
+prebuilt_libstd_supported = !use_chromium_rust_toolchain ||
+                            (target_os == "linux" && target_cpu == "x64")
+
+# Arguments for Rust invocation.
+# This is common between gcc/clang, Mac and Windows toolchains so specify once,
+# here. This is not the complete command-line: toolchains should add -o
+# and probably --emit arguments too.
+rustc_common_args = "--crate-name {{crate_name}} {{source}} --crate-type {{crate_type}} {{rustflags}}"
+
+# Rust procedural macros are shared objects loaded into a prebuilt host rustc
+# binary. To build them, we obviously need to build for the host. Not only that,
+# but because the host rustc is prebuilt, it lacks the machinery to be able to
+# load shared objects built using sanitizers (ASAN etc.). For that reason, we need
+# to use a host toolchain that lacks sanitizers. This is only strictly necessary
+# for procedural macros, but we may also choose to build standalone Rust host
+# executable tools using the same toolchain, as they're likely to depend upon
+# similar dependencies (syn, quote etc.) and it saves a little build time.
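A sketch of how such a sanitizer-free host toolchain is typically consumed; the target names are hypothetical, and GN's `label(toolchain)` syntax performs the retargeting:

```gn
# Hypothetical sketch: build a proc-macro target with the sanitizer-free
# host toolchain selected just below.
group("host_rust_tools") {
  deps = [ ":my_proc_macro($host_toolchain_no_sanitizers)" ]
}
```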
+if (using_sanitizer || toolchain_disables_sanitizers) { + host_toolchain_no_sanitizers = "${host_toolchain}_no_sanitizers" +} diff --git a/config/sanitizers/BUILD.gn b/config/sanitizers/BUILD.gn new file mode 100644 index 000000000000..c320ed8d9969 --- /dev/null +++ b/config/sanitizers/BUILD.gn @@ -0,0 +1,609 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/chrome_build.gni") +import("//build/config/chromecast_build.gni") +import("//build/config/clang/clang.gni") +import("//build/config/sanitizers/sanitizers.gni") +import("//build/toolchain/toolchain.gni") +import("//build_overrides/build.gni") + +if (is_ios) { + import("//build/config/ios/ios_sdk.gni") +} + +# Contains the dependencies needed for sanitizers to link into executables and +# shared_libraries. +group("deps") { + if (using_sanitizer) { + public_configs = [ + # Even when a target removes default_sanitizer_flags, it may be depending + # on a library that did not remove default_sanitizer_flags. Thus, we need + # to add the ldflags here as well as in default_sanitizer_flags. + ":default_sanitizer_ldflags", + ] + deps = [] + if (!is_fuchsia) { + if (is_win) { + exe = ".exe" + } else { + exe = "" + } + data = [ + "//tools/valgrind/asan/", + "$clang_base_path/bin/llvm-symbolizer${exe}", + ] + } + if (is_asan || is_lsan || is_msan || is_tsan || is_ubsan || is_ubsan_vptr || + is_ubsan_security) { + public_configs += [ ":sanitizer_options_link_helper" ] + deps += [ ":options_sources" ] + } + if (use_prebuilt_instrumented_libraries || + use_locally_built_instrumented_libraries) { + deps += [ "//third_party/instrumented_libraries:deps" ] + } + } + if (is_asan) { + if (is_win || is_apple) { + data_deps = [ ":copy_asan_runtime" ] + } + if (is_apple) { + public_deps = [ ":asan_runtime_bundle_data" ] + } + } + if (use_centipede) { + # For executables which aren't actual fuzzers, we need stubs for + # the sanitizer coverage symbols, because we'll still be generating + # .o files which depend on them. + deps += [ "//third_party/centipede:centipede_weak_sancov_stubs" ] + } +} + +assert(!(is_win && is_asan && current_cpu == "x86"), + "ASan is only supported in 64-bit builds on Windows.") + +if ((is_apple || is_win) && is_asan) { + if (is_mac || (is_ios && target_environment == "catalyst")) { + _clang_rt_dso_path = "darwin/libclang_rt.asan_osx_dynamic.dylib" + } else if (is_ios) { + _clang_rt_dso_path = "darwin/libclang_rt.asan_iossim_dynamic.dylib" + } else if (is_win && current_cpu == "x64") { + _clang_rt_dso_path = "windows/clang_rt.asan_dynamic-x86_64.dll" + } + + _clang_rt_dso_full_path = + "$clang_base_path/lib/clang/$clang_version/lib/$_clang_rt_dso_path" + + if (!is_ios) { + copy("copy_asan_runtime") { + sources = [ _clang_rt_dso_full_path ] + outputs = [ "$root_out_dir/{{source_file_part}}" ] + } + } else { + # On iOS, the runtime library need to be code signed (adhoc signature) + # starting with Xcode 8, so use an action instead of a copy on iOS. 
+ action("copy_asan_runtime") { + script = "//build/config/ios/codesign.py" + sources = [ _clang_rt_dso_full_path ] + outputs = [ "$root_out_dir/" + get_path_info(sources[0], "file") ] + args = [ + "code-sign-file", + "--identity=" + ios_code_signing_identity, + "--output=" + rebase_path(outputs[0], root_build_dir), + rebase_path(sources[0], root_build_dir), + ] + } + } + + if (is_apple) { + bundle_data("asan_runtime_bundle_data") { + sources = get_target_outputs(":copy_asan_runtime") + outputs = [ "{{bundle_executable_dir}}/{{source_file_part}}" ] + public_deps = [ ":copy_asan_runtime" ] + } + } +} + +config("sanitizer_options_link_helper") { + if (is_apple) { + ldflags = [ "-Wl,-u,__sanitizer_options_link_helper" ] + } else if (!is_win) { + ldflags = [ "-Wl,-u_sanitizer_options_link_helper" ] + } +} + +static_library("options_sources") { + # This is a static_library instead of a source_set, as it shouldn't be + # unconditionally linked into targets. + visibility = [ + ":deps", + "//:gn_all", + ] + sources = [ "//build/sanitizers/sanitizer_options.cc" ] + + # Don't compile this target with any sanitizer code. It can be called from + # the sanitizer runtimes, so instrumenting these functions could cause + # recursive calls into the runtime if there is an error. + configs -= [ "//build/config/sanitizers:default_sanitizer_flags" ] + + if (is_asan) { + if (!defined(asan_suppressions_file)) { + asan_suppressions_file = "//build/sanitizers/asan_suppressions.cc" + } + sources += [ asan_suppressions_file ] + } + + if (is_lsan) { + if (!defined(lsan_suppressions_file)) { + lsan_suppressions_file = "//build/sanitizers/lsan_suppressions.cc" + } + sources += [ lsan_suppressions_file ] + } + + if (is_tsan) { + if (!defined(tsan_suppressions_file)) { + tsan_suppressions_file = "//build/sanitizers/tsan_suppressions.cc" + } + sources += [ tsan_suppressions_file ] + } +} + +# Applies linker flags necessary when either :deps or :default_sanitizer_flags +# are used. +config("default_sanitizer_ldflags") { + visibility = [ + ":default_sanitizer_flags", + ":deps", + + # https://crbug.com/360158. + "//tools/ipc_fuzzer/fuzzer:ipc_fuzzer", + ] + + if (is_posix || is_fuchsia) { + sanitizers = [] # sanitizers applicable to both clang and rustc + ldflags = [] + rustflags = [] + if (is_asan) { + sanitizers += [ "address" ] + } + if (is_hwasan) { + sanitizers += [ "hwaddress" ] + } + if (is_lsan) { + # In Chromium, is_lsan always implies is_asan. ASAN includes LSAN. + # It seems harmless to pass both options to clang, but it doesn't + # work on rustc, so apply this option to clang only. + ldflags += [ "-fsanitize=leak" ] + } + if (is_tsan) { + sanitizers += [ "thread" ] + } + if (is_msan) { + sanitizers += [ "memory" ] + } + if (is_ubsan || is_ubsan_security) { + ldflags += [ "-fsanitize=undefined" ] + } + if (is_ubsan_vptr) { + ldflags += [ "-fsanitize=vptr" ] + } + foreach(sanitizer, sanitizers) { + ldflags += [ "-fsanitize=$sanitizer" ] + rustflags += [ "-Zsanitizer=$sanitizer" ] + } + + if (use_sanitizer_coverage) { + if (use_libfuzzer) { + ldflags += [ "-fsanitize=fuzzer-no-link" ] + if (is_mac) { + # TODO(crbug.com/926588): on macOS, dead code stripping does not work + # well with `pc-table` instrumentation enabled by `fuzzer-no-link`. 
+ ldflags += [ "-fno-sanitize-coverage=pc-table" ] + } + } else { + ldflags += [ "-fsanitize-coverage=$sanitizer_coverage_flags" ] + } + } + + if (is_cfi && current_toolchain == default_toolchain) { + ldflags += [ "-fsanitize=cfi-vcall" ] + if (use_cfi_cast) { + ldflags += [ + "-fsanitize=cfi-derived-cast", + "-fsanitize=cfi-unrelated-cast", + ] + } + if (use_cfi_icall) { + ldflags += [ "-fsanitize=cfi-icall" ] + } + if (use_cfi_diag) { + ldflags += [ "-fno-sanitize-trap=cfi" ] + if (use_cfi_recover) { + ldflags += [ "-fsanitize-recover=cfi" ] + } + } + } + } else if (is_win) { + # Windows directly calls link.exe instead of the compiler driver when + # linking. Hence, pass the runtime libraries instead of -fsanitize=address + # or -fsanitize=fuzzer. + if (is_asan && is_component_build) { + # In the static-library build, ASan libraries are different for + # executables and dlls, see link_executable and link_shared_library below. + # This here handles only the component build. + assert(current_cpu == "x64", "WinASan unsupported architecture") + libs = [ + "clang_rt.asan_dynamic-x86_64.lib", + "clang_rt.asan_dynamic_runtime_thunk-x86_64.lib", + ] + } + if (use_libfuzzer) { + assert(current_cpu == "x64", "LibFuzzer unsupported architecture") + assert(!is_component_build, + "LibFuzzer only supports non-component builds on Windows") + + # Incremental linking causes padding that messes up SanitizerCoverage. + # Don't do it. + ldflags = [ "/INCREMENTAL:NO" ] + } + } +} + +config("common_sanitizer_flags") { + cflags = [] + + if (using_sanitizer) { + assert(is_clang, "sanitizers only supported with clang") + + # Allow non-default toolchains to enable sanitizers in toolchain_args even + # in official builds. + assert(current_toolchain != default_toolchain || !is_official_build, + "sanitizers not supported in official builds") + + cflags += [ + # Column info in debug data confuses Visual Studio's debugger, so don't + # use this by default. However, clusterfuzz needs it for good + # attribution of reports to CLs, so turn it on there. 
+ "-gcolumn-info", + ] + + # Frame pointers are controlled in //build/config/compiler:default_stack_frames + } +} + +config("asan_flags") { + cflags = [] + if (is_asan) { + cflags += [ "-fsanitize=address" ] + if (is_win) { + if (!defined(asan_win_blocklist_path)) { + asan_win_blocklist_path = + rebase_path("//tools/memory/asan/blocklist_win.txt", root_build_dir) + } + cflags += [ "-fsanitize-ignorelist=$asan_win_blocklist_path" ] + } + } +} + +config("link_executable") { + if (is_asan && is_win && !is_component_build) { + assert(current_cpu == "x64", "WinASan unsupported architecture") + ldflags = [ "-wholearchive:clang_rt.asan-x86_64.lib" ] + } +} + +config("link_shared_library") { + if (is_asan && is_win && !is_component_build) { + assert(current_cpu == "x64", "WinASan unsupported architecture") + libs = [ "clang_rt.asan_dll_thunk-x86_64.lib" ] + } +} + +config("cfi_flags") { + cflags = [] + if (is_cfi && current_toolchain == default_toolchain) { + if (!defined(cfi_ignorelist_path)) { + cfi_ignorelist_path = + rebase_path("//tools/cfi/ignores.txt", root_build_dir) + } + cflags += [ + "-fsanitize=cfi-vcall", + "-fsanitize-ignorelist=$cfi_ignorelist_path", + ] + + if (use_cfi_cast) { + cflags += [ + "-fsanitize=cfi-derived-cast", + "-fsanitize=cfi-unrelated-cast", + ] + } + + if (use_cfi_icall) { + cflags += [ "-fsanitize=cfi-icall" ] + } + + if (use_cfi_diag) { + cflags += [ "-fno-sanitize-trap=cfi" ] + if (is_win) { + cflags += [ + "/Oy-", + "/Ob0", + ] + } else { + cflags += [ + "-fno-inline-functions", + "-fno-inline", + "-fno-omit-frame-pointer", + "-O1", + ] + } + if (use_cfi_recover) { + cflags += [ "-fsanitize-recover=cfi" ] + } + } + } +} + +# crbug.com/785442: Fix cfi-icall failures for code that casts pointer argument +# types in function pointer type signatures. +config("cfi_icall_generalize_pointers") { + if (is_clang && is_cfi && use_cfi_icall) { + cflags = [ "-fsanitize-cfi-icall-generalize-pointers" ] + } +} + +config("cfi_icall_disable") { + if (is_clang && is_cfi && use_cfi_icall) { + cflags = [ "-fno-sanitize=cfi-icall" ] + } +} + +config("coverage_flags") { + cflags = [] + if (use_sanitizer_coverage) { + # Used by sandboxing code to allow coverage dump to be written on the disk. + defines = [ "SANITIZER_COVERAGE" ] + + if (use_libfuzzer) { + cflags += [ "-fsanitize=fuzzer-no-link" ] + if (is_mac) { + # TODO(crbug.com/926588): on macOS, dead code stripping does not work + # well with `pc-table` instrumentation enabled by `fuzzer-no-link`. 
+ cflags += [ "-fno-sanitize-coverage=pc-table" ] + } + } else { + cflags += [ + "-fsanitize-coverage=$sanitizer_coverage_flags", + "-mllvm", + "-sanitizer-coverage-prune-blocks=1", + ] + if (current_cpu == "arm") { + # http://crbug.com/517105 + cflags += [ + "-mllvm", + "-sanitizer-coverage-block-threshold=0", + ] + } + } + } +} + +config("hwasan_flags") { + if (is_hwasan) { + asmflags = [ "-fsanitize=hwaddress" ] + cflags = [ "-fsanitize=hwaddress" ] + } +} + +config("lsan_flags") { + if (is_lsan) { + cflags = [ "-fsanitize=leak" ] + } +} + +config("msan_flags") { + if (is_msan) { + assert(is_linux || is_chromeos, + "msan only supported on linux x86_64/ChromeOS") + if (!defined(msan_ignorelist_path)) { + msan_ignorelist_path = + rebase_path("//tools/msan/ignorelist.txt", root_build_dir) + } + cflags = [ + "-fsanitize=memory", + "-fsanitize-memory-track-origins=$msan_track_origins", + "-fsanitize-ignorelist=$msan_ignorelist_path", + + # TODO(https://crbug.com/1317909): evaluate and possibly enable + "-fno-sanitize-memory-use-after-dtor", + + # TODO(https://crbug.com/1369167): evaluate and possibly enable + "-fno-sanitize-memory-param-retval", + ] + } +} + +config("tsan_flags") { + if (is_tsan) { + assert(is_linux || is_chromeos, "tsan only supported on linux x86_64") + if (!defined(tsan_ignorelist_path)) { + tsan_ignorelist_path = + rebase_path("//tools/memory/tsan_v2/ignores.txt", root_build_dir) + } + cflags = [ + "-fsanitize=thread", + "-fsanitize-ignorelist=$tsan_ignorelist_path", + ] + } +} + +config("ubsan_flags") { + cflags = [] + if (is_ubsan) { + if (!defined(ubsan_ignorelist_path)) { + ubsan_ignorelist_path = + rebase_path("//tools/ubsan/ignorelist.txt", root_build_dir) + } + cflags += [ + "-fsanitize=alignment", + "-fsanitize=bool", + "-fsanitize=bounds", + "-fsanitize=builtin", + "-fsanitize=float-divide-by-zero", + "-fsanitize=integer-divide-by-zero", + "-fsanitize=null", + "-fsanitize=nonnull-attribute", + "-fsanitize=object-size", + "-fsanitize=return", + "-fsanitize=returns-nonnull-attribute", + "-fsanitize=shift-exponent", + "-fsanitize=signed-integer-overflow", + "-fsanitize=unreachable", + "-fsanitize=vla-bound", + "-fsanitize-ignorelist=$ubsan_ignorelist_path", + ] + + # Chromecast ubsan builds fail to compile with these + # experimental flags, so only add them to non-chromecast ubsan builds. + if (!is_castos && !is_cast_android) { + cflags += [ + # Employ the experimental PBQP register allocator to avoid slow + # compilation on files with too many basic blocks. + # See http://crbug.com/426271. + "-mllvm", + "-regalloc=pbqp", + + # Speculatively use coalescing to slightly improve the code generated + # by PBQP regallocator. May increase compile time. 
+ "-mllvm", + "-pbqp-coalescing", + ] + } + } +} + +config("ubsan_no_recover") { + if (is_ubsan_no_recover) { + cflags = [ "-fno-sanitize-recover=undefined" ] + } +} + +config("ubsan_security_flags") { + if (is_ubsan_security) { + if (!defined(ubsan_security_ignorelist_path)) { + ubsan_security_ignorelist_path = + rebase_path("//tools/ubsan/security_ignorelist.txt", root_build_dir) + } + cflags = [ + "-fsanitize=function", + "-fsanitize=shift", + "-fsanitize=signed-integer-overflow", + "-fsanitize=vla-bound", + "-fsanitize-ignorelist=$ubsan_security_ignorelist_path", + ] + } +} + +config("ubsan_vptr_flags") { + if (is_ubsan_vptr) { + if (!defined(ubsan_vptr_ignorelist_path)) { + ubsan_vptr_ignorelist_path = + rebase_path("//tools/ubsan/vptr_ignorelist.txt", root_build_dir) + } + cflags = [ + "-fsanitize=vptr", + "-fsanitize-ignorelist=$ubsan_vptr_ignorelist_path", + ] + } +} + +config("fuzzing_build_mode") { + if (use_fuzzing_engine && optimize_for_fuzzing) { + defines = [ "FUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION" ] + } +} + +all_sanitizer_configs = [ + ":common_sanitizer_flags", + ":coverage_flags", + ":default_sanitizer_ldflags", + ":asan_flags", + ":cfi_flags", + ":hwasan_flags", + ":lsan_flags", + ":msan_flags", + ":tsan_flags", + ":ubsan_flags", + ":ubsan_no_recover", + ":ubsan_security_flags", + ":ubsan_vptr_flags", + ":fuzzing_build_mode", +] + +# This config is applied by default to all targets. It sets the compiler flags +# for sanitizer usage, or, if no sanitizer is set, does nothing. +# +# This needs to be in a separate config so that targets can opt out of +# sanitizers (by removing the config) if they desire. Even if a target +# removes this config, executables & shared libraries should still depend on +# :deps if any of their dependencies have not opted out of sanitizers. +# Keep this list in sync with default_sanitizer_flags_but_ubsan_vptr. +config("default_sanitizer_flags") { + configs = all_sanitizer_configs + + if (use_sanitizer_configs_without_instrumentation) { + configs = [] + } +} + +# This config is equivalent to default_sanitizer_flags, but excludes ubsan_vptr. +# This allows to selectively disable ubsan_vptr, when needed. In particular, +# if some third_party code is required to be compiled without rtti, which +# is a requirement for ubsan_vptr. +config("default_sanitizer_flags_but_ubsan_vptr") { + configs = all_sanitizer_configs - [ ":ubsan_vptr_flags" ] + + if (use_sanitizer_configs_without_instrumentation) { + configs = [] + } +} + +config("default_sanitizer_flags_but_coverage") { + configs = all_sanitizer_configs - [ ":coverage_flags" ] + + if (use_sanitizer_configs_without_instrumentation) { + configs = [] + } +} + +# This config is used by parts of code that aren't targeted in fuzzers and +# therefore don't need coverage instrumentation and possibly wont need +# sanitizer instrumentation either. The config also tells the compiler to +# perform additional optimizations on the configured code and ensures that +# linking it to the rest of the binary which is instrumented with sanitizers +# works. The config only does anything if the build is a fuzzing build. +config("not_fuzzed") { + if (use_fuzzing_engine) { + # Since we aren't instrumenting with coverage, code size is less of a + # concern, so use a more aggressive optimization level than + # optimize_for_fuzzing (-O1). When given multiple optimization flags, clang + # obeys the last one, so as long as this flag comes after -O1, it should work. 
+ # Since this config will always be depended on after + # "//build/config/compiler:default_optimization" (which adds -O1 when + # optimize_for_fuzzing is true), -O2 should always be the second flag. Even + # though this sounds fragile, it isn't a big deal if it breaks, since proto + # fuzzers will still work, they will just be slightly slower. + cflags = [ "-O2" ] + + # We need to include this config when we remove default_sanitizer_flags or + # else there will be linking errors. We would remove default_sanitizer_flags + # here as well, but gn doesn't permit this. + if (!is_msan) { + # We don't actually remove sanitization when MSan is being used so there + # is no need to add default_sanitizer_ldflags in that case + configs = [ ":default_sanitizer_ldflags" ] + } + } +} diff --git a/config/sanitizers/OWNERS b/config/sanitizers/OWNERS new file mode 100644 index 000000000000..331e8bcf5767 --- /dev/null +++ b/config/sanitizers/OWNERS @@ -0,0 +1 @@ +metzman@chromium.org diff --git a/config/sanitizers/sanitizers.gni b/config/sanitizers/sanitizers.gni new file mode 100644 index 000000000000..446736e790cc --- /dev/null +++ b/config/sanitizers/sanitizers.gni @@ -0,0 +1,330 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/chrome_build.gni") +import("//build/config/chromecast_build.gni") +import("//build/config/chromeos/args.gni") +import("//build/config/chromeos/ui_mode.gni") +import("//build/config/profiling/profiling.gni") +import("//build/toolchain/toolchain.gni") + +declare_args() { + # Compile for Address Sanitizer to find memory bugs. + is_asan = false + + # Compile for Hardware-Assisted Address Sanitizer to find memory bugs + # (android/arm64 only). + # See http://clang.llvm.org/docs/HardwareAssistedAddressSanitizerDesign.html + is_hwasan = false + + # Compile for Leak Sanitizer to find leaks. + is_lsan = false + + # Compile for Memory Sanitizer to find uninitialized reads. + is_msan = false + + # Compile for Thread Sanitizer to find threading bugs. + is_tsan = false + + # Compile for Undefined Behaviour Sanitizer to find various types of + # undefined behaviour (excludes vptr checks). + is_ubsan = false + + # Halt the program if a problem is detected. + is_ubsan_no_recover = false + + # Track where uninitialized memory originates from. From fastest to slowest: + # 0 - no tracking, 1 - track only the initial allocation site, 2 - track the + # chain of stores leading from allocation site to use site. + msan_track_origins = 2 + + # Use dynamic libraries instrumented by one of the sanitizers instead of the + # standard system libraries. Set this flag to build the libraries from source. + use_locally_built_instrumented_libraries = false + + # Compile with Control Flow Integrity to protect virtual calls and casts. + # See http://clang.llvm.org/docs/ControlFlowIntegrity.html + # + # TODO(pcc): Remove this flag if/when CFI is enabled in all official builds. + is_cfi = is_official_build && is_clang && + ((target_os == "linux" && target_cpu == "x64") || + (is_chromeos && is_chromeos_device)) + + # Enable checks for indirect function calls via a function pointer. + # TODO(pcc): remove this when we're ready to add these checks by default. + # https://crbug.com/701919 + use_cfi_icall = + target_os == "linux" && target_cpu == "x64" && is_official_build + + # Print detailed diagnostics when Control Flow Integrity detects a violation. 
+ use_cfi_diag = false + + # Let Control Flow Integrity continue execution instead of crashing when + # printing diagnostics (use_cfi_diag = true). + use_cfi_recover = false + + # Compile for fuzzing with LLVM LibFuzzer. + # See http://www.chromium.org/developers/testing/libfuzzer + use_libfuzzer = false + + # Compile for fuzzing with centipede. + # See https://github.com/google/centipede + use_centipede = false + + # Compile for fuzzing with AFL. + use_afl = false + + # Compile for fuzzing with an external engine (e.g., Grammarinator). + use_external_fuzzing_engine = false + + # Enables core ubsan security features. Will later be removed once it matches + # is_ubsan. + is_ubsan_security = false + + # Helper variable for testing builds with disabled libfuzzer. + # Not for client use. + disable_libfuzzer = false + + # Optimize for coverage guided fuzzing (balance between speed and number of + # branches). Can be also used to remove non-determinism and other issues. + optimize_for_fuzzing = false + + # Value for -fsanitize-coverage flag. Setting this causes + # use_sanitizer_coverage to be enabled. + # This flag is not used for libFuzzer (use_libfuzzer=true). Instead, we use: + # -fsanitize=fuzzer-no-link + # Default value when unset and use_fuzzing_engine=true: + # trace-pc-guard + # Default value when unset and use_sanitizer_coverage=true: + # trace-pc-guard,indirect-calls + sanitizer_coverage_flags = "" + + # When enabled, only relevant sanitizer defines are set, but compilation + # happens with no extra flags. This is useful when in component build + # enabling sanitizers only in some of the components. + use_sanitizer_configs_without_instrumentation = false + + # When true, seed corpora archives are built. + archive_seed_corpus = true +} + +declare_args() { + # Enable checks for bad casts: derived cast and unrelated cast. + # TODO(krasin): remove this, when we're ready to add these checks by default. + # https://crbug.com/626794 + use_cfi_cast = is_cfi && is_chromeos + + # Compile for Undefined Behaviour Sanitizer's vptr checks. + is_ubsan_vptr = is_ubsan_security +} + +assert(!is_hwasan || (target_os == "android" && target_cpu == "arm64"), + "HWASan only supported on Android ARM64 builds.") + +# Disable sanitizers for non-target toolchains. +if (!is_a_target_toolchain || toolchain_disables_sanitizers) { + is_asan = false + is_cfi = false + is_hwasan = false + is_lsan = false + is_msan = false + is_tsan = false + is_ubsan = false + is_ubsan_no_recover = false + is_ubsan_security = false + is_ubsan_vptr = false + msan_track_origins = 0 + sanitizer_coverage_flags = "" + use_afl = false + use_centipede = false + use_cfi_diag = false + use_cfi_recover = false + use_libfuzzer = false + use_locally_built_instrumented_libraries = false + use_sanitizer_coverage = false +} else if (current_cpu != "arm64") { + is_hwasan = false +} + +# Use dynamic libraries instrumented by one of the sanitizers instead of the +# standard system libraries. We have instrumented system libraries for msan, +# which requires them to prevent false positives. +# TODO(thakis): Maybe remove this variable. +use_prebuilt_instrumented_libraries = is_msan + +# Whether we are doing a fuzzer build. Normally this should be checked instead +# of checking "use_libfuzzer || use_afl" because often developers forget to +# check for "use_afl", and "use_centipede" is new. +use_fuzzing_engine = + use_libfuzzer || use_afl || use_centipede || use_external_fuzzing_engine + +# Whether the current fuzzing engine supports libprotobuf_mutator. 
Right now
+# these are libfuzzer and centipede, but other engines are likely to follow,
+# so it's preferable to check this variable rather than the individual flags.
+use_fuzzing_engine_with_lpm = use_libfuzzer || use_centipede
+
+# Whether the fuzzing engine supports fuzzers which supply their own
+# "main" function.
+fuzzing_engine_supports_custom_main = use_libfuzzer || use_centipede
+
+# Args that are in turn dependent on other args must be in a separate
+# declare_args block. User overrides are only applied at the end of a
+# declare_args block.
+declare_args() {
+  # Generates an owners file for each fuzzer test.
+  # TODO(crbug.com/1194183): Remove this arg when finding OWNERS is faster.
+  generate_fuzzer_owners = use_fuzzing_engine
+
+  use_sanitizer_coverage =
+      !use_clang_coverage &&
+      (use_fuzzing_engine || sanitizer_coverage_flags != "")
+
+  # https://crbug.com/1002058: Code coverage works inside the sandbox via the
+  # help of several helper IPCs. Unfortunately, the sandbox-only path does not
+  # work well for fuzzing builds. Since fuzzing builds already disable the
+  # sandbox when dumping coverage, limit the sandbox-only path to non-fuzzing
+  # builds.
+  # Everything is IPC on Fuchsia, so this workaround for code coverage inside
+  # the sandbox does not apply.
+  use_clang_profiling_inside_sandbox =
+      use_clang_profiling && !use_fuzzing_engine && !is_fuchsia
+}
+
+if (use_fuzzing_engine && sanitizer_coverage_flags == "") {
+  sanitizer_coverage_flags = "trace-pc-guard"
+  if (use_centipede) {
+    # Centipede's minimal flags are listed in //third_party/centipede/src/clang-flags.txt.
+    # But, for users like Chromium using an up-to-date clang, we can also
+    # enable extra optional types of coverage which may make Centipede more
+    # effective. This list is not currently documented and has been derived
+    # from discussion with centipede creators (though one is warned about at
+    # https://github.com/google/centipede/blob/main/centipede_callbacks.cc#L68)
+    sanitizer_coverage_flags = sanitizer_coverage_flags +
+                               ",pc-table,trace-cmp,control-flow,trace-loads"
+  }
+} else if (use_sanitizer_coverage && sanitizer_coverage_flags == "") {
+  sanitizer_coverage_flags = "trace-pc-guard,indirect-calls"
+}
+
+# Whether we are linking against a sanitizer runtime library. Among other
+# things, this changes the default symbol level and other settings in order to
+# prepare to create stack traces "live" using the sanitizer runtime.
+using_sanitizer =
+    is_asan || is_hwasan || is_lsan || is_tsan || is_msan || is_ubsan ||
+    is_ubsan_vptr || is_ubsan_security || use_sanitizer_coverage || use_cfi_diag
+
+assert(!using_sanitizer || is_clang,
+       "Sanitizers (is_*san) require setting is_clang = true in 'gn args'")
+
+assert(!is_cfi || is_clang,
+       "is_cfi requires setting is_clang = true in 'gn args'")
+
+prebuilt_instrumented_libraries_available =
+    is_msan && (msan_track_origins == 0 || msan_track_origins == 2)
+
+if (use_libfuzzer && (is_linux || is_chromeos)) {
+  if (is_asan) {
+    # We do leak checking with libFuzzer on Linux. Set is_lsan for code that
+    # relies on LEAK_SANITIZER define to avoid false positives.
+    is_lsan = true
+  }
+}
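Tying the fuzzing-related arguments together, an `args.gn` for a typical libFuzzer build, consistent with the constraints in this file, might be (values illustrative):

```gn
# Illustrative args.gn sketch for a libFuzzer build; on Linux, is_lsan is
# then switched on automatically by the logic above.
use_libfuzzer = true
is_asan = true
is_debug = false
optimize_for_fuzzing = true
```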
+
+# MSan only links Chrome properly in release builds (brettw -- 9/1/2015). The
+# same is possibly true for the other non-ASan sanitizers. But regardless of
+# whether it links, one would normally never run a sanitizer in debug mode.
+# Running in debug mode probably indicates you forgot to set the "is_debug =
+# false" flag in the build args. ASan seems to run fine in debug mode.
+#
+# If you find a use-case where you want to compile a sanitizer in debug mode
+# and have verified it works, ask brettw and we can consider removing it from
+# this condition. We may also be able to find another way to enable your case
+# without having people accidentally get broken builds by compiling an
+# unsupported or inadvisable configuration.
+#
+# For one-off testing, just comment this assertion out.
+assert(!is_debug || !(is_msan || is_ubsan || is_ubsan_vptr),
+       "Sanitizers should generally be used in release (set is_debug=false).")
+
+assert(!is_msan || ((is_linux || is_chromeos) && current_cpu == "x64"),
+       "MSan currently only works on 64-bit Linux and ChromeOS builds.")
+
+assert(!is_lsan || is_asan, "is_lsan = true requires is_asan = true also.")
+
+# ASAN build on Windows is not working in debug mode. Intercepting memory
+# allocation functions is hard on Windows and not yet implemented in LLVM.
+assert(!is_win || !is_debug || !is_asan,
+       "ASan on Windows doesn't work in debug (set is_debug=false).")
+
+# libFuzzer targets can fail to build or behave incorrectly when built without
+# ASAN on Windows.
+assert(!is_win || !use_libfuzzer || is_asan,
+       "use_libfuzzer on Windows requires setting is_asan = true")
+
+# Make sure that if we recover on detection (i.e. not crash), diagnostics are
+# printed.
+assert(!use_cfi_recover || use_cfi_diag,
+       "Only use CFI recovery together with diagnostics.")
+
+# TODO(crbug.com/753445): the use_sanitizer_coverage arg is currently
+# not supported by the Chromium mac_clang_x64 toolchain on iOS distribution.
+# The coverage works with the iOS toolchain but it is broken when the mac
+# toolchain is used as a secondary one on iOS distribution. E.g., it should be
+# possible to build the "net" target for iOS with the sanitizer coverage
+# enabled.
+assert(
+    !(use_sanitizer_coverage && is_mac && target_os == "ios"),
+    "crbug.com/753445: use_sanitizer_coverage=true is not supported by the " +
+        "Chromium mac_clang_x64 toolchain on iOS distribution. Please set " +
+        "the argument value to false.")
+
+# Use these lists of configs to disable instrumenting code that is part of a
+# fuzzer, but which isn't being targeted (such as libprotobuf-mutator, *.pb.cc
+# and libprotobuf when they are built as part of a proto fuzzer). Adding or
+# removing these lists does not have any effect if use_libfuzzer or use_afl are
+# not passed as arguments to gn.
+not_fuzzed_remove_configs = []
+not_fuzzed_remove_nonasan_configs = []
+
+if (use_fuzzing_engine) {
+  # Removing coverage should always just work.
+  not_fuzzed_remove_configs += [ "//build/config/coverage:default_coverage" ]
+  not_fuzzed_remove_nonasan_configs +=
+      [ "//build/config/coverage:default_coverage" ]
+
+  if (!is_msan) {
+    # Allow sanitizer instrumentation to be removed if we are not using MSan
+    # since binaries cannot be partially instrumented with MSan.
+    not_fuzzed_remove_configs +=
+        [ "//build/config/sanitizers:default_sanitizer_flags" ]
+
+    # Certain parts of binaries must be instrumented with ASan if the rest of
+    # the binary is. For these, only remove non-ASan sanitizer instrumentation.
+    if (!is_asan) {
+      not_fuzzed_remove_nonasan_configs +=
+          [ "//build/config/sanitizers:default_sanitizer_flags" ]
+
+      assert(not_fuzzed_remove_nonasan_configs == not_fuzzed_remove_configs)
+    }
+  }
+}
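A sketch of how a fuzzer build template might consume these lists for its non-targeted parts; the target is hypothetical and mirrors the `not_fuzzed` config defined in //build/config/sanitizers/BUILD.gn above:

```gn
# Hypothetical sketch: support code linked into a fuzzer but not itself
# targeted drops the instrumentation configs and adds ":not_fuzzed".
source_set("proto_fuzzer_support") {
  sources = [ "support.cc" ]
  configs -= not_fuzzed_remove_configs
  configs += [ "//build/config/sanitizers:not_fuzzed" ]
}
```

+
+# Options common to different fuzzer engines.
+# Engine should be compiled without coverage (infinite loop in trace_cmp).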
+fuzzing_engine_remove_configs = [
+  "//build/config/coverage:default_coverage",
+  "//build/config/sanitizers:default_sanitizer_flags",
+]
+
+# Add any sanitizer flags back. In MSAN builds, instrumenting libfuzzer with
+# MSAN is necessary since all parts of the binary need to be instrumented for it
+# to work. ASAN builds are more subtle: libfuzzer depends on features from the
+# C++ STL. If it were not instrumented, templates would be instantiated without
+# ASAN from libfuzzer and with ASAN in other TUs. The linker might merge
+# instrumented template instantiations with non-instrumented ones (which could
+# have a different ABI) in the final binary, which is problematic for TUs
+# expecting one particular ABI (https://crbug.com/915422). The other sanitizers
+# are added back for the same reason.
+fuzzing_engine_add_configs =
+    [ "//build/config/sanitizers:default_sanitizer_flags_but_coverage" ]
diff --git a/config/siso/.gitignore b/config/siso/.gitignore
new file mode 100644
index 000000000000..522449bd69b0
--- /dev/null
+++ b/config/siso/.gitignore
@@ -0,0 +1 @@
+/.sisoenv
diff --git a/config/siso/OWNERS b/config/siso/OWNERS
new file mode 100644
index 000000000000..03122b7b39f1
--- /dev/null
+++ b/config/siso/OWNERS
@@ -0,0 +1,6 @@
+# All current members of the Chrome Build Team.
+jwata@google.com
+philwo@google.com
+richardwa@google.com
+tikuta@chromium.org
+ukai@google.com
diff --git a/config/siso/README.md b/config/siso/README.md
new file mode 100644
index 000000000000..ff38eba47501
--- /dev/null
+++ b/config/siso/README.md
@@ -0,0 +1,8 @@
+# Build config for Siso
+
+This directory contains configurations for the
+[siso](https://chromium.googlesource.com/infra/infra/+/refs/heads/main/go/src/infra/build/siso/)
+build tool.
+
+Please refer to [the config specifications](https://chromium.googlesource.com/infra/infra/+/refs/heads/main/go/src/infra/build/siso/docs/starlark_config.md) in the Siso repo.
+
diff --git a/config/siso/clang_linux.star b/config/siso/clang_linux.star
new file mode 100644
index 000000000000..b88af3b93552
--- /dev/null
+++ b/config/siso/clang_linux.star
@@ -0,0 +1,109 @@
+# -*- bazel-starlark -*-
+# Copyright 2023 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Siso configuration for clang/linux."""
+
+load("@builtin//path.star", "path")
+load("@builtin//struct.star", "module")
+
+__filegroups = {}
+
+def __clang_compile_coverage(ctx, cmd):
+    # TODO(b/278225415): add better support for coverage build.
+    # The instrument file contains the list of files affected by a patch.
+    # Including this file in the remote action inputs prevents cache hits.
+ inputs = [] + deps_args = [] + for i, arg in enumerate(cmd.args): + if i == 0: + continue + if arg == "../../build/toolchain/clang_code_coverage_wrapper.py": + continue + if arg.startswith("--files-to-instrument="): + inputs.append(ctx.fs.canonpath(arg.removeprefix("--files-to-instrument="))) + continue + if len(deps_args) == 0 and path.base(arg).find("clang") >= 0: + deps_args.append(arg) + continue + if deps_args: + if arg in ["-MD", "-MMD", "-c"]: + continue + if arg.startswith("-MF") or arg.startswith("-o"): + continue + if i > 1 and cmd.args[i - 1] in ["-MF", "-o"]: + continue + deps_args.append(arg) + if deps_args: + deps_args.append("-M") + ctx.actions.fix( + tool_inputs = cmd.tool_inputs + inputs, + deps_args = deps_args, + ) + +__handlers = { + "clang_compile_coverage": __clang_compile_coverage, +} + +def __step_config(ctx, step_config): + step_config["input_deps"].update({ + # clang++ is a symlink to clang + # but siso doesn't add symlink target automatically. + "third_party/llvm-build/Release+Asserts/bin/clang++": [ + "third_party/llvm-build/Release+Asserts/bin/clang", + ], + }) + step_config["rules"].extend([ + { + "name": "clang/cxx", + "action": "(.*_)?cxx", + "command_prefix": "../../third_party/llvm-build/Release+Asserts/bin/clang++ ", + "inputs": [ + "third_party/llvm-build/Release+Asserts/bin/clang++", + ], + "remote": True, + "canonicalize_dir": True, + }, + { + "name": "clang/cc", + "action": "(.*_)?cc", + "command_prefix": "../../third_party/llvm-build/Release+Asserts/bin/clang ", + "inputs": [ + "third_party/llvm-build/Release+Asserts/bin/clang", + ], + "remote": True, + "canonicalize_dir": True, + }, + { + "name": "clang-coverage/cxx", + "action": "(.*_)?cxx", + "command_prefix": "\"python3\" ../../build/toolchain/clang_code_coverage_wrapper.py", + "inputs": [ + "build/toolchain/clang_code_coverage_wrapper.py", + "third_party/llvm-build/Release+Asserts/bin/clang++", + ], + "handler": "clang_compile_coverage", + "remote": True, + "canonicalize_dir": True, + }, + { + "name": "clang-coverage/cc", + "action": "(.*_)?cc", + "command_prefix": "\"python3\" ../../build/toolchain/clang_code_coverage_wrapper.py", + "inputs": [ + "build/toolchain/clang_code_coverage_wrapper.py", + "third_party/llvm-build/Release+Asserts/bin/clang", + ], + "handler": "clang_compile_coverage", + "remote": True, + "canonicalize_dir": True, + }, + ]) + return step_config + +clang = module( + "clang", + step_config = __step_config, + filegroups = __filegroups, + handlers = __handlers, +) diff --git a/config/siso/configure_siso.py b/config/siso/configure_siso.py new file mode 100755 index 000000000000..2770f6e72b95 --- /dev/null +++ b/config/siso/configure_siso.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python3 +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""This script is used to configure siso.""" + +import argparse +import os +import sys + +THIS_DIR = os.path.abspath(os.path.dirname(__file__)) + + +def main(): + parser = argparse.ArgumentParser(description='configure siso') + parser.add_argument('--rbe_instance', help='RBE instance to use for Siso') + args = parser.parse_args() + + project = None + if not args.rbe_instance: + return 0 + rbe_instance = args.rbe_instance + elems = rbe_instance.split('/') + if len(elems) == 4 and elems[0] == 'projects': + project = elems[1] + rbe_instance = elems[-1] + siso_env_path = os.path.join(THIS_DIR, '.sisoenv') + with open(siso_env_path, 'w') as f: + if project: + f.write('SISO_PROJECT=%s\n' % project) + f.write('SISO_REAPI_INSTANCE=%s\n' % rbe_instance) + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/config/siso/linux.star b/config/siso/linux.star new file mode 100644 index 000000000000..d02318e7c708 --- /dev/null +++ b/config/siso/linux.star @@ -0,0 +1,43 @@ +# -*- bazel-starlark -*- +# Copyright 2023 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Siso configuration for linux.""" + +load("@builtin//struct.star", "module") +load("./clang_linux.star", "clang") +load("./mojo.star", "mojo") +load("./nacl_linux.star", "nacl") +load("./remote_exec_wrapper.star", "remote_exec_wrapper") + +__filegroups = {} +__filegroups.update(clang.filegroups) +__filegroups.update(mojo.filegroups) +__filegroups.update(nacl.filegroups) + +__handlers = {} +__handlers.update(clang.handlers) +__handlers.update(mojo.handlers) +__handlers.update(nacl.handlers) + +def __step_config(ctx, step_config): + step_config["platforms"] = { + "default": { + "OSFamily": "Linux", + "container-image": "docker://gcr.io/chops-private-images-prod/rbe/siso-chromium/linux@sha256:d4fcda628ebcdb3dd79b166619c56da08d5d7bd43d1a7b1f69734904cc7a1bb2", + }, + } + if remote_exec_wrapper.enabled(ctx): + step_config = remote_exec_wrapper.step_config(ctx, step_config) + else: + step_config = clang.step_config(ctx, step_config) + step_config = mojo.step_config(ctx, step_config) + step_config = nacl.step_config(ctx, step_config) + return step_config + +chromium = module( + "chromium", + step_config = __step_config, + filegroups = __filegroups, + handlers = __handlers, +) diff --git a/config/siso/mac.star b/config/siso/mac.star new file mode 100644 index 000000000000..7a638b9d33a3 --- /dev/null +++ b/config/siso/mac.star @@ -0,0 +1,23 @@ +# -*- bazel-starlark -*- +# Copyright 2023 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Siso configuration for macOS.""" + +load("@builtin//struct.star", "module") +load("./remote_exec_wrapper.star", "remote_exec_wrapper") + +__filegroups = {} +__handlers = {} + +def __step_config(ctx, step_config): + if remote_exec_wrapper.enabled(ctx): + step_config = remote_exec_wrapper.step_config(ctx, step_config) + return step_config + +chromium = module( + "chromium", + step_config = __step_config, + filegroups = __filegroups, + handlers = __handlers, +) diff --git a/config/siso/main.star b/config/siso/main.star new file mode 100644 index 000000000000..67121dca9498 --- /dev/null +++ b/config/siso/main.star @@ -0,0 +1,47 @@ +# -*- bazel-starlark -*- +# Copyright 2023 The Chromium Authors. All rights reserved. 
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Siso configuration main entry."""
+
+load("@builtin//encoding.star", "json")
+load("@builtin//runtime.star", "runtime")
+load("@builtin//struct.star", "module")
+load("./linux.star", chromium_linux = "chromium")
+load("./mac.star", chromium_mac = "chromium")
+load("./simple.star", "simple")
+load("./windows.star", chromium_windows = "chromium")
+
+def init(ctx):
+    print("runtime: os:%s arch:%s run:%d" % (
+        runtime.os,
+        runtime.arch,
+        runtime.num_cpu,
+    ))
+    host = {
+        "linux": chromium_linux,
+        "darwin": chromium_mac,
+        "windows": chromium_windows,
+    }[runtime.os]
+    step_config = {
+        "platforms": {},
+        "input_deps": {},
+        "rules": [],
+    }
+    step_config = host.step_config(ctx, step_config)
+    step_config = simple.step_config(ctx, step_config)
+
+    filegroups = {}
+    filegroups.update(host.filegroups)
+    filegroups.update(simple.filegroups)
+
+    handlers = {}
+    handlers.update(host.handlers)
+    handlers.update(simple.handlers)
+
+    return module(
+        "config",
+        step_config = json.encode(step_config),
+        filegroups = filegroups,
+        handlers = handlers,
+    )
diff --git a/config/siso/mojo.star b/config/siso/mojo.star
new file mode 100644
index 000000000000..45502e745dcd
--- /dev/null
+++ b/config/siso/mojo.star
@@ -0,0 +1,129 @@
+# -*- bazel-starlark -*-
+# Copyright 2023 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Siso configuration for mojo."""
+
+load("@builtin//struct.star", "module")
+
+__filegroups = {}
+
+__handlers = {}
+
+def __step_config(ctx, step_config):
+    step_config["rules"].extend([
+        {
+            "name": "mojo/mojom_bindings_generator",
+            "command_prefix": "python3 ../../mojo/public/tools/bindings/mojom_bindings_generator.py",
+            "inputs": [
+                "mojo/public/tools/bindings/mojom_bindings_generator.py",
+            ],
+            "indirect_inputs": {
+                "includes": [
+                    "*.js",
+                    "*.mojom",
+                    "*.mojom-module",
+                    "*.test-mojom",
+                    "*.test-mojom-module",
+                    "*.zip",
+                ],
+            },
+            "exclude_input_patterns": [
+                "*.stamp",
+            ],
+            # TODO(crbug.com/1437820): unspecified outputs of mojom_bindings_generator.py
+            "outputs_map": {
+                "./gen/components/aggregation_service/aggregation_service.mojom.js": {
+                    "outputs": [
+                        "./gen/mojom-webui/components/aggregation_service/aggregation_service.mojom-webui.js",
+                    ],
+                },
+                "./gen/components/attribution_reporting/eligibility_error.mojom.js": {
+                    "outputs": [
+                        "./gen/mojom-webui/components/attribution_reporting/eligibility_error.mojom-webui.js",
+                        "./gen/mojom-webui/components/attribution_reporting/registration_type.mojom-webui.js",
+                        "./gen/mojom-webui/components/attribution_reporting/source_registration_error.mojom-webui.js",
+                        "./gen/mojom-webui/components/attribution_reporting/trigger_registration_error.mojom-webui.js",
+                    ],
+                },
+                "./gen/components/attribution_reporting/registration.mojom.js": {
+                    "outputs": [
+                        "./gen/mojom-webui/components/attribution_reporting/registration.mojom-webui.js",
+                    ],
+                },
+                "./gen/media/capture/mojom/image_capture.mojom.js": {
+                    "outputs": [
+                        "./gen/mojom-webui/media/capture/mojom/image_capture.mojom-webui.js",
+                    ],
+                },
+                "./gen/services/device/public/mojom/usb_device.mojom.js": {
+                    "outputs": [
+                        "./gen/mojom-webui/services/device/public/mojom/usb_device.mojom-webui.js",
+                        "./gen/mojom-webui/services/device/public/mojom/usb_enumeration_options.mojom-webui.js",
+                        "./gen/mojom-webui/services/device/public/mojom/usb_manager.mojom-webui.js",
+                        "./gen/mojom-webui/services/device/public/mojom/usb_manager_client.mojom-webui.js",
+                    ],
+                },
+                "./gen/services/media_session/public/mojom/audio_focus.mojom.js": {
+                    "outputs": [
+                        "./gen/mojom-webui/services/media_session/public/mojom/audio_focus.mojom-webui.js",
+                        "./gen/mojom-webui/services/media_session/public/mojom/constants.mojom-webui.js",
+                        "./gen/mojom-webui/services/media_session/public/mojom/media_controller.mojom-webui.js",
+                        "./gen/mojom-webui/services/media_session/public/mojom/media_session.mojom-webui.js",
+                    ],
+                },
+                "./gen/services/network/public/mojom/attribution.mojom.js": {
+                    "outputs": [
+                        "./gen/mojom-webui/services/network/public/mojom/attribution.mojom-webui.js",
+                    ],
+                },
+                "./gen/services/network/public/mojom/schemeful_site.mojom.js": {
+                    "outputs": [
+                        "./gen/mojom-webui/services/network/public/mojom/schemeful_site.mojom-webui.js",
+                    ],
+                },
+                "./gen/third_party/blink/public/mojom/quota/quota_manager_host.mojom.js": {
+                    "outputs": [
+                        "./gen/mojom-webui/third_party/blink/public/mojom/quota/quota_manager_host.mojom-webui.js",
+                        "./gen/mojom-webui/third_party/blink/public/mojom/quota/quota_types.mojom-webui.js",
+                    ],
+                },
+                "./gen/third_party/blink/public/mojom/storage_key/ancestor_chain_bit.mojom.js": {
+                    "outputs": [
+                        "./gen/mojom-webui/third_party/blink/public/mojom/storage_key/ancestor_chain_bit.mojom-webui.js",
+                        "./gen/mojom-webui/third_party/blink/public/mojom/storage_key/storage_key.mojom-webui.js",
+                    ],
+                },
+                "./gen/ui/base/mojom/ui_base_types.mojom.js": {
+                    "outputs": [
+                        "./gen/mojom-webui/ui/base/mojom/ui_base_types.mojom-webui.js",
+                        "./gen/mojom-webui/ui/base/mojom/window_open_disposition.mojom-webui.js",
+                    ],
+                },
+                "./gen/ui/gfx/image/mojom/image.mojom.js": {
+                    "outputs": [
+                        "./gen/mojom-webui/ui/gfx/image/mojom/image.mojom-webui.js",
+                    ],
+                },
+            },
+            "restat": True,
+            "remote": True,
+            "output_local": True,
+            "platform": {
+                # mojom_bindings_generator.py will run faster on n2-highmem-8
+                # than n2-custom-2-3840
+                # e.g.
+                # n2-highmem-8: exec: 880.202978ms
+                # n2-custom-2-3840: exec: 2.42808488s
+                "gceMachineType": "n2-highmem-8",
+            },
+        },
+    ])
+    return step_config
+
+mojo = module(
+    "mojo",
+    step_config = __step_config,
+    filegroups = __filegroups,
+    handlers = __handlers,
+)
diff --git a/config/siso/nacl_linux.star b/config/siso/nacl_linux.star
new file mode 100644
index 000000000000..b6026ac2005c
--- /dev/null
+++ b/config/siso/nacl_linux.star
@@ -0,0 +1,179 @@
+# -*- bazel-starlark -*-
+# Copyright 2023 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Siso configuration for nacl/linux.""" + +load("@builtin//struct.star", "module") + +__filegroups = { + "native_client/toolchain/linux_x86/pnacl_newlib/bin/pydir:pydir": { + "type": "glob", + "includes": ["*.py"], + }, + "native_client/toolchain/linux_x86/pnacl_newlib/lib:libllvm": { + "type": "glob", + "includes": ["libLLVM*.so"], + }, + "native_client/toolchain/linux_x86/saigo_newlib/bin:clang": { + "type": "glob", + "includes": ["clang-*"], + }, + "native_client/toolchain/linux_x86/saigo_newlib/lib:libso": { + "type": "glob", + "includes": ["*.so*"], + }, + "native_client/toolchain/linux_x86/nacl_x86_glibc/lib/gcc/x86_64-nacl:crtbegin": { + "type": "glob", + "includes": ["crtbegin.o"], + }, + "native_client/toolchain/linux_x86/nacl_x86_glibc/libexec/gcc/x86_64-nacl:ccbackend": { + "type": "glob", + "includes": ["cc1", "cc1plus", "collect2"], + }, +} + +__handlers = {} + +def __step_config(ctx, step_config): + step_config["rules"].extend([ + { + "name": "nacl_linux/pnacl-clang++", + "action": "newlib_pnacl.*_cxx", + "command_prefix": "../../native_client/toolchain/linux_x86/pnacl_newlib/bin/pnacl-clang++", + "inputs": [ + "native_client/toolchain/linux_x86/pnacl_newlib/bin/pnacl-clang++", + ], + "remote": True, + "input_root_absolute_path": True, + }, + { + "name": "nacl_linux/pnacl-clang", + "action": "newlib_pnacl.*_cc", + "command_prefix": "../../native_client/toolchain/linux_x86/pnacl_newlib/bin/pnacl-clang", + "inputs": [ + "native_client/toolchain/linux_x86/pnacl_newlib/bin/pnacl-clang", + ], + "remote": True, + "input_root_absolute_path": True, + }, + { + "name": "nacl_linux/glibc/x86_64-nacl-gcc", + "action": "glibc_x64_cc", + "inputs": [ + "native_client/toolchain/linux_x86/nacl_x86_glibc/bin/x86_64-nacl-gcc", + ], + # ELF-32 doesn't work on gVisor, + # so will local-fallback if gVisor is used. + # TODO(b/278485912): remote=True for trusted instance. + "remote": False, + "input_root_absolute_path": True, + }, + { + "name": "nacl_linux/glibc/x86_64-nacl-g++", + "action": "glibc_x64_cxx", + "inputs": [ + "native_client/toolchain/linux_x86/nacl_x86_glibc/bin/x86_64-nacl-g++", + ], + # ELF-32 doesn't work on gVisor, + # so will local-fallback if gVisor is used. + # TODO(b/278485912): remote=True for trusted instance. 
+ "remote": False, + "input_root_absolute_path": True, + }, + { + "name": "nacl_linux/pnacl_newlib/x86_64-nacl-clang++", + "action": "clang_newlib_x64_cxx", + "inputs": [ + "native_client/toolchain/linux_x86/pnacl_newlib/bin/x86_64-nacl-clang++", + "native_client/toolchain/linux_x86/pnacl_newlib/x86_64-nacl/bin/ld", + ], + "remote": True, + "input_root_absolute_path": True, + }, + { + "name": "nacl_linux/pnacl_newlib/x86_64-nacl-clang", + "action": "clang_newlib_x64_cc", + "inputs": [ + "native_client/toolchain/linux_x86/pnacl_newlib/bin/x86_64-nacl-clang", + "native_client/toolchain/linux_x86/pnacl_newlib/x86_64-nacl/bin/ld", + ], + "remote": True, + "input_root_absolute_path": True, + }, + { + "name": "nacl_linux/saigo_newlib/x86_64-nacl-clang++", + "action": "irt_x64_cxx", + "command_prefix": "../../native_client/toolchain/linux_x86/saigo_newlib/bin/x86_64-nacl-clang++", + "inputs": [ + "native_client/toolchain/linux_x86/saigo_newlib/bin/x86_64-nacl-clang++", + ], + "remote": True, + "input_root_absolute_path": True, + }, + { + "name": "nacl_linux/saigo_newlib/x86_64-nacl-clang", + "action": "irt_x64_cc", + "command_prefix": "../../native_client/toolchain/linux_x86/saigo_newlib/bin/x86_64-nacl-clang", + "inputs": [ + "native_client/toolchain/linux_x86/saigo_newlib/bin/x86_64-nacl-clang", + ], + "remote": True, + "input_root_absolute_path": True, + }, + ]) + + step_config["input_deps"].update({ + "native_client/toolchain/linux_x86/pnacl_newlib/bin/pnacl-clang": [ + "native_client/toolchain/linux_x86/pnacl_newlib/bin/clang", + "native_client/toolchain/linux_x86/pnacl_newlib/bin/driver.conf", + "native_client/toolchain/linux_x86/pnacl_newlib/bin/pnacl-llc", + "native_client/toolchain/linux_x86/pnacl_newlib/bin/pydir:pydir", + "native_client/toolchain/linux_x86/pnacl_newlib/lib:libllvm", + "native_client/toolchain/linux_x86/pnacl_newlib/x86_64-nacl/bin/ld", + ], + "native_client/toolchain/linux_x86/pnacl_newlib/bin/pnacl-clang++": [ + "native_client/toolchain/linux_x86/pnacl_newlib/bin/clang", + "native_client/toolchain/linux_x86/pnacl_newlib/bin/driver.conf", + "native_client/toolchain/linux_x86/pnacl_newlib/bin/pnacl-llc", + "native_client/toolchain/linux_x86/pnacl_newlib/bin/pydir:pydir", + "native_client/toolchain/linux_x86/pnacl_newlib/lib:libllvm", + "native_client/toolchain/linux_x86/pnacl_newlib/x86_64-nacl/bin/ld", + ], + "native_client/toolchain/linux_x86/pnacl_newlib/bin/x86_64-nacl-clang": [ + "native_client/toolchain/linux_x86/pnacl_newlib/lib:libllvm", + ], + "native_client/toolchain/linux_x86/pnacl_newlib/bin/x86_64-nacl-clang++": [ + "native_client/toolchain/linux_x86/pnacl_newlib/lib:libllvm", + ], + "native_client/toolchain/linux_x86/saigo_newlib/bin/x86_64-nacl-clang": [ + "native_client/toolchain/linux_x86/saigo_newlib/bin:clang", + "native_client/toolchain/linux_x86/saigo_newlib/lib:libso", + "native_client/toolchain/linux_x86/saigo_newlib/x86_64-nacl/bin/ld", + ], + "native_client/toolchain/linux_x86/saigo_newlib/bin/x86_64-nacl-clang++": [ + "native_client/toolchain/linux_x86/saigo_newlib/bin:clang", + "native_client/toolchain/linux_x86/saigo_newlib/lib:libso", + "native_client/toolchain/linux_x86/saigo_newlib/x86_64-nacl/bin/ld", + ], + "native_client/toolchain/linux_x86/nacl_x86_glibc/bin/x86_64-nacl-gcc": [ + "native_client/toolchain/linux_x86/nacl_x86_glibc/bin/x86_64-nacl-as", + "native_client/toolchain/linux_x86/nacl_x86_glibc/lib/gcc/x86_64-nacl:crtbegin", + "native_client/toolchain/linux_x86/nacl_x86_glibc/libexec/gcc/x86_64-nacl:ccbackend", + 
"native_client/toolchain/linux_x86/nacl_x86_glibc/x86_64-nacl/bin/as", + ], + "native_client/toolchain/linux_x86/nacl_x86_glibc/bin/x86_64-nacl-g++": [ + "native_client/toolchain/linux_x86/nacl_x86_glibc/bin/x86_64-nacl-as", + "native_client/toolchain/linux_x86/nacl_x86_glibc/lib/gcc/x86_64-nacl:crtbegin", + "native_client/toolchain/linux_x86/nacl_x86_glibc/libexec/gcc/x86_64-nacl:ccbackend", + "native_client/toolchain/linux_x86/nacl_x86_glibc/x86_64-nacl/bin/as", + ], + }) + return step_config + +nacl = module( + "nacl", + step_config = __step_config, + filegroups = __filegroups, + handlers = __handlers, +) diff --git a/config/siso/remote_exec_wrapper.star b/config/siso/remote_exec_wrapper.star new file mode 100644 index 000000000000..dcd516920e00 --- /dev/null +++ b/config/siso/remote_exec_wrapper.star @@ -0,0 +1,58 @@ +# -*- bazel-starlark -*- +# Copyright 2023 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Siso configuration for remote exec wrapper.""" + +load("@builtin//lib/gn.star", "gn") +load("@builtin//struct.star", "module") + +__filegroups = {} +__handlers = {} + +def __enabled(ctx): + if "args.gn" in ctx.metadata: + gn_args = gn.parse_args(ctx.metadata["args.gn"]) + if gn_args.get("use_goma") == "true": + return True + if gn_args.get("use_remoteexec") == "true": + return True + return False + +def __step_config(ctx, step_config): + step_config["rules"].extend([ + { + "name": "clang/cxx", + "action": "(.*_)?cxx", + "use_remote_exec_wrapper": True, + }, + { + "name": "clang/cc", + "action": "(.*_)?cc", + "use_remote_exec_wrapper": True, + }, + { + "name": "clang/objcxx", + "action": "(.*_)?objcxx", + "use_remote_exec_wrapper": True, + }, + { + "name": "clang/objc", + "action": "(.*_)?objc", + "use_remote_exec_wrapper": True, + }, + { + "name": "action_remote", + "command_prefix": "python3 ../../build/util/action_remote.py", + "use_remote_exec_wrapper": True, + }, + ]) + return step_config + +remote_exec_wrapper = module( + "remote_exec_wrapper", + enabled = __enabled, + step_config = __step_config, + filegroups = __filegroups, + handlers = __handlers, +) diff --git a/config/siso/simple.star b/config/siso/simple.star new file mode 100644 index 000000000000..71b18d0a797d --- /dev/null +++ b/config/siso/simple.star @@ -0,0 +1,46 @@ +# -*- bazel-starlark -*- +# Copyright 2023 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""Siso configuration for simple steps.""" + +load("@builtin//struct.star", "module") + +def __copy(ctx, cmd): + input = cmd.inputs[0] + out = cmd.outputs[0] + ctx.actions.copy(input, out, recursive = ctx.fs.is_dir(input)) + ctx.actions.exit(exit_status = 0) + +def __stamp(ctx, cmd): + out = cmd.outputs[0] + ctx.actions.write(out) + ctx.actions.exit(exit_status = 0) + +__handlers = { + "copy": __copy, + "stamp": __stamp, +} + +def __step_config(ctx, step_config): + step_config["rules"].extend([ + { + "name": "simple/copy", + "action": "(.*_)?copy", + "handler": "copy", + }, + { + "name": "simple/stamp", + "action": "(.*_)?stamp", + "handler": "stamp", + "replace": True, + }, + ]) + return step_config + +simple = module( + "simple", + step_config = __step_config, + filegroups = {}, + handlers = __handlers, +) diff --git a/config/siso/windows.star b/config/siso/windows.star new file mode 100644 index 000000000000..88636f7b8ca6 --- /dev/null +++ b/config/siso/windows.star @@ -0,0 +1,23 @@ +# -*- bazel-starlark -*- +# Copyright 2023 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Siso configuration for Windows.""" + +load("@builtin//struct.star", "module") +load("./remote_exec_wrapper.star", "remote_exec_wrapper") + +__filegroups = {} +__handlers = {} + +def __step_config(ctx, step_config): + if remote_exec_wrapper.enabled(ctx): + step_config = remote_exec_wrapper.step_config(ctx, step_config) + return step_config + +chromium = module( + "chromium", + step_config = __step_config, + filegroups = __filegroups, + handlers = __handlers, +) diff --git a/config/sysroot.gni b/config/sysroot.gni new file mode 100644 index 000000000000..dea380727e73 --- /dev/null +++ b/config/sysroot.gni @@ -0,0 +1,79 @@ +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This header file defines the "sysroot" variable which is the absolute path +# of the sysroot. If no sysroot applies, the variable will be an empty string. + +declare_args() { + # The path of the sysroot that is applied when compiling using the target + # toolchain. + target_sysroot = "" + + # The path to directory containing linux sysroot images. + target_sysroot_dir = "//build/linux" + + # The path of the sysroot for the current toolchain. If empty, default + # sysroot is used. + sysroot = "" + + # Controls default is_linux sysroot. If set to true, and sysroot + # is empty, default sysroot is calculated. + use_sysroot = current_cpu == "x86" || current_cpu == "x64" || + current_cpu == "arm" || current_cpu == "arm64" || + current_cpu == "mipsel" || current_cpu == "mips64el" +} + +if (sysroot == "") { + if (current_os == target_os && current_cpu == target_cpu && + target_sysroot != "") { + sysroot = target_sysroot + } else if (is_android) { + import("//build/config/android/config.gni") + + # Android uses unified headers, and thus a single compile time sysroot + sysroot = "$android_toolchain_root/sysroot" + } else if ((is_linux || is_chromeos) && use_sysroot) { + # By default build against a sysroot image downloaded from Cloud Storage + # during gclient runhooks. 
+ if (current_cpu == "x64") { + sysroot = "$target_sysroot_dir/debian_bullseye_amd64-sysroot" + } else if (current_cpu == "x86") { + sysroot = "$target_sysroot_dir/debian_bullseye_i386-sysroot" + } else if (current_cpu == "mipsel") { + sysroot = "$target_sysroot_dir/debian_bullseye_mips-sysroot" + } else if (current_cpu == "mips64el") { + sysroot = "$target_sysroot_dir/debian_bullseye_mips64el-sysroot" + } else if (current_cpu == "arm") { + sysroot = "$target_sysroot_dir/debian_bullseye_arm-sysroot" + } else if (current_cpu == "arm64") { + sysroot = "$target_sysroot_dir/debian_bullseye_arm64-sysroot" + } else { + assert(false, "No linux sysroot for cpu: $target_cpu") + } + + if (sysroot != "") { + _script_arch = current_cpu + if (_script_arch == "x86") { + _script_arch = "i386" + } else if (_script_arch == "x64") { + _script_arch = "amd64" + } + assert( + exec_script("//build/dir_exists.py", + [ rebase_path(sysroot) ], + "string") == "True", + "Missing sysroot ($sysroot). To fix, run: build/linux/sysroot_scripts/install-sysroot.py --arch=$_script_arch") + } + } else if (is_mac) { + import("//build/config/mac/mac_sdk.gni") + sysroot = mac_sdk_path + } else if (is_ios) { + import("//build/config/ios/ios_sdk.gni") + sysroot = ios_sdk_path + } else if (is_fuchsia) { + if (current_cpu == "arm64" || current_cpu == "x64") { + sysroot = "//third_party/fuchsia-sdk/sdk/arch/$current_cpu/sysroot" + } + } +} diff --git a/config/ui.gni b/config/ui.gni new file mode 100644 index 000000000000..b560f372c638 --- /dev/null +++ b/config/ui.gni @@ -0,0 +1,55 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# ============================================= +# PLEASE DO NOT ADD MORE FLAGS TO THIS FILE +# ============================================= +# +# These flags are effectively global. Your feature flag should go near the +# code it controls. Most of these items are here now because they control +# legacy global #defines passed to the compiler (now replaced with generated +# buildflag headers -- see //build/buildflag_header.gni). +# +# These flags are ui-related so should eventually be moved to various places +# in //ui/*. +# +# There is more advice on where to put build flags in the "Build flag" section +# of //build/config/BUILDCONFIG.gn. + +import("//build/config/chromecast_build.gni") +import("//build/config/chromeos/args.gni") +import("//build/config/chromeos/ui_mode.gni") +import("//build/config/ozone.gni") + +declare_args() { + # Indicates if Aura is enabled. Aura is a low-level windowing library, sort + # of a replacement for GDI or GTK. + use_aura = is_win || is_linux || is_chromeos || is_fuchsia +} + +declare_args() { + # True means the UI is built using the "views" framework. + toolkit_views = is_mac || is_win || is_linux || is_chromeos || is_fuchsia + + use_glib = + is_linux && !is_castos && + # Avoid the need for glib when Android is building things via secondary + # toolchains. + target_os != "android" +} + +assert(!use_glib || (is_linux && !is_castos)) + +use_atk = use_glib && current_toolchain == default_toolchain + +# Whether using Xvfb to provide a display server for a test might be +# necessary. +use_xvfb_in_this_config = + is_linux || (is_chromeos_lacros && !is_chromeos_device) +# +# ============================================= +# PLEASE DO NOT ADD MORE FLAGS TO THIS FILE +# ============================================= +# +# See comment at the top. 
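+
+# Example (hypothetical BUILD.gn snippet, for illustration only): targets
+# consume these flags to gate platform-specific sources and deps, e.g.:
+#
+#   if (use_aura) {
+#     sources += [ "host_aura.cc" ]
+#     deps += [ "//ui/aura" ]
+#   }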
diff --git a/config/v8_target_cpu.gni b/config/v8_target_cpu.gni new file mode 100644 index 000000000000..6c41226a657e --- /dev/null +++ b/config/v8_target_cpu.gni @@ -0,0 +1,64 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/sanitizers/sanitizers.gni") + +declare_args() { + # This arg is used when we want to tell the JIT-generating v8 code + # that we want to have it generate for an architecture that is different + # than the architecture that v8 will actually run on; we then run the + # code under an emulator. For example, we might run v8 on x86, but + # generate arm code and run that under emulation. + # + # This arg is defined here rather than in the v8 project because we want + # some of the common architecture-specific args (like arm_float_abi or + # mips_arch_variant) to be set to their defaults either if the current_cpu + # applies *or* if the v8_current_cpu applies. + # + # As described below, you can also specify the v8_target_cpu to use + # indirectly by specifying a `custom_toolchain` that contains v8_$cpu in the + # name after the normal toolchain. + # + # For example, `gn gen --args="custom_toolchain=...:clang_x64_v8_arm64"` + # is equivalent to setting --args=`v8_target_cpu="arm64"`. Setting + # `custom_toolchain` is more verbose but makes the toolchain that is + # (effectively) being used explicit. + # + # v8_target_cpu can only be used to target one architecture in a build, + # so if you wish to build multiple copies of v8 that are targeting + # different architectures, you will need to do something more + # complicated involving multiple toolchains along the lines of + # custom_toolchain, above. + v8_target_cpu = "" +} + +if (v8_target_cpu == "") { + if (current_toolchain == "//build/toolchain/linux:clang_x64_v8_arm64") { + v8_target_cpu = "arm64" + } else if (current_toolchain == "//build/toolchain/linux:clang_x86_v8_arm") { + v8_target_cpu = "arm" + } else if (current_toolchain == + "//build/toolchain/linux:clang_x86_v8_mips64el") { + v8_target_cpu = "mips64el" + } else if (current_toolchain == + "//build/toolchain/linux:clang_x86_v8_mipsel") { + v8_target_cpu = "mipsel" + } else if (current_toolchain == + "//build/toolchain/linux:clang_x64_v8_riscv64") { + v8_target_cpu = "riscv64" + } else if (is_msan) { + # If we're running under a sanitizer, if we configure v8 to generate + # code that will be run under a simulator, then the generated code + # also gets the benefits of the sanitizer. + v8_target_cpu = "arm64" + } else { + v8_target_cpu = target_cpu + } +} + +declare_args() { + # This argument is declared here so that it can be overridden in toolchains. + # It should never be explicitly set by the user. + v8_current_cpu = v8_target_cpu +} diff --git a/config/win/BUILD.gn b/config/win/BUILD.gn new file mode 100644 index 000000000000..8a3bfbbb526e --- /dev/null +++ b/config/win/BUILD.gn @@ -0,0 +1,607 @@ +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import("//build/config/c++/c++.gni") +import("//build/config/chrome_build.gni") +import("//build/config/clang/clang.gni") +import("//build/config/compiler/compiler.gni") +import("//build/config/sanitizers/sanitizers.gni") +import("//build/config/win/control_flow_guard.gni") +import("//build/config/win/visual_studio_version.gni") +import("//build/timestamp.gni") +import("//build/toolchain/goma.gni") +import("//build/toolchain/rbe.gni") +import("//build/toolchain/toolchain.gni") + +assert(is_win) + +declare_args() { + # Turn this on to have the linker output extra timing information. + win_linker_timing = false + + # possible values for target_winuwp_version: + # "10" - Windows UWP 10 + # "8.1" - Windows RT 8.1 + # "8.0" - Windows RT 8.0 + target_winuwp_version = "10" + + # possible values: + # "app" - Windows Store Applications + # "phone" - Windows Phone Applications + # "system" - Windows Drivers and Tools + # "server" - Windows Server Applications + # "desktop" - Windows Desktop Applications + target_winuwp_family = "app" + + # Set this to use clang-style diagnostics format instead of MSVC-style, which + # is useful in e.g. Emacs compilation mode. + # E.g.: + # Without this, clang emits a diagnostic message like this: + # foo/bar.cc(12,34): error: something went wrong + # and with this switch, clang emits it like this: + # foo/bar.cc:12:34: error: something went wrong + use_clang_diagnostics_format = false +} + +# This is included by reference in the //build/config/compiler config that +# is applied to all targets. It is here to separate out the logic that is +# Windows-only. +config("compiler") { + if (current_cpu == "x86") { + asmflags = [ + # When /safeseh is specified, the linker will only produce an image if it + # can also produce a table of the image's safe exception handlers. This + # table specifies for the operating system which exception handlers are + # valid for the image. Note that /SAFESEH isn't accepted on the command + # line, only /safeseh. This is only accepted by ml.exe, not ml64.exe. + "/safeseh", + ] + } + + cflags = [ + "/Gy", # Enable function-level linking. + "/FS", # Preserve previous PDB behavior. + "/bigobj", # Some of our files are bigger than the regular limits. + "/utf-8", # Assume UTF-8 by default to avoid code page dependencies. + ] + + if (is_clang) { + cflags += [ + "/Zc:twoPhase", + + # Consistently use backslash as the path separator when expanding the + # __FILE__ macro when targeting Windows regardless of the build + # environment. + "-ffile-reproducible", + ] + } + + # Force C/C++ mode for the given GN detected file type. This is necessary + # for precompiled headers where the same source file is compiled in both + # modes. + cflags_c = [ "/TC" ] + cflags_cc = [ "/TP" ] + + cflags += [ + # Work around crbug.com/526851, bug in VS 2015 RTM compiler. + "/Zc:sizedDealloc-", + ] + + if (is_clang) { + # Required to make the 19041 SDK compatible with clang-cl. + # See https://crbug.com/1089996 issue #2 for details. + cflags += [ "/D__WRL_ENABLE_FUNCTION_STATICS__" ] + + # Tell clang which version of MSVC to emulate. + cflags += [ "-fmsc-version=1916" ] + + if (is_component_build) { + cflags += [ + # Do not export inline member functions. This makes component builds + # faster. This is similar to -fvisibility-inlines-hidden. 
+ "/Zc:dllexportInlines-", + ] + } + + if (current_cpu == "x86") { + if (host_cpu == "x86" || host_cpu == "x64") { + cflags += [ "-m32" ] + } else { + cflags += [ "--target=i386-windows" ] + } + } else if (current_cpu == "x64") { + if (host_cpu == "x86" || host_cpu == "x64") { + cflags += [ "-m64" ] + } else { + cflags += [ "--target=x86_64-windows" ] + } + } else if (current_cpu == "arm64") { + cflags += [ "--target=arm64-windows" ] + } else { + assert(false, "unknown current_cpu " + current_cpu) + } + + # Chrome currently requires SSE3. Clang supports targeting any Intel + # microarchitecture. MSVC only supports a subset of architectures, and the + # next step after SSE2 will be AVX. + if (current_cpu == "x86" || current_cpu == "x64") { + cflags += [ "-msse3" ] + } + + # Enable ANSI escape codes if something emulating them is around (cmd.exe + # doesn't understand ANSI escape codes by default). Make sure to not enable + # this if goma/remoteexec is in use, because this will lower cache hits. + if (!use_goma && !use_remoteexec && + exec_script("//build/win/use_ansi_codes.py", [], "trim string") == + "True") { + cflags += [ "-fansi-escape-codes" ] + } + + if (use_clang_diagnostics_format) { + cflags += [ "/clang:-fdiagnostics-format=clang" ] + } + } + + # Disabled with cc_wrapper because of https://github.com/mozilla/sccache/issues/264 + if (use_lld && !use_thin_lto && (is_clang || !use_goma) && cc_wrapper == "") { + # /Brepro lets the compiler not write the mtime field in the .obj output. + # link.exe /incremental relies on this field to work correctly, but lld + # never looks at this timestamp, so it's safe to pass this flag with + # lld and get more deterministic compiler output in return. + # In LTO builds, the compiler doesn't write .obj files containing mtimes, + # so /Brepro is ignored there. + cflags += [ "/Brepro" ] + } + + ldflags = [] + + if (use_lld) { + # lld defaults to writing the current time in the pe/coff header. + # For build reproducibility, pass an explicit timestamp. See + # build/compute_build_timestamp.py for how the timestamp is chosen. + # (link.exe also writes the current time, but it doesn't have a flag to + # override that behavior.) + ldflags += [ "/TIMESTAMP:" + build_timestamp ] + + # Don't look for libpaths in %LIB%, similar to /X in cflags above. + ldflags += [ "/lldignoreenv" ] + } + + # Some binaries create PDBs larger than 4 GiB. Increasing the PDB page size + # to 8 KiB allows 8 GiB PDBs. The larger page size also allows larger block maps + # which is a PDB limit that was hit in https://crbug.com/1406510. The page size + # can easily be increased in the future to allow even larger PDBs or larger + # block maps. + # This flag requires lld-link.exe or link.exe from VS 2022 or later to create + # the PDBs, and tools from circa 22H2 or later to consume the PDBs. + ldflags += [ "/pdbpagesize:8192" ] + + if (!is_debug && !is_component_build) { + # Enable standard linker optimizations like GC (/OPT:REF) and ICF in static + # release builds. + # Release builds always want these optimizations, so enable them explicitly. + ldflags += [ + "/OPT:REF", + "/OPT:ICF", + "/INCREMENTAL:NO", + "/FIXED:NO", + ] + + if (use_lld) { + # String tail merging leads to smaller binaries, but they don't compress + # as well, leading to increased mini_installer size (crbug.com/838449). + ldflags += [ "/OPT:NOLLDTAILMERGE" ] + } + + # TODO(siggi): Is this of any use anymore? 
+    # /PROFILE ensures that the PDB file contains FIXUP information (growing the
+    # PDB file by about 5%) but does not otherwise alter the output binary. It
+    # is enabled opportunistically for builds where it is not prohibited (not
+    # supported when incrementally linking, or using /debug:fastlink).
+    ldflags += [ "/PROFILE" ]
+  }
+
+  # arflags apply only to static_libraries. The normal linker configs are only
+  # set for executable and shared library targets so arflags must be set
+  # elsewhere. Since this is relatively contained, we just apply them in this
+  # more general config and they will only have an effect on static libraries.
+  arflags = [
+    # "No public symbols found; archive member will be inaccessible." This
+    # means that one or more object files in the library can never be
+    # pulled in to targets that link to this library. It's just a warning that
+    # the source file is a no-op.
+    "/ignore:4221",
+  ]
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Windows-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+  cflags = []
+  cflags_cc = []
+
+  # Defines that set up the CRT.
+  defines = [
+    "__STD_C",
+    "_CRT_RAND_S",
+    "_CRT_SECURE_NO_DEPRECATE",
+    "_SCL_SECURE_NO_DEPRECATE",
+  ]
+
+  # Defines that set up the Windows SDK.
+  defines += [
+    "_ATL_NO_OPENGL",
+    "_WINDOWS",
+    "CERT_CHAIN_PARA_HAS_EXTRA_FIELDS",
+    "PSAPI_VERSION=2",
+    "WIN32",
+    "_SECURE_ATL",
+  ]
+
+  if (current_os == "winuwp") {
+    # When targeting Windows Runtime, certain compiler/linker flags are
+    # necessary.
+    defines += [
+      "WINUWP",
+      "__WRL_NO_DEFAULT_LIB__",
+    ]
+    if (target_winuwp_family == "app") {
+      defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_PC_APP" ]
+    } else if (target_winuwp_family == "phone") {
+      defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_PHONE_APP" ]
+    } else if (target_winuwp_family == "system") {
+      defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_SYSTEM" ]
+    } else if (target_winuwp_family == "server") {
+      defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_SERVER" ]
+    } else {
+      defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_DESKTOP_APP" ]
+    }
+    cflags_cc += [ "/EHsc" ]
+
+    # This warning is given because the linker cannot tell the difference
+    # between consuming WinRT APIs versus authoring WinRT within static
+    # libraries; as such, this warning is always given by the linker. Since
+    # consuming WinRT APIs within a library is legitimate but authoring
+    # WinRT APIs is not allowed, this warning is disabled to ignore the
+    # legitimate consumption of WinRT APIs within static library builds.
+    arflags = [ "/IGNORE:4264" ]
+
+    if (target_winuwp_version == "10") {
+      defines += [ "WIN10=_WIN32_WINNT_WIN10" ]
+    } else if (target_winuwp_version == "8.1") {
+      defines += [ "WIN8_1=_WIN32_WINNT_WINBLUE" ]
+    } else if (target_winuwp_version == "8.0") {
+      defines += [ "WIN8=_WIN32_WINNT_WIN8" ]
+    }
+  } else {
+    # When not targeting Windows Runtime, make sure the WINAPI family is set
+    # to desktop.
+    defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_DESKTOP_APP" ]
+  }
+}
+
+# Chromium only supports Windows 10+.
+# Some third-party libraries assume that these defines set what version of
+# Windows is available at runtime. Targets using these libraries need to
+# manually override this config for their compiles.
+config("winver") {
+  defines = [
+    "NTDDI_VERSION=NTDDI_WIN10_NI",
+
+    # We can't say `=_WIN32_WINNT_WIN10` here because some files do
+    # `#if WINVER < 0x0600` without including windows.h before,
+    # and then _WIN32_WINNT_WIN10 isn't yet known to be 0x0A00.
+    "_WIN32_WINNT=0x0A00",
+    "WINVER=0x0A00",
+  ]
+}
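+
+# Example (hypothetical BUILD.gn snippet, for illustration only): a
+# third-party target that assumes an older Windows version can swap this
+# config out for its own:
+#
+#   configs -= [ "//build/config/win:winver" ]
+#   configs += [ "//third_party/my_lib:my_winver" ]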
+config("winver") { + defines = [ + "NTDDI_VERSION=NTDDI_WIN10_NI", + + # We can't say `=_WIN32_WINNT_WIN10` here because some files do + # `#if WINVER < 0x0600` without including windows.h before, + # and then _WIN32_WINNT_WIN10 isn't yet known to be 0x0A00. + "_WIN32_WINNT=0x0A00", + "WINVER=0x0A00", + ] +} + +# Linker flags for Windows SDK setup, this is applied only to EXEs and DLLs. +config("sdk_link") { + if (current_cpu == "x86") { + ldflags = [ + "/SAFESEH", # Not compatible with x64 so use only for x86. + "/largeaddressaware", + ] + } +} + +# This default linker setup is provided separately from the SDK setup so +# targets who want different library configurations can remove this and specify +# their own. +config("common_linker_setup") { + ldflags = [ + "/FIXED:NO", + "/ignore:4199", + "/ignore:4221", + "/NXCOMPAT", + "/DYNAMICBASE", + ] + + if (win_linker_timing) { + ldflags += [ + "/time", + "/verbose:incr", + ] + } +} + +# Flags that should be applied to building .exe files but not .dll files. +config("exe_flags") { + rustflags = [ "-Ctarget-feature=+crt-static" ] +} + +config("default_cfg_compiler") { + # Emit table of address-taken functions for Control-Flow Guard (CFG). + # This is needed to allow functions to be called by code that is built + # with CFG enabled, such as system libraries. + # The CFG guards are only emitted if |win_enable_cfg_guards| is enabled. + if (win_enable_cfg_guards) { + if (is_clang) { + cflags = [ "/guard:cf" ] + } + rustflags = [ "-Ccontrol-flow-guard" ] + } else { + if (is_clang) { + cflags = [ "/guard:cf,nochecks" ] + } + rustflags = [ "-Ccontrol-flow-guard=nochecks" ] + } +} + +# To disable CFG guards for a target, remove the "default_cfg_compiler" +# config, and add "disable_guards_cfg_compiler" config. +config("disable_guards_cfg_compiler") { + # Emit table of address-taken functions for Control-Flow Guard (CFG). + # This is needed to allow functions to be called by code that is built + # with CFG enabled, such as system libraries. + if (is_clang) { + cflags = [ "/guard:cf,nochecks" ] + } + rustflags = [ "-Ccontrol-flow-guard=nochecks" ] +} + +config("cfi_linker") { + # Control Flow Guard (CFG) + # https://msdn.microsoft.com/en-us/library/windows/desktop/mt637065.aspx + # /DYNAMICBASE (ASLR) is turned off in debug builds, therefore CFG cannot be + # turned on either. + # ASan and CFG leads to slow process startup. Chromium's test runner uses + # lots of child processes, so this means things are really slow. Disable CFG + # for now. https://crbug.com/846966 + if (!is_debug && !is_asan) { + # Turn on CFG bitmap generation and CFG load config. + ldflags = [ "/guard:cf" ] + } +} + +# This is a superset of all the delayloads needed for chrome.exe, chrome.dll, +# chrome_child.dll, and chrome_elf.dll. The linker will automatically ignore +# anything which is not linked to the binary at all. +# Most of the dlls are simply not required at startup (or at all, depending +# on how the browser is used). The following dlls are interconnected and need to +# be delayloaded together to ensure user32 does not load too early or at all, +# depending on the process type: user32, gdi32, comctl32, comdlg32, cryptui, +# d3d9, dwmapi, imm32, msi, ole32, oleacc, rstrtmgr, shell32, shlwapi, and +# uxtheme. +# There are some exceptions to this list which need to be declared separately. +# Some dlls cannot be delayloaded by chrome_child.dll due to the sandbox +# restrictions that prevent them from being loaded properly. 
Those dlls are
+# specified in the separate config below.
+# This config should also be used for any test binary whose goal is to run
+# tests with the full browser.
+config("delayloads") {
+  ldflags = [
+    "/DELAYLOAD:api-ms-win-core-winrt-error-l1-1-0.dll",
+    "/DELAYLOAD:api-ms-win-core-winrt-l1-1-0.dll",
+    "/DELAYLOAD:api-ms-win-core-winrt-string-l1-1-0.dll",
+    "/DELAYLOAD:comctl32.dll",
+    "/DELAYLOAD:comdlg32.dll",
+    "/DELAYLOAD:credui.dll",
+    "/DELAYLOAD:cryptui.dll",
+    "/DELAYLOAD:d3d11.dll",
+    "/DELAYLOAD:d3d9.dll",
+    "/DELAYLOAD:dwmapi.dll",
+    "/DELAYLOAD:dxgi.dll",
+    "/DELAYLOAD:dxva2.dll",
+    "/DELAYLOAD:esent.dll",
+    "/DELAYLOAD:gdi32.dll",
+    "/DELAYLOAD:hid.dll",
+    "/DELAYLOAD:imagehlp.dll",
+    "/DELAYLOAD:imm32.dll",
+    "/DELAYLOAD:msi.dll",
+    "/DELAYLOAD:netapi32.dll",
+    "/DELAYLOAD:ncrypt.dll",
+    "/DELAYLOAD:ole32.dll",
+    "/DELAYLOAD:oleacc.dll",
+    "/DELAYLOAD:propsys.dll",
+    "/DELAYLOAD:psapi.dll",
+    "/DELAYLOAD:rpcrt4.dll",
+    "/DELAYLOAD:rstrtmgr.dll",
+    "/DELAYLOAD:setupapi.dll",
+    "/DELAYLOAD:shell32.dll",
+    "/DELAYLOAD:shlwapi.dll",
+    "/DELAYLOAD:urlmon.dll",
+    "/DELAYLOAD:user32.dll",
+    "/DELAYLOAD:usp10.dll",
+    "/DELAYLOAD:uxtheme.dll",
+    "/DELAYLOAD:wer.dll",
+    "/DELAYLOAD:wevtapi.dll",
+    "/DELAYLOAD:wininet.dll",
+    "/DELAYLOAD:winusb.dll",
+    "/DELAYLOAD:wsock32.dll",
+    "/DELAYLOAD:wtsapi32.dll",
+  ]
+}
+
+config("delayloads_not_for_child_dll") {
+  ldflags = [
+    "/DELAYLOAD:advapi32.dll",
+    "/DELAYLOAD:crypt32.dll",
+    "/DELAYLOAD:dbghelp.dll",
+    "/DELAYLOAD:dhcpcsvc.dll",
+    "/DELAYLOAD:dwrite.dll",
+    "/DELAYLOAD:iphlpapi.dll",
+    "/DELAYLOAD:oleaut32.dll",
+    "/DELAYLOAD:secur32.dll",
+    "/DELAYLOAD:uiautomationcore.dll",
+    "/DELAYLOAD:userenv.dll",
+    "/DELAYLOAD:winhttp.dll",
+    "/DELAYLOAD:winmm.dll",
+    "/DELAYLOAD:winspool.drv",
+    "/DELAYLOAD:wintrust.dll",
+    "/DELAYLOAD:ws2_32.dll",
+  ]
+}
+
+# CRT --------------------------------------------------------------------------
+
+# Configures how the runtime library (CRT) is going to be used.
+# See https://msdn.microsoft.com/en-us/library/2kzt1wy3.aspx for a reference of
+# what each value does.
+config("default_crt") {
+  if (is_component_build) {
+    # Component mode: dynamic CRT. Since the library is shared, it requires
+    # exceptions or will give errors about things not matching, so keep
+    # exceptions on.
+    configs = [ ":dynamic_crt" ]
+  } else {
+    if (current_os == "winuwp") {
+      # https://blogs.msdn.microsoft.com/vcblog/2014/06/10/the-great-c-runtime-crt-refactoring/
+      # contains a detailed explanation of what is happening with the Windows
+      # CRT in Visual Studio releases related to Windows store applications.
+      configs = [ ":dynamic_crt" ]
+    } else {
+      # Desktop Windows: static CRT.
+      configs = [ ":static_crt" ]
+    }
+  }
+}
+
+# Use this to force use of the release CRT when building perf-critical build
+# tools that need to be fully optimized even in debug builds, for those times
+# when the debug CRT is part of the bottleneck. This also avoids *implicitly*
+# defining _DEBUG.
+config("release_crt") {
+  if (is_component_build) {
+    cflags = [ "/MD" ]
+
+    if (use_custom_libcxx) {
+      # On Windows, including libcpmt[d]/msvcprt[d] explicitly links the C++
+      # standard library, which libc++ needs for exception_ptr internals.
+ ldflags = [ "/DEFAULTLIB:msvcprt.lib" ] + } + } else { + cflags = [ "/MT" ] + if (use_custom_libcxx) { + ldflags = [ "/DEFAULTLIB:libcpmt.lib" ] + } + } +} + +config("dynamic_crt") { + if (is_debug) { + # This pulls in the DLL debug CRT and defines _DEBUG + cflags = [ "/MDd" ] + if (use_custom_libcxx) { + ldflags = [ "/DEFAULTLIB:msvcprtd.lib" ] + } + } else { + cflags = [ "/MD" ] + if (use_custom_libcxx) { + ldflags = [ "/DEFAULTLIB:msvcprt.lib" ] + } + } +} + +config("static_crt") { + if (is_debug) { + # This pulls in the static debug CRT and defines _DEBUG + cflags = [ "/MTd" ] + if (use_custom_libcxx) { + ldflags = [ "/DEFAULTLIB:libcpmtd.lib" ] + } + } else { + cflags = [ "/MT" ] + if (use_custom_libcxx) { + ldflags = [ "/DEFAULTLIB:libcpmt.lib" ] + } + } +} + +# Subsystem -------------------------------------------------------------------- + +# This is appended to the subsystem to specify a minimum version. +if (current_cpu == "x64") { + # The number after the comma is the minimum required OS version. + # 5.02 = Windows Server 2003. + subsystem_version_suffix = ",5.02" +} else if (current_cpu == "arm64") { + # Windows ARM64 requires Windows 10. + subsystem_version_suffix = ",10.0" +} else { + # 5.01 = Windows XP. + subsystem_version_suffix = ",5.01" +} + +config("console") { + ldflags = [ "/SUBSYSTEM:CONSOLE$subsystem_version_suffix" ] +} +config("windowed") { + ldflags = [ "/SUBSYSTEM:WINDOWS$subsystem_version_suffix" ] +} + +# Incremental linking ---------------------------------------------------------- + +# Applies incremental linking or not depending on the current configuration. +config("default_incremental_linking") { + # Enable incremental linking for debug builds and all component builds - any + # builds where performance is not job one. + # TODO(thakis): Always turn this on with lld, no reason not to. + if (is_debug || is_component_build) { + ldflags = [ "/INCREMENTAL" ] + if (use_lld) { + # lld doesn't use ilk files and doesn't really have an incremental link + # mode; the only effect of the flag is that the .lib file timestamp isn't + # updated if the .lib doesn't change. + # TODO(thakis): Why pass /OPT:NOREF for lld, but not otherwise? + # TODO(thakis): /INCREMENTAL is on by default in link.exe, but not in + # lld. + ldflags += [ "/OPT:NOREF" ] + } + } else { + ldflags = [ "/INCREMENTAL:NO" ] + } +} + +# Character set ---------------------------------------------------------------- + +# Not including this config means "ansi" (8-bit system codepage). +config("unicode") { + defines = [ + "_UNICODE", + "UNICODE", + ] +} + +# Lean and mean ---------------------------------------------------------------- + +# Some third party code might not compile with WIN32_LEAN_AND_MEAN so we have +# to have a separate config for it. Remove this config from your target to +# get the "bloaty and accommodating" version of windows.h. +config("lean_and_mean") { + defines = [ "WIN32_LEAN_AND_MEAN" ] +} + +# Nominmax -------------------------------------------------------------------- + +# Some third party code defines NOMINMAX before including windows.h, which +# then causes warnings when it's been previously defined on the command line. +# For such targets, this config can be removed. 
+ +config("nominmax") { + defines = [ "NOMINMAX" ] +} diff --git a/config/win/console_app.gni b/config/win/console_app.gni new file mode 100644 index 000000000000..038801e9fa00 --- /dev/null +++ b/config/win/console_app.gni @@ -0,0 +1,18 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/sanitizers/sanitizers.gni") + +declare_args() { + # If true, builds as a console app (rather than a windowed app), which allows + # logging to be printed to the user. This will cause a terminal window to pop + # up when the executable is not run from the command line, so should only be + # used for development. Only has an effect on Windows builds. + win_console_app = false +} + +if (is_win && is_asan) { + # AddressSanitizer build should be a console app since it writes to stderr. + win_console_app = true +} diff --git a/config/win/control_flow_guard.gni b/config/win/control_flow_guard.gni new file mode 100644 index 000000000000..176947f7fa67 --- /dev/null +++ b/config/win/control_flow_guard.gni @@ -0,0 +1,23 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/sanitizers/sanitizers.gni") + +declare_args() { + # Set this to true to enable generation of CFG indirect call dispatch + # guards. + win_enable_cfg_guards = !is_debug && !is_asan +} + +if (win_enable_cfg_guards) { + # Control Flow Guard (CFG) + # https://msdn.microsoft.com/en-us/library/windows/desktop/mt637065.aspx + # /DYNAMICBASE (ASLR) is turned off in debug builds, therefore CFG can't be + # turned on either. + # ASan and CFG leads to slow process startup. Chromium's test runner uses + # lots of child processes, so this means things are really slow. Disable CFG + # for now. https://crbug.com/846966 + assert(!is_debug && !is_asan, + "CFG does not work well in debug builds or with ASAN") +} diff --git a/config/win/manifest.gni b/config/win/manifest.gni new file mode 100644 index 000000000000..e1859eacded3 --- /dev/null +++ b/config/win/manifest.gni @@ -0,0 +1,118 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# HOW MANIFESTS WORK IN THE GN BUILD +# +# Use the windows_manifest template to declare a manifest generation step. +# This will combine all listed .manifest files. To link this manifest, just +# depend on the manifest target from your executable or shared library. +# +# This will define an empty placeholder target on non-Windows platforms so +# the manifest declarations and dependencies do not need to be inside of OS +# conditionals. +# +# A binary can depend on only one manifest target, but the manifest target +# can depend on many individual .manifest files which will be merged. As a +# result, only executables and shared libraries should depend on manifest +# targets. If you want to add a manifest to a component, put the dependency +# behind a "if (is_component_build)" conditional. +# +# Generally you will just want the defaults for the Chrome build. In this case +# the binary should just depend on one of the targets in //build/win/. There +# are also individual manifest files in that directory you can reference via +# the *_manifest variables defined below to pick and choose only some defaults. +# You might combine these with a custom manifest file to get specific behavior. 
+
+# Reference this manifest as a source from windows_manifest targets to get
+# the default Chrome OS compatibility list.
+default_compatibility_manifest = "//build/win/compatibility.manifest"
+
+# Reference this manifest as a source from windows_manifest targets to get
+# the default Chrome common controls compatibility.
+common_controls_manifest = "//build/win/common_controls.manifest"
+
+# Reference this manifest to request that Windows not perform any elevation
+# when running your program. Otherwise, it might do some autodetection and
+# request elevated privileges from the user. This is normally what you want.
+as_invoker_manifest = "//build/win/as_invoker.manifest"
+
+# An alternative to as_invoker_manifest when you want the application to always
+# elevate.
+require_administrator_manifest = "//build/win/require_administrator.manifest"
+
+# Request the segment heap. See https://crbug.com/1014701 for details.
+declare_args() {
+  enable_segment_heap = false
+}
+segment_heap_manifest = "//build/win/segment_heap.manifest"
+
+# Construct a target to combine the given manifest files into a .rc file.
+#
+# Variables for the windows_manifest template:
+#
+#   sources: (required)
+#     List of source .manifest files to add.
+#
+#   deps: (optional)
+#   visibility: (optional)
+#     Normal meaning.
+#
+# Example:
+#
+#   windows_manifest("doom_melon_manifest") {
+#     sources = [
+#       "doom_melon.manifest",  # Custom values in here.
+#       default_compatibility_manifest,  # Want the normal OS compat list.
+#     ]
+#   }
+#
+#   executable("doom_melon") {
+#     deps = [ ":doom_melon_manifest" ]
+#     ...
+#   }
+
+if (is_win) {
+  template("windows_manifest") {
+    config_name = "${target_name}__config"
+    source_set_name = target_name
+
+    config(config_name) {
+      visibility = [ ":$source_set_name" ]
+      assert(defined(invoker.sources),
+             "\"sources\" must be defined for a windows_manifest target")
+      manifests = []
+      foreach(i, rebase_path(invoker.sources, root_build_dir)) {
+        manifests += [ "/manifestinput:" + i ]
+      }
+      ldflags = [
+                  "/manifest:embed",
+
+                  # We handle UAC by adding explicit .manifest files instead.
+                  "/manifestuac:no",
+                ] + manifests
+    }
+
+    # This source set only exists to add a dep on the invoker's deps and to
+    # add a public_config that sets ldflags on dependents.
+    source_set(source_set_name) {
+      forward_variables_from(invoker, [ "visibility" ])
+      public_configs = [ ":$config_name" ]
+
+      # Apply any dependencies from the invoker to this target, since those
+      # dependencies may have created the input manifest files.
+      forward_variables_from(invoker, [ "deps" ])
+    }
+  }
+} else {
+  # Make a no-op group on non-Windows platforms so windows_manifest
+  # instantiations don't need to be inside windows blocks.
+  template("windows_manifest") {
+    group(target_name) {
+      # Prevent unused variable warnings on non-Windows platforms.
+      assert(invoker.sources != "")
+      assert(!defined(invoker.deps) || invoker.deps != "")
+      assert(!defined(invoker.visibility) || invoker.visibility != "")
+    }
+  }
+}
diff --git a/config/win/visual_studio_version.gni b/config/win/visual_studio_version.gni
new file mode 100644
index 000000000000..1da479dd5eeb
--- /dev/null
+++ b/config/win/visual_studio_version.gni
@@ -0,0 +1,44 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Path to Visual Studio. If empty, the default is used, which is to use the
+  # automatic toolchain in depot_tools.
If set, you must also set the + # visual_studio_version, wdk_path and windows_sdk_version. + visual_studio_path = "" + + # Version of Visual Studio pointed to by the visual_studio_path. + visual_studio_version = "" + + # Directory of the Windows driver kit. If visual_studio_path is empty, this + # will be auto-filled. + wdk_path = "" + + # Full path to the Windows SDK, not including a backslash at the end. + # This value is the default location, override if you have a different + # installation location. + windows_sdk_path = "C:\Program Files (x86)\Windows Kits\10" + + # Version of the Windows SDK pointed to by the windows_sdk_path. + windows_sdk_version = "" +} + +if (visual_studio_path == "") { + toolchain_data = + exec_script("../../vs_toolchain.py", [ "get_toolchain_dir" ], "scope") + visual_studio_path = toolchain_data.vs_path + windows_sdk_version = toolchain_data.sdk_version + windows_sdk_path = toolchain_data.sdk_path + visual_studio_version = toolchain_data.vs_version + wdk_path = toolchain_data.wdk_dir + visual_studio_runtime_dirs = toolchain_data.runtime_dirs +} else { + assert(visual_studio_version != "", + "You must set the visual_studio_version if you set the path") + assert(windows_sdk_version != "", + "You must set the windows_sdk_version if you set the path") + assert(wdk_path != "", + "You must set the wdk_path if you set the visual studio path") + visual_studio_runtime_dirs = [] +} diff --git a/config/zip.gni b/config/zip.gni new file mode 100644 index 000000000000..d623a0d0a9c7 --- /dev/null +++ b/config/zip.gni @@ -0,0 +1,59 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("python.gni") + +# Creates a zip archive of the inputs. +# +# output (required) +# Path to output zip. +# inputs (required) +# List of input files to zip. +# base_dir (optional) +# If provided, the archive paths will be relative to this directory. +# Applies only to |inputs|. +# zip_comment_values (optional) +# A list of key=value strings to store in a JSON-encoded archive comment. +# +# deps, public_deps, data, data_deps, testonly, visibility +# Normal meaning. +template("zip") { + action_with_pydeps(target_name) { + forward_variables_from(invoker, + [ + "data", + "data_deps", + "deps", + "public_deps", + "testonly", + "visibility", + ]) + script = "//build/android/gyp/zip.py" + inputs = invoker.inputs + outputs = [ invoker.output ] + + args = [ + "--output", + rebase_path(invoker.output, root_build_dir), + ] + + if (defined(invoker.zip_comment_values)) { + foreach(comment, invoker.zip_comment_values) { + args += [ + "--comment-json", + comment, + ] + } + } + + _rebased_inputs = rebase_path(invoker.inputs, root_build_dir) + args += [ "--input-files=$_rebased_inputs" ] + if (defined(invoker.base_dir)) { + args += [ + "--input-files-base-dir", + rebase_path(invoker.base_dir, root_build_dir), + ] + } + } +} diff --git a/config/zos/BUILD.gn b/config/zos/BUILD.gn new file mode 100644 index 000000000000..082ac1d389d7 --- /dev/null +++ b/config/zos/BUILD.gn @@ -0,0 +1,57 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/sanitizers/sanitizers.gni") +import("//build/toolchain/toolchain.gni") + +# This is included by reference in the //build/config/compiler config that +# is applied to all targets. It is here to separate out the logic. 
+ +config("compiler") { + defines = [ + "_AE_BIMODAL=1", + "_ALL_SOURCE", + "_ENHANCED_ASCII_EXT=0xFFFFFFFF", + "_Export=extern", + "_LARGE_TIME_API", + "_OPEN_MSGQ_EXT", + "_OPEN_SYS_FILE_EXT=1", + "_OPEN_SYS_SOCK_IPV6 ", + "_UNIX03_SOURCE ", + "_UNIX03_THREADS", + "_UNIX03_WITHDRAWN", + "_XOPEN_SOURCE=600", + "_XOPEN_SOURCE_EXTENDED", + "__static_assert=static_assert", + "PATH_MAX=1024", + ] + + cflags = [ + "-q64", + "-qASCII", + "-Wc,DLL", + "-Wa,GOFF", + "-qENUM=INT", + "-qEXPORTALL", + "-qASM", + "-qmakedep", + "-qARCH=10", + "-qTUNE=10", + "-qasmlib=sys1.maclib:sys1.modgen", + "-qfloat=IEEE", + "-qlibansi", + "-qgonumber", + "-qlongname", + ] + + cflags_cc = [ + ] + + asmflags = [ + "-Wa,GOFF", + ] + + ldflags = [ + ] +} diff --git a/copy_test_data_ios.py b/copy_test_data_ios.py new file mode 100755 index 000000000000..69b957a72e20 --- /dev/null +++ b/copy_test_data_ios.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python3 +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Copies test data files or directories into a given output directory.""" + + +import optparse +import os +import shutil +import sys + +class WrongNumberOfArgumentsException(Exception): + pass + +def EscapePath(path): + """Returns a path with spaces escaped.""" + return path.replace(" ", "\\ ") + +def ListFilesForPath(path): + """Returns a list of all the files under a given path.""" + output = [] + # Ignore revision control metadata directories. + if (os.path.basename(path).startswith('.git') or + os.path.basename(path).startswith('.svn')): + return output + + # Files get returned without modification. + if not os.path.isdir(path): + output.append(path) + return output + + # Directories get recursively expanded. + contents = os.listdir(path) + for item in contents: + full_path = os.path.join(path, item) + output.extend(ListFilesForPath(full_path)) + return output + +def CalcInputs(inputs): + """Computes the full list of input files for a set of command-line arguments. + """ + # |inputs| is a list of paths, which may be directories. + output = [] + for input in inputs: + output.extend(ListFilesForPath(input)) + return output + +def CopyFiles(relative_filenames, output_basedir): + """Copies files to the given output directory.""" + for file in relative_filenames: + relative_dirname = os.path.dirname(file) + output_dir = os.path.join(output_basedir, relative_dirname) + output_filename = os.path.join(output_basedir, file) + + # In cases where a directory has turned into a file or vice versa, delete it + # before copying it below. 
+    if os.path.exists(output_dir) and not os.path.isdir(output_dir):
+      os.remove(output_dir)
+    if os.path.exists(output_filename) and os.path.isdir(output_filename):
+      shutil.rmtree(output_filename)
+
+    if not os.path.exists(output_dir):
+      os.makedirs(output_dir)
+    shutil.copy(file, output_filename)
+
+def DoMain(argv):
+  parser = optparse.OptionParser()
+  usage = 'Usage: %prog -o <output_dir> [--inputs] [--outputs] <input_files>'
+  parser.set_usage(usage)
+  parser.add_option('-o', dest='output_dir')
+  parser.add_option('--inputs', action='store_true', dest='list_inputs')
+  parser.add_option('--outputs', action='store_true', dest='list_outputs')
+  options, arglist = parser.parse_args(argv)
+
+  if len(arglist) == 0:
+    raise WrongNumberOfArgumentsException('<input_files> required.')
+
+  files_to_copy = CalcInputs(arglist)
+  escaped_files = [EscapePath(x) for x in files_to_copy]
+  if options.list_inputs:
+    return '\n'.join(escaped_files)
+
+  if not options.output_dir:
+    raise WrongNumberOfArgumentsException('-o <output_dir> required.')
+
+  if options.list_outputs:
+    outputs = [os.path.join(options.output_dir, x) for x in escaped_files]
+    return '\n'.join(outputs)
+
+  CopyFiles(files_to_copy, options.output_dir)
+  return
+
+def main(argv):
+  try:
+    result = DoMain(argv[1:])
+  except WrongNumberOfArgumentsException as e:
+    print(e, file=sys.stderr)
+    return 1
+  if result:
+    print(result)
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/cp.py b/cp.py
new file mode 100755
index 000000000000..2bcf55cbc255
--- /dev/null
+++ b/cp.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python3
+# Copyright 2012 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copy a file.
+
+This module works much like the cp posix command - it takes 2 arguments:
+(src, dst) and copies the file with path |src| to |dst|.
+"""
+
+import os
+import shutil
+import sys
+
+
+def Main(src, dst):
+  # Use copy instead of copyfile to ensure the executable bit is copied.
+  return shutil.copy(src, os.path.normpath(dst))
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1], sys.argv[2]))
diff --git a/del_ninja_deps_cache.py b/del_ninja_deps_cache.py
new file mode 100755
index 000000000000..c2560de660a2
--- /dev/null
+++ b/del_ninja_deps_cache.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python3
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Delete .ninja_deps if it references files inside a libc++ dir which has
+since been reverted back to a file, and would cause Ninja to fail on Windows.
+See crbug.com/1337238"""
+
+import os
+import sys
+
+
+def main():
+  os.chdir(os.path.join(os.path.dirname(__file__), '..'))
+
+  # Paths that have switched between being a directory and regular file.
+  bad_dirs = [
+      'buildtools/third_party/libc++/trunk/include/__string',
+      'buildtools/third_party/libc++/trunk/include/__tuple',
+  ]
+
+  for bad_dir in bad_dirs:
+    if os.path.isdir(bad_dir):
+      # If it's a dir, .ninja_deps referencing files in it is not a problem.
+ continue + + for out_dir in os.listdir('out'): + ninja_deps = os.path.join('out', out_dir, '.ninja_deps') + try: + if str.encode(bad_dir) + b'/' in open(ninja_deps, 'rb').read(): + print('Deleting', ninja_deps) + os.remove(ninja_deps) + except FileNotFoundError: + pass + + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/detect_host_arch.py b/detect_host_arch.py new file mode 100755 index 000000000000..c9d47e9139fb --- /dev/null +++ b/detect_host_arch.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python3 +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Outputs host CPU architecture in format recognized by gyp.""" + + +import platform +import re +import sys + + +def HostArch(): + """Returns the host architecture with a predictable string.""" + host_arch = platform.machine() + + # Convert machine type to format recognized by gyp. + if re.match(r'i.86', host_arch) or host_arch == 'i86pc': + host_arch = 'ia32' + elif host_arch in ['x86_64', 'amd64']: + host_arch = 'x64' + elif host_arch.startswith('arm'): + host_arch = 'arm' + elif host_arch.startswith('aarch64'): + host_arch = 'arm64' + elif host_arch.startswith('mips64'): + host_arch = 'mips64' + elif host_arch.startswith('mips'): + host_arch = 'mips' + elif host_arch.startswith('ppc'): + host_arch = 'ppc' + elif host_arch.startswith('s390'): + host_arch = 's390' + + + # platform.machine is based on running kernel. It's possible to use 64-bit + # kernel with 32-bit userland, e.g. to give linker slightly more memory. + # Distinguish between different userland bitness by querying + # the python binary. + if host_arch == 'x64' and platform.architecture()[0] == '32bit': + host_arch = 'ia32' + if host_arch == 'arm64' and platform.architecture()[0] == '32bit': + host_arch = 'arm' + + return host_arch + +def DoMain(_): + """Hook to be called from gyp without starting a separate python + interpreter.""" + return HostArch() + +if __name__ == '__main__': + print(DoMain([])) diff --git a/dir_exists.py b/dir_exists.py new file mode 100755 index 000000000000..da9813f6093f --- /dev/null +++ b/dir_exists.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python3 +# Copyright 2011 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Writes True if the argument is a directory.""" + +import os.path +import sys + +def main(): + sys.stdout.write(_is_dir(sys.argv[1])) + return 0 + +def _is_dir(dir_name): + return str(os.path.isdir(dir_name)) + +def DoMain(args): + """Hook to be called from gyp without starting a separate python + interpreter.""" + return _is_dir(args[0]) + +if __name__ == '__main__': + sys.exit(main()) diff --git a/docs/debugging_slow_builds.md b/docs/debugging_slow_builds.md new file mode 100644 index 000000000000..9bba5530bca8 --- /dev/null +++ b/docs/debugging_slow_builds.md @@ -0,0 +1,48 @@ +# Debugging Slow Builds + +Did you know that Ninja writes a log to disk after each build? + +To see what kinds of files took the longest for your previous build: + +```sh +cd out/Default +# Lives in depot_tools: +post_build_ninja_summary.py +``` + +You can also set `NINJA_SUMMARIZE_BUILD=1` to have this command run +after each `autoninja` invocation (also runs ninja with `-d stats`). 
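+
+For example (a typical invocation; `out/Default` and the `chrome` target are
+placeholders):
+
+```sh
+NINJA_SUMMARIZE_BUILD=1 autoninja -C out/Default chrome
+```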
+
+To generate a Chrome trace of your most recent build:
+
+```sh
+git clone https://github.com/nico/ninjatracing
+ninjatracing/ninjatracing out/Default/.ninja_log > trace.json
+# Then open in https://ui.perfetto.dev/
+```
+
+## Slow Bot Builds
+
+Our bots run `ninjatracing` and `post_build_ninja_summary.py` as well.
+
+Find the trace at: `postprocess_for_goma > upload_log > ninja_log`:
+
+ * _".ninja_log in table format (full)"_ is for `post_build_ninja_summary.py`.
+ * _"trace viewer (sort_by_end)"_ is for `ninjatracing`.
+
+## Advanced(ish) Tips
+
+* Use `gn gen --tracelog trace.json` to create a trace for `gn gen`.
+* Many Android templates make use of
+  [`md5_check.py`](https://cs.chromium.org/chromium/src/build/android/gyp/util/md5_check.py)
+  to optimize incremental builds.
+  * Set `PRINT_BUILD_EXPLANATIONS=1` to have these commands log which inputs
+    changed.
+* If you suspect files are being rebuilt unnecessarily during incremental
+  builds:
+  * Use `ninja -n -d explain` to figure out why ninja thinks a target is dirty.
+  * Ensure actions are taking advantage of ninja's `restat=1` feature by not
+    updating timestamps on outputs when their contents do not change.
+    * E.g. by using [`build_utils.AtomicOutput()`]
+
+[`build_utils.AtomicOutput()`]: https://source.chromium.org/search?q=symbol:AtomicOutput%20f:build
diff --git a/docs/mac_hermetic_toolchain.md b/docs/mac_hermetic_toolchain.md
new file mode 100644
index 000000000000..d5c88deefb30
--- /dev/null
+++ b/docs/mac_hermetic_toolchain.md
@@ -0,0 +1,34 @@
+# Mac and iOS hermetic toolchain instructions
+
+The following is a short explanation of why we use the hermetic toolchain
+and instructions on how to roll a new toolchain. This toolchain is only
+available to Googlers and infra bots.
+
+## How to roll a new hermetic toolchain.
+
+1. Download a new version of Xcode, and confirm either mac or ios builds
+   properly with this new version.
+
+2. Create a new CIPD package by moving Xcode.app to the `build/` directory, then
+   follow the instructions in
+   [build/xcode_binaries.yaml](../xcode_binaries.yaml).
+
+   The CIPD package creates a subset of the toolchain necessary for a build.
+
+3. Create a CL with the updated `MAC_BINARIES_TAG` in
+   [mac_toolchain.py](../mac_toolchain.py) with the version created by the
+   previous command.
+
+4. Run the CL through the trybots to confirm the roll works.
+
+## Why we use a hermetic toolchain.
+
+Building Chrome for Mac currently requires many binaries that come bundled
+with Xcode, as well as the macOS and iPhoneOS SDKs (also bundled with Xcode).
+Note that Chrome ships its own version of clang (compiler), but is dependent
+on Xcode for these other binaries. Using a hermetic toolchain has two main
+benefits:
+
+1. Build Chrome with a well-defined toolchain (rather than whatever happens to
+   be installed on the machine).
+
+2. Easily roll/update the toolchain.
diff --git a/docs/writing_gn_templates.md b/docs/writing_gn_templates.md
new file mode 100644
index 000000000000..9171265ec1dd
--- /dev/null
+++ b/docs/writing_gn_templates.md
@@ -0,0 +1,351 @@
+# Writing GN Templates
+GN and Ninja are documented here:
+* GN: https://gn.googlesource.com/gn/+/main/docs/
+* Ninja: https://ninja-build.org/manual.html
+
+[TOC]
+
+## Things to Consider When Writing Templates
+### Inputs and Depfiles
+List all files read (or executed) by an action as `inputs`.
+ * It is not enough to have inputs listed by dependent targets. They must be
+   listed directly by targets that use them, or added by a depfile.
+ * Non-system Python imports are inputs! For scripts that import such modules,
+   use [`action_with_pydeps`] to ensure all dependent Python files are captured
+   as inputs.
+
+[`action_with_pydeps`]: https://cs.chromium.org/chromium/src/build/config/python.gni?rcl=320ee4295eb7fabaa112f08d1aacc88efd1444e5&l=75
+
+To understand *why* actions must list all inputs directly, you need to
+understand ninja's "restat" directive, which is used for all GN `action()`s.
+
+From https://ninja-build.org/manual.html:
+
+> if present, causes Ninja to re-stat the command’s outputs after execution of
+> the command. Each output whose modification time the command did not change
+> will be treated as though it had never needed to be built. This may cause the
+> output’s reverse dependencies to be removed from the list of pending build
+> actions.
+
+So, if your action depends on target "X", and "X" does not change its outputs
+when rebuilt, then ninja will not bother to rebuild your target.
+
+For action inputs that are not computable during "gn gen", actions can write
+depfiles (.d files) to add additional input files as dependencies for
+subsequent builds. They are relevant only for incremental builds since they
+won't exist for the initial build.
+ * Depfiles should not list files that GN already lists as `inputs`.
+   * Besides being redundant, listing them also makes it harder to remove
+     inputs, since removing them from GN does not immediately remove them from
+     depfiles.
+   * Stale paths in depfiles can cause ninja to complain of circular
+     dependencies [in some cases](https://bugs.chromium.org/p/chromium/issues/detail?id=639042).
+ * Use [`action_helpers.write_depfile()`] to write these.
+
+[`action_helpers.write_depfile()`]: https://source.chromium.org/chromium/chromium/src/+/main:build/action_helpers.py?q=symbol:%5Cbwrite_depfile
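+
+For illustration, a minimal sketch of an action script that records
+runtime-discovered inputs in a depfile (the script, the file names, and the
+exact `write_depfile()` signature shown here are assumptions, not the real
+API -- check build/action_helpers.py):
+
+```python
+# Hypothetical action script, run from the output directory.
+import action_helpers  # Assumes build/ is on sys.path for this import.
+
+def main():
+  output = 'gen/frobnicated.txt'
+  # Inputs only knowable while the action runs, e.g. files named inside
+  # another input file.
+  discovered = ['gen/part_a.inc', 'gen/part_b.inc']
+  with open(output, 'w') as f:
+    f.write('...')
+  # Record the discovered inputs so the next incremental build re-runs
+  # this action when any of them change.
+  action_helpers.write_depfile('gen/frobnicated.txt.d', output,
+                               inputs=discovered)
+
+if __name__ == '__main__':
+  main()
+```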
+ * Non-system Python imports are inputs! For scripts that import such modules, + use [`action_with_pydeps`] to ensure all dependent Python files are captured + as inputs. + +[`action_with_pydeps`]: https://cs.chromium.org/chromium/src/build/config/python.gni?rcl=320ee4295eb7fabaa112f08d1aacc88efd1444e5&l=75 + +To understand *why* actions must list all inputs directly, you need to +understand ninja's "restat" directive, which is used for all GN `action()`s. + +From https://ninja-build.org/manual.html: + +> if present, causes Ninja to re-stat the command’s outputs after execution of +> the command. Each output whose modification time the command did not change +> will be treated as though it had never needed to be built. This may cause the +> output’s reverse dependencies to be removed from the list of pending build +> actions. + +So, if your action depends on target "X", and "X" does not change its outputs +when rebuilt, then ninja will not bother to rebuild your target. + +For action inputs that are not computable during "gn gen", actions can write +depfiles (.d files) to add additional input files as dependencies for +subsequent builds. They are relevant only for incremental builds since they +won't exist for the initial build. + * Depfiles should not list files that GN already lists as `inputs`. + * Besides being redundant, listing them also makes it harder to remove + inputs, since removing them from GN does not immediately remove them from + depfiles. + * Stale paths in depfiles can cause ninja to complain of circular + dependencies [in some cases](https://bugs.chromium.org/p/chromium/issues/detail?id=639042). + * Use [`action_helpers.write_depfile()`] to write these. + +[`action_helpers.write_depfile()`]: https://source.chromium.org/chromium/chromium/src/+/main:build/action_helpers.py?q=symbol:%5Cbwrite_depfile + +### Ensuring "gn analyze" Knows About your Inputs +"gn analyze" is used by bots to run only affected tests and build only affected +targets. Try it out locally via: +```bash +echo "compute_inputs_for_analyze = true" >> out/Debug/args.gn +gn analyze //out/Debug <(echo '{ + "files": ["//BUILD.gn"], + "test_targets": ["//base"], + "additional_compile_targets":[]}') result.txt; cat result.txt +``` +* For analyze to work properly, GN must know about all inputs. +* Inputs added by depfiles are *not available* to "gn analyze". + * When paths listed in a target's depfile are listed as `inputs` to a + dependent target, analyze will be correct. + * Example: An `AndroidManifest.xml` file is an input to an + `android_library()` and is included in an `android_apk()`'s depfile. + `gn analyze` will know that a change to the file will require the APK + to be rebuilt, because the file is marked as an input to the library, and + the library is a dep of the APK. + * When paths listed in a target's depfile are *not* listed as `inputs` to a + dependent target, a few options exist: + * Rather than putting the inputs in a depfile, force users of your template + to list them, and then have your action re-compute them and assert that + they were correct. + * `jinja_template()` does this. + * Rather than putting the inputs in a depfile, compute them beforehand and + save them to a text file. Have your template Use `read_file()` to read + them in. + * `action_with_pydeps()` does this. + * Continue using a depfile, but use an `exec_script()` to compute them when + [`compute_inputs_for_analyze`](https://cs.chromium.org/chromium/src/build/config/compute_inputs_for_analyze.gni) + is set. 
* `grit()` does this.
+
+### Outputs
+#### What to List as Outputs
+Do not list files as `outputs` unless they are important. Outputs are important
+if they are:
+ * used as an input by another target, or
+ * roots in the dependency graph (e.g. binaries, apks, etc).
+
+Example:
+* An action runs a binary that creates an output as well as a log file. Do not
+  list the log file as an output.
+
+Rationale:
+* Inputs and outputs are a node's public API on the build graph. Not listing
+  "implementation detail"-style outputs prevents other targets from depending
+  on them as inputs.
+* Not listing them also helps to minimize the size of the build graph (although
+  this would be noticeable only for frequently used templates).
+
+#### Where to Place Outputs
+**Option 1:** To make outputs visible in codesearch (e.g. generated sources):
+* use `$target_gen_dir/$target_name.$EXTENSION`.
+
+**Option 2:** Otherwise (for binary files):
+* use `$target_out_dir/$target_name.$EXTENSION`.
+
+**Option 3:** For outputs that are required at runtime
+(e.g. [runtime_deps](https://gn.googlesource.com/gn/+/main/docs/reference.md#runtime_deps)),
+options 1 & 2 do not work because they are not archived in builder/tester bot
+configurations. In this case:
+* use `$root_out_dir/gen.runtime` or `$root_out_dir/obj.runtime`.
+
+Example:
+```python
+# This .json file is used at runtime and thus cannot go in target_gen_dir.
+_target_dir_name = rebase_path(get_label_info(":$target_name", "dir"), "//")
+_output_path = "$root_out_dir/gen.runtime/$_target_dir_name/$target_name.json"
+```
+
+**Option 4:** For outputs that map 1:1 with executables, and whose paths cannot
+be derived at runtime:
+* use `$root_build_dir/YOUR_NAME_HERE/$target_name`.
+
+Examples:
+```python
+# Wrapper scripts for apks:
+_output_path = "$root_build_dir/bin/$target_name"
+# Metadata for apks. Used by binary size tools.
+_output_path = "$root_build_dir/size-info/${invoker.name}.apk.jar.info"
+```
+
+## Best Practices for Python Actions
+Outputs should be atomic and take advantage of `restat=1`.
+* Make outputs atomic by writing to temporary files and then moving them to
+  their final location.
+  * Rationale: An interrupted write can leave a file with an updated timestamp
+    and corrupt contents. Ninja looks only at timestamps.
+* Do not overwrite an existing output with identical contents.
+  * Rationale: `restat=1` is a ninja feature enabled for all actions that
+    short-circuits a build when output timestamps do not change. This feature is
+    the reason that the total number of build steps sometimes decreases when
+    building.
+* Use [`action_helpers.atomic_output()`] to perform both of these techniques
+  (a sketch of the pattern appears below).
+
+[`action_helpers.atomic_output()`]: https://source.chromium.org/chromium/chromium/src/+/main:build/action_helpers.py?q=symbol:%5Cbatomic_output
+
+Actions should be deterministic in order to avoid hard-to-reproduce bugs.
+Given identical inputs, they should produce byte-for-byte identical outputs.
+* Some common mistakes:
+  * Depending on filesystem iteration order.
+  * Writing absolute paths in outputs.
+  * Writing timestamps in files (or in zip entries).
+  * Tip: Use [`zip_helpers.py`] when writing `.zip` files.
+
+[`zip_helpers.py`]: https://source.chromium.org/chromium/chromium/src/+/main:build/zip_helpers.py
+
+## Style Guide
+Chromium GN files follow
+[GN's Style Guide](https://gn.googlesource.com/gn/+/main/docs/style_guide.md)
+with a few additions.
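+
+As a concrete illustration of the atomic-output advice above: the sketch below
+writes to a temporary file, and only renames it over the real output when the
+contents actually changed. The function name and structure are illustrative;
+real actions should use [`action_helpers.atomic_output()`].
+
+```python
+import contextlib
+import os
+import tempfile
+
+
+@contextlib.contextmanager
+def atomic_write(path):  # Illustrative stand-in, not the real helper's API.
+  tmp = tempfile.NamedTemporaryFile(
+      'w', dir=os.path.dirname(path) or '.', delete=False)
+  try:
+    yield tmp
+    tmp.close()
+    with open(tmp.name) as f:
+      new_contents = f.read()
+    if os.path.exists(path):
+      with open(path) as f:
+        if f.read() == new_contents:
+          # Identical contents: keep the old file and its timestamp so that
+          # ninja's restat=1 can prune dependent build steps.
+          os.unlink(tmp.name)
+          return
+    os.replace(tmp.name, path)  # Atomic within a filesystem.
+  except Exception:
+    tmp.close()
+    os.unlink(tmp.name)
+    raise
+```
+
+`os.replace()` is atomic on the same filesystem, so a dependent step can never
+observe a half-written output.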
+ +### Action Granularity + * Prefer writing new Python scripts that do what you want over + composing multiple separate actions within a template. + * Fewer targets makes for a simpler build graph. + * GN logic and build logic winds up much simpler. + +Bad: +```python +template("generate_zipped_sources") { + generate_files("${target_name}__gen") { + ... + outputs = [ "$target_gen_dir/$target_name.temp" ] + } + zip(target_name) { + deps = [ ":${target_name}__gen" ] + inputs = [ "$target_gen_dir/$target_name.temp" ] + outputs = [ invoker.output_zip ] + } +} +``` + +Good: +```python +template("generate_zipped_sources") { + action(target_name) { + script = "generate_and_zip.py" + ... + outputs = [ invoker.output_zip ] + } +} +``` + +### Naming for Intermediate Targets +Targets that are not relevant to users of your template should be named as: +`${target_name}__$something`. + +Example: +```python +template("my_template") { + action("${target_name}__helper") { + ... + } + action(target_name) { + deps = [ ":${target_name}__helper" ] + ... + } +} +``` + +This scheme ensures that subtargets defined in templates do not conflict with +top-level targets. + +### Visibility for Intermediate Targets + +You can restrict what targets can depend on one another using [visibility]. +When writing templates, with multiple intermediate targets, `visibility` should +only be applied to the final target (the one named `target_name`). Applying only +to the final target ensures that the invoker-provided visibility does not +prevent intermediate targets from depending on each other. + +[visibility]: https://gn.googlesource.com/gn/+/main/docs/reference.md#var_visibility + +Example: +```python +template("my_template") { + # Do not forward visibility here. + action("${target_name}__helper") { + # Do not forward visibility here. + ... + } + action(target_name) { + # Forward visibility here. + forward_variables_from(invoker, [ "visibility" ]) + deps = [ ":${target_name}__helper" ] + ... + } +} +``` + +### Variables +Prefix variables within templates and targets with an underscore. For example: + +```python +template("example") { + _outer_sources = invoker.extra_sources + + source_set(target_name) { + _inner_sources = invoker.sources + sources = _outer_sources + _inner_sources + } +} +``` + +This convention conveys that `sources` is relevant to `source_set`, while +`_outer_sources` and `_inner_sources` are not. + +### Passing Arguments to Targets +Pass arguments to targets by assigning them directly within target definitions. + +When a GN template goes to resolve `invoker.FOO`, GN will look in all enclosing +scopes of the target's definition. It is hard to figure out where `invoker.FOO` +is coming from when it is not assigned directly within the target definition. + +Bad: +```python +template("hello") { + script = "..." + action(target_name) { + # This action will see "script" from the enclosing scope. + } +} +``` + +Good: +```python +template("hello") { + action(target_name) { + script = "..." # This is equivalent, but much more clear. + } +} +``` + +**Exception:** `testonly` and `visibility` can be set in the outer scope so that +they are implicitly passed to all targets within a template. + +This is okay: +```python +template("hello") { + testonly = true # Applies to all nested targets. + action(target_name) { + script = "..." + } +} +``` + +### Using forward_variables_from() +Using [forward_variables_from()] is encouraged, but special care needs to be +taken when forwarding `"*"`. 
The variables `testonly` and `visibility` should
+always be listed explicitly in case they are assigned in an enclosing
+scope.
+See [this bug] for a full example.
+
+To make this easier, `//build/config/BUILDCONFIG.gn` defines:
+```python
+TESTONLY_AND_VISIBILITY = [ "testonly", "visibility" ]
+```
+
+Example usage:
+```python
+template("action_wrapper") {
+  action(target_name) {
+    forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY)
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+    ...
+  }
+}
+```
+
+If your template defines multiple targets, be careful to apply `testonly` to
+both, but `visibility` only to the primary one (so that the primary one is not
+prevented from depending on the other ones).
+
+Example:
+```python
+template("template_with_multiple_targets") {
+  action("${target_name}__helper") {
+    forward_variables_from(invoker, [ "testonly" ])
+    ...
+  }
+  action(target_name) {
+    forward_variables_from(invoker, TESTONLY_AND_VISIBILITY)
+    ...
+  }
+}
+```
+
+An alternative would be to explicitly set `visibility` on all inner targets,
+but doing so tends to be tedious and has little benefit.
+
+[this bug]: https://bugs.chromium.org/p/chromium/issues/detail?id=862232
+[forward_variables_from()]: https://gn.googlesource.com/gn/+/main/docs/reference.md#func_forward_variables_from
+
+## Useful Ninja Flags
+Useful ninja flags when developing build rules:
+* `ninja -v` - log the full command-line of every target.
+* `ninja -v -n` - log the full command-line of every target without having
+  to wait for a build.
+* `ninja -w dupbuild=err` - fail if multiple targets have the same output.
+* `ninja -d keeprsp` - prevent ninja from deleting response files.
+* `ninja -n -d explain` - print why ninja thinks a target is dirty.
+* `ninja -j1` - execute only one command at a time.
diff --git a/dotfile_settings.gni b/dotfile_settings.gni
new file mode 100644
index 000000000000..50c04a8c0caa
--- /dev/null
+++ b/dotfile_settings.gni
@@ -0,0 +1,43 @@
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains variables that can be imported into a repo's dotfile (.gn)
+# to make it easier to roll new versions of //build in.
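+#
+# For example, a dependent repo's .gn might consume it like this (sketch):
+#
+#   import("//build/dotfile_settings.gni")
+#   exec_script_whitelist = build_dotfile_settings.exec_script_whitelist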
+ +build_dotfile_settings = { + exec_script_whitelist = [ + "//build/config/android/rules.gni", + "//build/config/chromeos/rules.gni", + "//build/config/compiler/BUILD.gn", + "//build/config/compiler/pgo/BUILD.gn", + "//build/config/gcc/gcc_version.gni", + "//build/config/host_byteorder.gni", + "//build/config/ios/ios_sdk.gni", + "//build/config/ios/rules.gni", + "//build/config/linux/atk/BUILD.gn", + "//build/config/linux/atspi2/BUILD.gn", + "//build/config/linux/BUILD.gn", + "//build/config/linux/dri/BUILD.gn", + "//build/config/linux/pkg_config.gni", + "//build/config/mac/mac_sdk.gni", + "//build/config/mac/rules.gni", + "//build/config/posix/BUILD.gn", + "//build/config/rust.gni", + "//build/config/sysroot.gni", + "//build/config/win/BUILD.gn", + "//build/config/win/visual_studio_version.gni", + "//build/rust/analyze.gni", + "//build/timestamp.gni", + "//build/toolchain/apple/toolchain.gni", + "//build/toolchain/BUILD.gn", + "//build/toolchain/concurrent_links.gni", + "//build/toolchain/goma.gni", + "//build/toolchain/nacl/BUILD.gn", + "//build/toolchain/toolchain.gni", + "//build/toolchain/win/BUILD.gn", + "//build/toolchain/win/win_toolchain_data.gni", + "//build/toolchain/zos/BUILD.gn", + "//build/util/branding.gni", + ] +} diff --git a/download_nacl_toolchains.py b/download_nacl_toolchains.py new file mode 100755 index 000000000000..1b86a4bb9e19 --- /dev/null +++ b/download_nacl_toolchains.py @@ -0,0 +1,55 @@ +#!/usr/bin/env python3 +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Shim to run nacl toolchain download script only if there is a nacl dir.""" + + +import os +import shutil +import sys + + +def Main(args): + script_dir = os.path.dirname(os.path.abspath(__file__)) + src_dir = os.path.dirname(script_dir) + nacl_dir = os.path.join(src_dir, 'native_client') + nacl_build_dir = os.path.join(nacl_dir, 'build') + package_version_dir = os.path.join(nacl_build_dir, 'package_version') + package_version = os.path.join(package_version_dir, 'package_version.py') + if not os.path.exists(package_version): + print("Can't find '%s'" % package_version) + print('Presumably you are intentionally building without NativeClient.') + print('Skipping NativeClient toolchain download.') + sys.exit(0) + sys.path.insert(0, package_version_dir) + import package_version + + # BUG: + # We remove this --optional-pnacl argument, and instead replace it with + # --no-pnacl for most cases. However, if the bot name is an sdk + # bot then we will go ahead and download it. This prevents increasing the + # gclient sync time for developers, or standard Chrome bots. + if '--optional-pnacl' in args: + args.remove('--optional-pnacl') + use_pnacl = False + buildbot_name = os.environ.get('BUILDBOT_BUILDERNAME', '') + if 'pnacl' in buildbot_name and 'sdk' in buildbot_name: + use_pnacl = True + if use_pnacl: + print('\n*** DOWNLOADING PNACL TOOLCHAIN ***\n') + else: + args = ['--exclude', 'pnacl_newlib'] + args + + # Only download the ARM gcc toolchain if we are building for ARM + # TODO(olonho): we need to invent more reliable way to get build + # configuration info, to know if we're building for ARM. 
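+  # For example, a developer cross-building for ARM might have exported
+  # GYP_DEFINES="target_arch=arm" (illustrative value); anything else causes
+  # the ARM toolchain to be excluded from the download.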
+ if 'target_arch=arm' not in os.environ.get('GYP_DEFINES', ''): + args = ['--exclude', 'nacl_arm_newlib'] + args + + return package_version.main(args) + + +if __name__ == '__main__': + sys.exit(Main(sys.argv[1:])) diff --git a/env_dump.py b/env_dump.py new file mode 100755 index 000000000000..1eaf8dc92125 --- /dev/null +++ b/env_dump.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python3 +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This script can either source a file and dump the enironment changes done by +# it, or just simply dump the current environment as JSON into a file. + +import json +import optparse +import os +import pipes +import subprocess +import sys + + +def main(): + parser = optparse.OptionParser() + parser.add_option('-f', '--output-json', + help='File to dump the environment as JSON into.') + parser.add_option( + '-d', '--dump-mode', action='store_true', + help='Dump the environment to sys.stdout and exit immediately.') + + parser.disable_interspersed_args() + options, args = parser.parse_args() + if options.dump_mode: + if args or options.output_json: + parser.error('Cannot specify args or --output-json with --dump-mode.') + json.dump(dict(os.environ), sys.stdout) + else: + if not options.output_json: + parser.error('Requires --output-json option.') + + envsetup_cmd = ' '.join(map(pipes.quote, args)) + full_cmd = [ + 'bash', '-c', + '. %s > /dev/null; %s -d' % (envsetup_cmd, os.path.abspath(__file__)) + ] + try: + output = subprocess.check_output(full_cmd) + except Exception as e: + sys.exit('Error running %s and dumping environment.' % envsetup_cmd) + + env_diff = {} + new_env = json.loads(output) + for k, val in new_env.items(): + if k == '_' or (k in os.environ and os.environ[k] == val): + continue + env_diff[k] = val + with open(options.output_json, 'w') as f: + json.dump(env_diff, f) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/extract_from_cab.py b/extract_from_cab.py new file mode 100755 index 000000000000..c7ae6d9f499f --- /dev/null +++ b/extract_from_cab.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python3 +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Extracts a single file from a CAB archive.""" + + +import os +import shutil +import subprocess +import sys +import tempfile + +def run_quiet(*args): + """Run 'expand' suppressing noisy output. Returns returncode from process.""" + popen = subprocess.Popen(args, stdout=subprocess.PIPE) + out, _ = popen.communicate() + if popen.returncode: + # expand emits errors to stdout, so if we fail, then print that out. + print(out) + return popen.returncode + +def main(): + if len(sys.argv) != 4: + print('Usage: extract_from_cab.py cab_path archived_file output_dir') + return 1 + + [cab_path, archived_file, output_dir] = sys.argv[1:] + + # Expand.exe does its work in a fixed-named temporary directory created within + # the given output directory. This is a problem for concurrent extractions, so + # create a unique temp dir within the desired output directory to work around + # this limitation. + temp_dir = tempfile.mkdtemp(dir=output_dir) + + try: + # Invoke the Windows expand utility to extract the file. + level = run_quiet('expand', cab_path, '-F:' + archived_file, temp_dir) + if level == 0: + # Move the output file into place, preserving expand.exe's behavior of + # paving over any preexisting file. 
+ output_file = os.path.join(output_dir, archived_file) + try: + os.remove(output_file) + except OSError: + pass + os.rename(os.path.join(temp_dir, archived_file), output_file) + finally: + shutil.rmtree(temp_dir, True) + + if level != 0: + return level + + # The expand utility preserves the modification date and time of the archived + # file. Touch the extracted file. This helps build systems that compare the + # modification times of input and output files to determine whether to do an + # action. + os.utime(os.path.join(output_dir, archived_file), None) + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/extract_partition.py b/extract_partition.py new file mode 100755 index 000000000000..319ce8fc7f45 --- /dev/null +++ b/extract_partition.py @@ -0,0 +1,176 @@ +#!/usr/bin/env python3 +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Extracts an LLD partition from an ELF file.""" + +import argparse +import hashlib +import math +import os +import struct +import subprocess +import sys +import tempfile + + +def _ComputeNewBuildId(old_build_id, file_path): + """ + Computes the new build-id from old build-id and file_path. + + Args: + old_build_id: Original build-id in bytearray. + file_path: Path to output ELF file. + + Returns: + New build id with the same length as |old_build_id|. + """ + m = hashlib.sha256() + m.update(old_build_id) + m.update(os.path.basename(file_path).encode('utf-8')) + hash_bytes = m.digest() + # In case build_id is longer than hash computed, repeat the hash + # to the desired length first. + id_size = len(old_build_id) + hash_size = len(hash_bytes) + return (hash_bytes * (id_size // hash_size + 1))[:id_size] + + +def _ExtractPartition(objcopy, input_elf, output_elf, partition): + """ + Extracts a partition from an ELF file. + + For partitions other than main partition, we need to rewrite + the .note.gnu.build-id section so that the build-id remains + unique. + + Note: + - `objcopy` does not modify build-id when partitioning the + combined ELF file by default. + - The new build-id is calculated as hash of original build-id + and partitioned ELF file name. + + Args: + objcopy: Path to objcopy binary. + input_elf: Path to input ELF file. + output_elf: Path to output ELF file. + partition: Partition to extract from combined ELF file. None when + extracting main partition. + """ + if not partition: # main partition + # We do not overwrite build-id on main partition to allow the expected + # partition build ids to be synthesized given a libchrome.so binary, + # if necessary. + subprocess.check_call( + [objcopy, '--extract-main-partition', input_elf, output_elf]) + return + + # partitioned libs + build_id_section = '.note.gnu.build-id' + + with tempfile.TemporaryDirectory() as tempdir: + temp_elf = os.path.join(tempdir, 'obj_without_id.so') + old_build_id_file = os.path.join(tempdir, 'old_build_id') + new_build_id_file = os.path.join(tempdir, 'new_build_id') + + # Dump out build-id section and remove original build-id section from + # ELF file. + subprocess.check_call([ + objcopy, + '--extract-partition', + partition, + # Note: Not using '--update-section' here as it is not supported + # by llvm-objcopy. 
+ '--remove-section', + build_id_section, + '--dump-section', + '{}={}'.format(build_id_section, old_build_id_file), + input_elf, + temp_elf, + ]) + + with open(old_build_id_file, 'rb') as f: + note_content = f.read() + + # .note section has following format according to + # typedef struct { + # unsigned char namesz[4]; /* Size of entry's owner string */ + # unsigned char descsz[4]; /* Size of the note descriptor */ + # unsigned char type[4]; /* Interpretation of the descriptor */ + # char name[1]; /* Start of the name+desc data */ + # } Elf_External_Note; + # `build-id` rewrite is only required on Android platform, + # where we have partitioned lib. + # Android platform uses little-endian. + # <: little-endian + # 4x: Skip 4 bytes + # L: unsigned long, 4 bytes + descsz, = struct.Struct('<4xL').unpack_from(note_content) + prefix = note_content[:-descsz] + build_id = note_content[-descsz:] + + with open(new_build_id_file, 'wb') as f: + f.write(prefix + _ComputeNewBuildId(build_id, output_elf)) + + # Write back the new build-id section. + subprocess.check_call([ + objcopy, + '--add-section', + '{}={}'.format(build_id_section, new_build_id_file), + # Add alloc section flag, or else the section will be removed by + # objcopy --strip-all when generating unstripped lib file. + '--set-section-flags', + '{}={}'.format(build_id_section, 'alloc'), + temp_elf, + output_elf, + ]) + + +def main(): + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument( + '--partition', + help='Name of partition if not the main partition', + metavar='PART') + parser.add_argument( + '--objcopy', + required=True, + help='Path to llvm-objcopy binary', + metavar='FILE') + parser.add_argument( + '--unstripped-output', + required=True, + help='Unstripped output file', + metavar='FILE') + parser.add_argument( + '--stripped-output', + required=True, + help='Stripped output file', + metavar='FILE') + parser.add_argument('--split-dwarf', action='store_true') + parser.add_argument('input', help='Input file') + args = parser.parse_args() + + _ExtractPartition(args.objcopy, args.input, args.unstripped_output, + args.partition) + subprocess.check_call([ + args.objcopy, + '--strip-all', + args.unstripped_output, + args.stripped_output, + ]) + + # Debug info for partitions is the same as for the main library, so just + # symlink the .dwp files. + if args.split_dwarf: + dest = args.unstripped_output + '.dwp' + try: + os.unlink(dest) + except OSError: + pass + relpath = os.path.relpath(args.input + '.dwp', os.path.dirname(dest)) + os.symlink(relpath, dest) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/find_depot_tools.py b/find_depot_tools.py new file mode 100755 index 000000000000..f891a414837a --- /dev/null +++ b/find_depot_tools.py @@ -0,0 +1,74 @@ +#!/usr/bin/env python3 +# Copyright 2011 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Small utility function to find depot_tools and add it to the python path. + +Will throw an ImportError exception if depot_tools can't be found since it +imports breakpad. + +This can also be used as a standalone script to print out the depot_tools +directory location. 
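+
+Example (output path is illustrative):
+  $ python3 build/find_depot_tools.py
+  /work/chromium/src/third_party/depot_tools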
+""" + + +import os +import sys + + +# Path to //src +SRC = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) + + +def IsRealDepotTools(path): + expanded_path = os.path.expanduser(path) + return os.path.isfile(os.path.join(expanded_path, 'gclient.py')) + + +def add_depot_tools_to_path(): + """Search for depot_tools and add it to sys.path.""" + # First, check if we have a DEPS'd in "depot_tools". + deps_depot_tools = os.path.join(SRC, 'third_party', 'depot_tools') + if IsRealDepotTools(deps_depot_tools): + # Put the pinned version at the start of the sys.path, in case there + # are other non-pinned versions already on the sys.path. + sys.path.insert(0, deps_depot_tools) + return deps_depot_tools + + # Then look if depot_tools is already in PYTHONPATH. + for i in sys.path: + if i.rstrip(os.sep).endswith('depot_tools') and IsRealDepotTools(i): + return i + # Then look if depot_tools is in PATH, common case. + for i in os.environ['PATH'].split(os.pathsep): + if IsRealDepotTools(i): + sys.path.append(i.rstrip(os.sep)) + return i + # Rare case, it's not even in PATH, look upward up to root. + root_dir = os.path.dirname(os.path.abspath(__file__)) + previous_dir = os.path.abspath(__file__) + while root_dir and root_dir != previous_dir: + i = os.path.join(root_dir, 'depot_tools') + if IsRealDepotTools(i): + sys.path.append(i) + return i + previous_dir = root_dir + root_dir = os.path.dirname(root_dir) + print('Failed to find depot_tools', file=sys.stderr) + return None + +DEPOT_TOOLS_PATH = add_depot_tools_to_path() + +# pylint: disable=W0611 +import breakpad + + +def main(): + if DEPOT_TOOLS_PATH is None: + return 1 + print(DEPOT_TOOLS_PATH) + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/fix_gn_headers.py b/fix_gn_headers.py new file mode 100755 index 000000000000..5111b5db4d4c --- /dev/null +++ b/fix_gn_headers.py @@ -0,0 +1,219 @@ +#!/usr/bin/env python3 +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Fix header files missing in GN. + +This script takes the missing header files from check_gn_headers.py, and +try to fix them by adding them to the GN files. +Manual cleaning up is likely required afterwards. +""" + + +import argparse +import os +import re +import subprocess +import sys + + +def GitGrep(pattern): + p = subprocess.Popen( + ['git', 'grep', '-En', pattern, '--', '*.gn', '*.gni'], + stdout=subprocess.PIPE) + out, _ = p.communicate() + return out, p.returncode + + +def ValidMatches(basename, cc, grep_lines): + """Filter out 'git grep' matches with header files already.""" + matches = [] + for line in grep_lines: + gnfile, linenr, contents = line.split(':') + linenr = int(linenr) + new = re.sub(cc, basename, contents) + lines = open(gnfile).read().splitlines() + assert contents in lines[linenr - 1] + # Skip if it's already there. It could be before or after the match. + if lines[linenr] == new: + continue + if lines[linenr - 2] == new: + continue + print(' ', gnfile, linenr, new) + matches.append((gnfile, linenr, new)) + return matches + + +def AddHeadersNextToCC(headers, skip_ambiguous=True): + """Add header files next to the corresponding .cc files in GN files. + + When skip_ambiguous is True, skip if multiple .cc files are found. + Returns unhandled headers. + + Manual cleaning up is likely required, especially if not skip_ambiguous. 
+ """ + edits = {} + unhandled = [] + for filename in headers: + filename = filename.strip() + if not (filename.endswith('.h') or filename.endswith('.hh')): + continue + basename = os.path.basename(filename) + print(filename) + cc = r'\b' + os.path.splitext(basename)[0] + r'\.(cc|cpp|mm)\b' + out, returncode = GitGrep('(/|")' + cc + '"') + if returncode != 0 or not out: + unhandled.append(filename) + continue + + matches = ValidMatches(basename, cc, out.splitlines()) + + if len(matches) == 0: + continue + if len(matches) > 1: + print('\n[WARNING] Ambiguous matching for', filename) + for i in enumerate(matches, 1): + print('%d: %s' % (i[0], i[1])) + print() + if skip_ambiguous: + continue + + picked = raw_input('Pick the matches ("2,3" for multiple): ') + try: + matches = [matches[int(i) - 1] for i in picked.split(',')] + except (ValueError, IndexError): + continue + + for match in matches: + gnfile, linenr, new = match + print(' ', gnfile, linenr, new) + edits.setdefault(gnfile, {})[linenr] = new + + for gnfile in edits: + lines = open(gnfile).read().splitlines() + for l in sorted(edits[gnfile].keys(), reverse=True): + lines.insert(l, edits[gnfile][l]) + open(gnfile, 'w').write('\n'.join(lines) + '\n') + + return unhandled + + +def AddHeadersToSources(headers, skip_ambiguous=True): + """Add header files to the sources list in the first GN file. + + The target GN file is the first one up the parent directories. + This usually does the wrong thing for _test files if the test and the main + target are in the same .gn file. + When skip_ambiguous is True, skip if multiple sources arrays are found. + + "git cl format" afterwards is required. Manually cleaning up duplicated items + is likely required. + """ + for filename in headers: + filename = filename.strip() + print(filename) + dirname = os.path.dirname(filename) + while not os.path.exists(os.path.join(dirname, 'BUILD.gn')): + dirname = os.path.dirname(dirname) + rel = filename[len(dirname) + 1:] + gnfile = os.path.join(dirname, 'BUILD.gn') + + lines = open(gnfile).read().splitlines() + matched = [i for i, l in enumerate(lines) if ' sources = [' in l] + if skip_ambiguous and len(matched) > 1: + print('[WARNING] Multiple sources in', gnfile) + continue + + if len(matched) < 1: + continue + print(' ', gnfile, rel) + index = matched[0] + lines.insert(index + 1, '"%s",' % rel) + open(gnfile, 'w').write('\n'.join(lines) + '\n') + + +def RemoveHeader(headers, skip_ambiguous=True): + """Remove non-existing headers in GN files. + + When skip_ambiguous is True, skip if multiple matches are found. 
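+
+  Returns the headers for which no GN reference could be found.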
+ """ + edits = {} + unhandled = [] + for filename in headers: + filename = filename.strip() + if not (filename.endswith('.h') or filename.endswith('.hh')): + continue + basename = os.path.basename(filename) + print(filename) + out, returncode = GitGrep('(/|")' + basename + '"') + if returncode != 0 or not out: + unhandled.append(filename) + print(' Not found') + continue + + grep_lines = out.splitlines() + matches = [] + for line in grep_lines: + gnfile, linenr, contents = line.split(':') + print(' ', gnfile, linenr, contents) + linenr = int(linenr) + lines = open(gnfile).read().splitlines() + assert contents in lines[linenr - 1] + matches.append((gnfile, linenr, contents)) + + if len(matches) == 0: + continue + if len(matches) > 1: + print('\n[WARNING] Ambiguous matching for', filename) + for i in enumerate(matches, 1): + print('%d: %s' % (i[0], i[1])) + print() + if skip_ambiguous: + continue + + picked = raw_input('Pick the matches ("2,3" for multiple): ') + try: + matches = [matches[int(i) - 1] for i in picked.split(',')] + except (ValueError, IndexError): + continue + + for match in matches: + gnfile, linenr, contents = match + print(' ', gnfile, linenr, contents) + edits.setdefault(gnfile, set()).add(linenr) + + for gnfile in edits: + lines = open(gnfile).read().splitlines() + for l in sorted(edits[gnfile], reverse=True): + lines.pop(l - 1) + open(gnfile, 'w').write('\n'.join(lines) + '\n') + + return unhandled + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('input_file', help="missing or non-existing headers, " + "output of check_gn_headers.py") + parser.add_argument('--prefix', + help="only handle path name with this prefix") + parser.add_argument('--remove', action='store_true', + help="treat input_file as non-existing headers") + + args, _extras = parser.parse_known_args() + + headers = open(args.input_file).readlines() + + if args.prefix: + headers = [i for i in headers if i.startswith(args.prefix)] + + if args.remove: + RemoveHeader(headers, False) + else: + unhandled = AddHeadersNextToCC(headers) + AddHeadersToSources(unhandled) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/fuchsia/COMMON_METADATA b/fuchsia/COMMON_METADATA new file mode 100644 index 000000000000..f7f8861f038a --- /dev/null +++ b/fuchsia/COMMON_METADATA @@ -0,0 +1,5 @@ +monorail { + component: "Fuchsia" +} +team_email: "fuchsia-dev@chromium.org" +os: FUCHSIA diff --git a/fuchsia/DIR_METADATA b/fuchsia/DIR_METADATA new file mode 100644 index 000000000000..210aa6a954b8 --- /dev/null +++ b/fuchsia/DIR_METADATA @@ -0,0 +1 @@ +mixins: "//build/fuchsia/COMMON_METADATA" diff --git a/fuchsia/OWNERS b/fuchsia/OWNERS new file mode 100644 index 000000000000..887630d46eba --- /dev/null +++ b/fuchsia/OWNERS @@ -0,0 +1,18 @@ +# When picking a reviewer for Fuchsia-related OWNERShip, please start by looking +# at git history to find the most relevant owner. The team would appreciate it +# if you would also add chromium-fuchsia-reviews@google.com so that a shadowed +# reviewer is added automatically. Thank you. 
+ +ddorwin@chromium.org +grt@chromium.org +sergeyu@chromium.org +wez@chromium.org + +per-file *.py=chonggu@google.com +per-file *.py=rohpavone@chromium.org +per-file *.py=zijiehe@google.com + +per-file linux_internal.sdk.sha1=chromium-internal-autoroll@skia-corp.google.com.iam.gserviceaccount.com + +per-file SECURITY_OWNERS=set noparent +per-file SECURITY_OWNERS=file://build/fuchsia/SECURITY_OWNERS diff --git a/fuchsia/PRESUBMIT.py b/fuchsia/PRESUBMIT.py new file mode 100644 index 000000000000..f42f4c2309a5 --- /dev/null +++ b/fuchsia/PRESUBMIT.py @@ -0,0 +1,47 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Presubmit script for Fuchsia. + +See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for +details on the presubmit API built into depot_tools. +""" + +USE_PYTHON3 = True + +import os + + +def CommonChecks(input_api, output_api): + build_fuchsia_dir = input_api.PresubmitLocalPath() + + def J(*dirs): + """Returns a path relative to presubmit directory.""" + return input_api.os_path.join(build_fuchsia_dir, *dirs) + + tests = [] + unit_tests = [ + J('binary_sizes_test.py'), + J('binary_size_differ_test.py'), + J('gcs_download_test.py'), + J('update_images_test.py'), + J('update_product_bundles_test.py'), + J('update_sdk_test.py'), + ] + + tests.extend( + input_api.canned_checks.GetUnitTests(input_api, + output_api, + unit_tests=unit_tests, + run_on_python2=False, + run_on_python3=True, + skip_shebang_check=True)) + return input_api.RunTests(tests) + + +def CheckChangeOnUpload(input_api, output_api): + return CommonChecks(input_api, output_api) + + +def CheckChangeOnCommit(input_api, output_api): + return CommonChecks(input_api, output_api) diff --git a/fuchsia/SECURITY_OWNERS b/fuchsia/SECURITY_OWNERS new file mode 100644 index 000000000000..17e8b7c653a4 --- /dev/null +++ b/fuchsia/SECURITY_OWNERS @@ -0,0 +1,16 @@ +# Changes to integration with the Fuchsia platform, or peer components, require +# security review to avoid introducing sandbox escapes. These include: +# - Critical platform integrations (e.g. shared memory, process launching). +# - Changes to Chromium-defined Fuchsia IPC (aka FIDL) protocols. +# - Addition of new FIDL services to child process sandboxes. +# - Addition of new FIDL clients and implementations. +# +# Security team: If you are uncomfortable reviewing a particular bit of code +# yourself, don't hesitate to seek help from another security team member! +# Nobody knows everything, and the only way to learn is from experience. + +# Please keep reviewers ordered alphabetically by LDAP. +ajgo@chromium.org +rsesek@chromium.org +tsepez@chromium.org +wez@chromium.org diff --git a/fuchsia/__init__.py b/fuchsia/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/fuchsia/binary_size_differ.py b/fuchsia/binary_size_differ.py new file mode 100755 index 000000000000..190a1731cfdd --- /dev/null +++ b/fuchsia/binary_size_differ.py @@ -0,0 +1,153 @@ +#!/usr/bin/env vpython3 +# +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+'''Implements Chrome-Fuchsia package binary size differ.'''
+
+import argparse
+import json
+import os
+import sys
+import traceback
+
+from binary_sizes import ReadPackageSizesJson
+from binary_sizes import PACKAGES_SIZES_FILE
+
+# Eng is not responsible for changes that cause "reasonable growth" if the
+# uncompressed binary size does not grow.
+# First-warning will fail the test if the uncompressed and compressed size
+# grow, while always-fail will fail the test regardless of uncompressed growth
+# (solely based on compressed growth).
+_FIRST_WARNING_DELTA_BYTES = 12 * 1024  # 12 KiB
+_ALWAYS_FAIL_DELTA_BYTES = 100 * 1024  # 100 KiB
+_TRYBOT_DOC = 'https://chromium.googlesource.com/chromium/src/+/main/docs/speed/binary_size/fuchsia_binary_size_trybot.md'
+
+SIZE_FAILURE = 1
+ROLLER_SIZE_WARNING = 2
+SUCCESS = 0
+
+
+def ComputePackageDiffs(before_sizes_file, after_sizes_file, author=None):
+  '''Computes the per-package size difference between the after and before
+  builds.'''
+  before_sizes = ReadPackageSizesJson(before_sizes_file)
+  after_sizes = ReadPackageSizesJson(after_sizes_file)
+
+  assert before_sizes.keys() == after_sizes.keys(), (
+      'Package files cannot'
+      ' be compared with different packages: '
+      '{} vs {}'.format(before_sizes.keys(), after_sizes.keys()))
+
+  growth = {'compressed': {}, 'uncompressed': {}}
+  status_code = SUCCESS
+  summary = ''
+  for package_name in before_sizes:
+    growth['compressed'][package_name] = (
+        after_sizes[package_name].compressed -
+        before_sizes[package_name].compressed)
+    growth['uncompressed'][package_name] = (
+        after_sizes[package_name].uncompressed -
+        before_sizes[package_name].uncompressed)
+    # Developers are only responsible if uncompressed increases.
+    if ((growth['compressed'][package_name] >= _FIRST_WARNING_DELTA_BYTES
+         and growth['uncompressed'][package_name] > 0)
+        # However, if compressed growth is unusually large, fail always.
+        or growth['compressed'][package_name] >= _ALWAYS_FAIL_DELTA_BYTES):
+      if not summary:
+        summary = ('Size check failed! The following package(s) are affected:'
+                   '\n    ')
+        status_code = SIZE_FAILURE
+      summary += (('- {} (compressed) grew by {} bytes (uncompressed growth:'
+                   ' {} bytes).\n    ').format(
+          package_name, growth['compressed'][package_name],
+          growth['uncompressed'][package_name]))
+  summary += ('Note that this bot compares growth against trunk, and is '
+              'not aware of CL chaining.\n    ')
+
+  # Allow rollers to pass even with size increases. See crbug.com/1355914.
+  if author and '-autoroll' in author and status_code == SIZE_FAILURE:
+    summary = summary.replace('Size check failed! ', '')
+    summary = (
+        'The following growth by an autoroller will be ignored:\n\n    ' +
+        summary)
+    status_code = ROLLER_SIZE_WARNING
+  growth['status_code'] = status_code
+  summary += ('\n    See the following document for more information about'
+              ' this trybot:\n    {}'.format(_TRYBOT_DOC))
+  growth['summary'] = summary
+
+  # TODO(crbug.com/1266085): Investigate using these fields.
+  growth['archive_filenames'] = []
+  growth['links'] = []
+  return growth
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--before-dir',
+      type=os.path.realpath,
+      required=True,
+      help='Location of the build without the patch',
+  )
+  parser.add_argument(
+      '--after-dir',
+      type=os.path.realpath,
+      required=True,
+      help='Location of the build with the patch',
+  )
+  parser.add_argument('--author', help='Author of change')
+  parser.add_argument(
+      '--results-path',
+      type=os.path.realpath,
+      required=True,
+      help='Output path for the trybot result .json file',
+  )
+  parser.add_argument('--verbose',
+                      '-v',
+                      action='store_true',
+                      help='Enable verbose output')
+  args = parser.parse_args()
+
+  if args.verbose:
+    print('Fuchsia binary sizes')
+    print('Working directory', os.getcwd())
+    print('Args:')
+    for var in vars(args):
+      print('  {}: {}'.format(var, getattr(args, var) or ''))
+
+  if not os.path.isdir(args.before_dir) or not os.path.isdir(args.after_dir):
+    raise Exception(
+        'Could not find build output directory "{}" or "{}".'.format(
+            args.before_dir, args.after_dir))
+
+  test_name = 'sizes'
+  before_sizes_file = os.path.join(args.before_dir, test_name,
+                                   PACKAGES_SIZES_FILE)
+  after_sizes_file = os.path.join(args.after_dir, test_name,
+                                  PACKAGES_SIZES_FILE)
+  if not os.path.isfile(before_sizes_file):
+    raise Exception(
+        'Could not find before sizes file: "{}"'.format(before_sizes_file))
+
+  if not os.path.isfile(after_sizes_file):
+    raise Exception(
+        'Could not find after sizes file: "{}"'.format(after_sizes_file))
+
+  test_completed = False
+  try:
+    growth = ComputePackageDiffs(before_sizes_file,
+                                 after_sizes_file,
+                                 author=args.author)
+    test_completed = True
+    with open(args.results_path, 'wt') as results_file:
+      json.dump(growth, results_file)
+  except:
+    _, value, trace = sys.exc_info()
+    traceback.print_tb(trace)
+    print(str(value))
+  finally:
+    return 0 if test_completed else 1
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/fuchsia/binary_size_differ_test.py b/fuchsia/binary_size_differ_test.py
new file mode 100755
index 000000000000..6192bf2d6539
--- /dev/null
+++ b/fuchsia/binary_size_differ_test.py
@@ -0,0 +1,171 @@
+#!/usr/bin/env vpython3
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+ +import copy +import os +import tempfile +from typing import MutableMapping, Optional +import unittest + +import binary_size_differ +import binary_sizes + +_EXAMPLE_BLOBS_BEFORE = """ +{ + "web_engine": [ + { + "merkle": "77e876447dd2daaaab7048d646e87fe8b6d9fecef6cbfcc4af30b8fbfa50b881", + "path": "locales/ta.pak", + "bytes": 17916, + "is_counted": true, + "size": 16384 + }, + { + "merkle": "5f1932b8c9fe954f3c3fdb34ab2089d2af34e5a0cef90cad41a1cd37d92234bf", + "path": "lib/libEGL.so", + "bytes": 226960, + "is_counted": true, + "size": 90112 + }, + { + "merkle": "9822fc0dd95cdd1cc46b5c6632a928a6ad19b76ed0157397d82a2f908946fc34", + "path": "meta.far", + "bytes": 24576, + "is_counted": false, + "size": 16384 + }, + { + "merkle": "090aed4593c4f7d04a3ad80e9971c0532dd5b1d2bdf4754202cde510a88fd220", + "path": "locales/ru.pak", + "bytes": 11903, + "is_counted": true, + "size": 16384 + } + ] +} +""" + + +class BinarySizeDifferTest(unittest.TestCase): + def ChangePackageSize( + self, + packages: MutableMapping[str, binary_sizes.PackageSizes], + name: str, + compressed_increase: int, + uncompressed_increase: Optional[int] = None): + if uncompressed_increase is None: + uncompressed_increase = compressed_increase + original_package = packages[name] + new_package = binary_sizes.PackageSizes( + compressed=original_package.compressed + compressed_increase, + uncompressed=original_package.uncompressed + uncompressed_increase) + packages[name] = new_package + + def testComputePackageDiffs(self): + # TODO(1309977): Disabled on Windows because Windows doesn't allow opening a + # NamedTemporaryFile by name. + if os.name == 'nt': + return + + SUCCESS = 0 + FAILURE = 1 + ROLLER_SIZE_WARNING = 2 + with tempfile.NamedTemporaryFile(mode='w') as before_file: + before_file.write(_EXAMPLE_BLOBS_BEFORE) + before_file.flush() + blobs = binary_sizes.ReadPackageBlobsJson(before_file.name) + sizes = binary_sizes.GetPackageSizes(blobs) + binary_sizes.WritePackageSizesJson(before_file.name, sizes) + + # No change. + growth = binary_size_differ.ComputePackageDiffs(before_file.name, + before_file.name) + self.assertEqual(growth['status_code'], SUCCESS) + self.assertEqual(growth['compressed']['web_engine'], 0) + + after_file = tempfile.NamedTemporaryFile(mode='w', delete=True) + after_file.close() + try: + # Increase a blob, but below the limit. + other_sizes = copy.deepcopy(sizes) + self.ChangePackageSize(other_sizes, 'web_engine', 8 * 1024) + binary_sizes.WritePackageSizesJson(after_file.name, other_sizes) + + growth = binary_size_differ.ComputePackageDiffs(before_file.name, + after_file.name) + self.assertEqual(growth['status_code'], SUCCESS) + self.assertEqual(growth['compressed']['web_engine'], 8 * 1024) + + # Increase beyond the limit (adds another 8k) + self.ChangePackageSize(other_sizes, 'web_engine', 8 * 1024 + 1) + binary_sizes.WritePackageSizesJson(after_file.name, other_sizes) + growth = binary_size_differ.ComputePackageDiffs(before_file.name, + after_file.name) + self.assertEqual(growth['status_code'], FAILURE) + self.assertEqual(growth['compressed']['web_engine'], 16 * 1024 + 1) + self.assertIn('check failed', growth['summary']) + self.assertIn(f'web_engine (compressed) grew by {16 * 1024 + 1} bytes', + growth['summary']) + + # Increase beyond the limit, but compressed does not increase. 
+ binary_sizes.WritePackageSizesJson(before_file.name, other_sizes) + self.ChangePackageSize(other_sizes, + 'web_engine', + 16 * 1024 + 1, + uncompressed_increase=0) + binary_sizes.WritePackageSizesJson(after_file.name, other_sizes) + growth = binary_size_differ.ComputePackageDiffs(before_file.name, + after_file.name) + self.assertEqual(growth['uncompressed']['web_engine'], SUCCESS) + self.assertEqual(growth['status_code'], SUCCESS) + self.assertEqual(growth['compressed']['web_engine'], 16 * 1024 + 1) + + # Increase beyond the limit, but compressed goes down. + binary_sizes.WritePackageSizesJson(before_file.name, other_sizes) + self.ChangePackageSize(other_sizes, + 'web_engine', + 16 * 1024 + 1, + uncompressed_increase=-4 * 1024) + binary_sizes.WritePackageSizesJson(after_file.name, other_sizes) + growth = binary_size_differ.ComputePackageDiffs(before_file.name, + after_file.name) + self.assertEqual(growth['status_code'], SUCCESS) + self.assertEqual(growth['compressed']['web_engine'], 16 * 1024 + 1) + + # Increase beyond the second limit. Fails, regardless of uncompressed. + binary_sizes.WritePackageSizesJson(before_file.name, other_sizes) + self.ChangePackageSize(other_sizes, + 'web_engine', + 100 * 1024 + 1, + uncompressed_increase=-4 * 1024) + binary_sizes.WritePackageSizesJson(after_file.name, other_sizes) + growth = binary_size_differ.ComputePackageDiffs(before_file.name, + after_file.name) + self.assertEqual(growth['status_code'], FAILURE) + self.assertEqual(growth['compressed']['web_engine'], 100 * 1024 + 1) + + # Increase beyond the second limit, but roller authored CL. + binary_sizes.WritePackageSizesJson(before_file.name, other_sizes) + self.ChangePackageSize(other_sizes, + 'web_engine', + 100 * 1024 + 1, + uncompressed_increase=-4 * 1024) + binary_sizes.WritePackageSizesJson(after_file.name, other_sizes) + growth = binary_size_differ.ComputePackageDiffs(before_file.name, + after_file.name, + author='big-autoroller') + self.assertEqual(growth['status_code'], ROLLER_SIZE_WARNING) + self.assertEqual(growth['compressed']['web_engine'], 100 * 1024 + 1) + self.assertNotIn('check failed', growth['summary']) + self.assertIn('growth by an autoroller will be ignored', + growth['summary']) + self.assertIn(f'web_engine (compressed) grew by {100 * 1024 + 1} bytes', + growth['summary']) + finally: + os.remove(after_file.name) + + +if __name__ == '__main__': + unittest.main() diff --git a/fuchsia/binary_sizes.py b/fuchsia/binary_sizes.py new file mode 100755 index 000000000000..b1aa938c4f7d --- /dev/null +++ b/fuchsia/binary_sizes.py @@ -0,0 +1,618 @@ +#!/usr/bin/env vpython3 +# +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +'''Implements Chrome-Fuchsia package binary size checks.''' + +import argparse +import collections +import json +import math +import os +import re +import shutil +import subprocess +import sys +import tempfile +import time +import traceback +import uuid + +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), + 'test'))) + +from common import DIR_SRC_ROOT, SDK_ROOT, get_host_tool_path + +PACKAGES_BLOBS_FILE = 'package_blobs.json' +PACKAGES_SIZES_FILE = 'package_sizes.json' + +# Structure representing the compressed and uncompressed sizes for a Fuchsia +# package. +PackageSizes = collections.namedtuple('PackageSizes', + ['compressed', 'uncompressed']) + +# Structure representing a Fuchsia package blob and its compressed and +# uncompressed sizes. 
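+# Example, using illustrative values patterned on the test data:
+#   Blob(name='lib/libEGL.so', hash='5f1932b8...', compressed=90112,
+#        uncompressed=226960, is_counted=True)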
+Blob = collections.namedtuple(
+    'Blob', ['name', 'hash', 'compressed', 'uncompressed', 'is_counted'])
+
+
+def CreateSizesExternalDiagnostic(sizes_guid):
+  """Creates a histogram external sizes diagnostic."""
+
+  benchmark_diagnostic = {
+      'type': 'GenericSet',
+      'guid': str(sizes_guid),
+      'values': ['sizes'],
+  }
+
+  return benchmark_diagnostic
+
+
+def CreateSizesHistogramItem(name, size, sizes_guid):
+  """Create a performance dashboard histogram from the histogram template and
+  binary size data."""
+
+  # Chromium performance dashboard histogram containing binary size data.
+  histogram = {
+      'name': name,
+      'unit': 'sizeInBytes_smallerIsBetter',
+      'diagnostics': {
+          'benchmarks': str(sizes_guid),
+      },
+      'sampleValues': [size],
+      'running': [1, size, math.log(size), size, size, size, 0],
+      'description': 'chrome-fuchsia package binary sizes',
+      'summaryOptions': {
+          'avg': True,
+          'count': False,
+          'max': False,
+          'min': False,
+          'std': False,
+          'sum': False,
+      },
+  }
+
+  return histogram
+
+
+def CreateSizesHistogram(package_sizes):
+  """Create a performance dashboard histogram from binary size data."""
+
+  sizes_guid = uuid.uuid1()
+  histogram = [CreateSizesExternalDiagnostic(sizes_guid)]
+  for name, size in package_sizes.items():
+    histogram.append(
+        CreateSizesHistogramItem('%s_%s' % (name, 'compressed'),
+                                 size.compressed, sizes_guid))
+    histogram.append(
+        CreateSizesHistogramItem('%s_%s' % (name, 'uncompressed'),
+                                 size.uncompressed, sizes_guid))
+  return histogram
+
+
+def CreateTestResults(test_status, timestamp):
+  """Create test results data to write to JSON test results file.
+
+  The JSON data format is defined in
+  https://chromium.googlesource.com/chromium/src/+/main/docs/testing/json_test_results_format.md
+  """
+
+  results = {
+      'tests': {},
+      'interrupted': False,
+      'metadata': {
+          'test_name_prefix': 'build/fuchsia/'
+      },
+      'version': 3,
+      'seconds_since_epoch': timestamp,
+  }
+
+  num_failures_by_type = {result: 0 for result in ['FAIL', 'PASS', 'CRASH']}
+  for metric in test_status:
+    actual_status = test_status[metric]
+    num_failures_by_type[actual_status] += 1
+    results['tests'][metric] = {
+        'expected': 'PASS',
+        'actual': actual_status,
+    }
+  results['num_failures_by_type'] = num_failures_by_type
+
+  return results
+
+
+def GetTestStatus(package_sizes, sizes_config, test_completed):
+  """Checks package sizes against size limits.
+
+  Returns a tuple of overall test pass/fail status and a dictionary mapping
+  size limit checks to PASS/FAIL/CRASH status."""
+
+  if not test_completed:
+    test_status = {'binary_sizes': 'CRASH'}
+  else:
+    test_status = {}
+    for metric, limit in sizes_config['size_limits'].items():
+      # Strip the "_compressed" suffix from |metric| if it exists.
+      match = re.match(r'(?P<name>\w+)_compressed', metric)
+      package_name = match.group('name') if match else metric
+      if package_name not in package_sizes:
+        raise Exception('package "%s" not in sizes "%s"' %
+                        (package_name, str(package_sizes)))
+      if package_sizes[package_name].compressed <= limit:
+        test_status[metric] = 'PASS'
+      else:
+        test_status[metric] = 'FAIL'
+
+  all_tests_passed = all(status == 'PASS' for status in test_status.values())
+
+  return all_tests_passed, test_status
+
+
+def WriteSimpleTestResults(results_path, test_completed):
+  """Writes simplified test results file.
+
+  Used when test status is not available.
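+
+  The simplified format looks like this (illustrative):
+    {"valid": true, "failures": [], "version": "simplified"}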
+ """ + + simple_isolated_script_output = { + 'valid': test_completed, + 'failures': [], + 'version': 'simplified', + } + with open(results_path, 'w') as output_file: + json.dump(simple_isolated_script_output, output_file) + + +def WriteTestResults(results_path, test_completed, test_status, timestamp): + """Writes test results file containing test PASS/FAIL/CRASH statuses.""" + + if test_status: + test_results = CreateTestResults(test_status, timestamp) + with open(results_path, 'w') as results_file: + json.dump(test_results, results_file) + else: + WriteSimpleTestResults(results_path, test_completed) + + +def WriteGerritPluginSizeData(output_path, package_sizes): + """Writes a package size dictionary in json format for the Gerrit binary + sizes plugin.""" + + with open(output_path, 'w') as sizes_file: + sizes_data = {name: size.compressed for name, size in package_sizes.items()} + json.dump(sizes_data, sizes_file) + + +def ReadPackageBlobsJson(json_path): + """Reads package blob info from json file. + + Opens json file of blob info written by WritePackageBlobsJson, + and converts back into package blobs used in this script. + """ + with open(json_path, 'rt') as json_file: + formatted_blob_info = json.load(json_file) + + package_blobs = {} + for package in formatted_blob_info: + package_blobs[package] = {} + for blob_info in formatted_blob_info[package]: + blob = Blob(name=blob_info['path'], + hash=blob_info['merkle'], + uncompressed=blob_info['bytes'], + compressed=blob_info['size'], + is_counted=blob_info['is_counted']) + package_blobs[package][blob.name] = blob + + return package_blobs + + +def WritePackageBlobsJson(json_path, package_blobs): + """Writes package blob information in human-readable JSON format. + + The json data is an array of objects containing these keys: + 'path': string giving blob location in the local file system + 'merkle': the blob's Merkle hash + 'bytes': the number of uncompressed bytes in the blod + 'size': the size of the compressed blob in bytes. A multiple of the blobfs + block size (8192) + 'is_counted: true if the blob counts towards the package budget, or false + if not (for ICU blobs or blobs distributed in the SDK)""" + + formatted_blob_stats_per_package = {} + for package in package_blobs: + blob_data = [] + for blob_name in package_blobs[package]: + blob = package_blobs[package][blob_name] + blob_data.append({ + 'path': str(blob.name), + 'merkle': str(blob.hash), + 'bytes': blob.uncompressed, + 'size': blob.compressed, + 'is_counted': blob.is_counted + }) + formatted_blob_stats_per_package[package] = blob_data + + with (open(json_path, 'w')) as json_file: + json.dump(formatted_blob_stats_per_package, json_file, indent=2) + + +def WritePackageSizesJson(json_path, package_sizes): + """Writes package sizes into a human-readable JSON format. + + JSON data is a dictionary of each package name being a key, with + the following keys within the sub-object: + 'compressed': compressed size of the package in bytes. + 'uncompressed': uncompressed size of the package in bytes. + """ + formatted_package_sizes = {} + for package, size_info in package_sizes.items(): + formatted_package_sizes[package] = { + 'uncompressed': size_info.uncompressed, + 'compressed': size_info.compressed + } + with (open(json_path, 'w')) as json_file: + json.dump(formatted_package_sizes, json_file, indent=2) + + +def ReadPackageSizesJson(json_path): + """Reads package_sizes from a given JSON file. 
+
+  Opens a JSON file of package sizes written by WritePackageSizesJson,
+  and converts it back into the package sizes used in this script.
+  """
+  with open(json_path, 'rt') as json_file:
+    formatted_package_info = json.load(json_file)
+
+  package_sizes = {}
+  for package, size_info in formatted_package_info.items():
+    package_sizes[package] = PackageSizes(
+        compressed=size_info['compressed'],
+        uncompressed=size_info['uncompressed'])
+  return package_sizes
+
+
+def GetCompressedSize(file_path):
+  """Measures file size after blobfs compression."""
+
+  compressor_path = get_host_tool_path('blobfs-compression')
+  try:
+    temp_dir = tempfile.mkdtemp()
+    compressed_file_path = os.path.join(temp_dir, os.path.basename(file_path))
+    compressor_cmd = [
+        compressor_path,
+        '--source_file=%s' % file_path,
+        '--compressed_file=%s' % compressed_file_path
+    ]
+    proc = subprocess.Popen(compressor_cmd,
+                            stdout=subprocess.PIPE,
+                            stderr=subprocess.STDOUT)
+    proc.wait()
+    compressor_output = proc.stdout.read().decode('utf-8')
+    if proc.returncode != 0:
+      print(compressor_output, file=sys.stderr)
+      raise Exception('Error while running %s' % compressor_path)
+  finally:
+    shutil.rmtree(temp_dir)
+
+  # Match the compressed bytes total from blobfs-compression output like
+  # "Wrote 360830 bytes (40% compression)".
+  blobfs_compressed_bytes_re = r'Wrote\s+(?P<bytes>\d+)\s+bytes'
+
+  match = re.search(blobfs_compressed_bytes_re, compressor_output)
+  if not match:
+    print(compressor_output, file=sys.stderr)
+    raise Exception('Could not get compressed bytes for %s' % file_path)
+
+  # Round the compressed file size up to an integer number of blobfs blocks.
+  BLOBFS_BLOCK_SIZE = 8192  # Fuchsia's blobfs file system uses 8KiB blocks.
+  blob_bytes = int(match.group('bytes'))
+  return int(math.ceil(blob_bytes / BLOBFS_BLOCK_SIZE)) * BLOBFS_BLOCK_SIZE
+
+
+def ExtractFarFile(file_path, extract_dir):
+  """Extracts contents of a Fuchsia archive file to the specified directory."""
+
+  far_tool = get_host_tool_path('far')
+
+  if not os.path.isfile(far_tool):
+    raise Exception('Could not find FAR host tool "%s".' % far_tool)
+  if not os.path.isfile(file_path):
+    raise Exception('Could not find FAR file "%s".' % file_path)
+
+  subprocess.check_call([
+      far_tool, 'extract',
+      '--archive=%s' % file_path,
+      '--output=%s' % extract_dir
+  ])
+
+
+def GetBlobNameHashes(meta_dir):
+  """Returns mapping from Fuchsia pkgfs paths to blob hashes.
+
+  The mapping is read from the extracted meta.far archive contained in an
+  extracted package archive."""
+
+  blob_name_hashes = {}
+  contents_path = os.path.join(meta_dir, 'meta', 'contents')
+  with open(contents_path) as lines:
+    for line in lines:
+      (pkgfs_path, blob_hash) = line.strip().split('=')
+      blob_name_hashes[pkgfs_path] = blob_hash
+  return blob_name_hashes
+
+
+# Compiled regular expression matching strings like *.so, *.so.1, *.so.2, ...
+SO_FILENAME_REGEXP = re.compile(r'\.so(\.\d+)?$')
+
+
+def GetSdkModules():
+  """Finds shared objects (.so) under the Fuchsia SDK arch directory in dist
+  or lib subdirectories.
+
+  Returns a set of shared objects' filenames.
+  """
+
+  # Fuchsia SDK arch directory path (contains all shared object files).
+  sdk_arch_dir = os.path.join(SDK_ROOT, 'arch')
+  # Leaf subdirectories containing shared object files.
+  sdk_so_leaf_dirs = ['dist', 'lib']
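+  # For example, SO_FILENAME_REGEXP matches "libEGL.so" and "libvulkan.so.1"
+  # (an illustrative name), but not "icudtl.dat".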
+  lib_names = set()
+  for dirpath, _, file_names in os.walk(sdk_arch_dir):
+    if os.path.basename(dirpath) in sdk_so_leaf_dirs:
+      for name in file_names:
+        if SO_FILENAME_REGEXP.search(name):
+          lib_names.add(name)
+  return lib_names
+
+
+def FarBaseName(name):
+  """Strips the directory and '.far' extension from |name|."""
+  _, name = os.path.split(name)
+  name = re.sub(r'\.far$', '', name)
+  return name
+
+
+def GetPackageMerkleRoot(far_file_path):
+  """Returns a package's Merkle digest."""
+
+  # The digest is the first word on the first line of the merkle tool's
+  # output. Decode it so that a string, not bytes, is stored in Blob.hash.
+  merkle_tool = get_host_tool_path('merkleroot')
+  output = subprocess.check_output([merkle_tool, far_file_path])
+  return output.splitlines()[0].split()[0].decode('utf-8')
+
+
+def GetBlobs(far_file, build_out_dir):
+  """Calculates compressed and uncompressed blob sizes for specified FAR file.
+
+  Marks ICU blobs and blobs from SDK libraries as not counted."""
+
+  base_name = FarBaseName(far_file)
+
+  extract_dir = tempfile.mkdtemp()
+
+  # Extract files and blobs from the specified Fuchsia archive.
+  far_file_path = os.path.join(build_out_dir, far_file)
+  far_extract_dir = os.path.join(extract_dir, base_name)
+  ExtractFarFile(far_file_path, far_extract_dir)
+
+  # Extract the meta.far archive contained in the specified Fuchsia archive.
+  meta_far_file_path = os.path.join(far_extract_dir, 'meta.far')
+  meta_far_extract_dir = os.path.join(extract_dir, '%s_meta' % base_name)
+  ExtractFarFile(meta_far_file_path, meta_far_extract_dir)
+
+  # Map Linux filesystem blob names to blob hashes.
+  blob_name_hashes = GetBlobNameHashes(meta_far_extract_dir)
+
+  # "System" files whose sizes are not charged against component size budgets.
+  # Fuchsia SDK modules and the ICU icudtl.dat file sizes are not counted.
+  system_files = GetSdkModules() | set(['icudtl.dat'])
+
+  # Add the meta.far file blob.
+  blobs = {}
+  meta_name = 'meta.far'
+  meta_hash = GetPackageMerkleRoot(meta_far_file_path)
+  compressed = GetCompressedSize(meta_far_file_path)
+  uncompressed = os.path.getsize(meta_far_file_path)
+  blobs[meta_name] = Blob(meta_name, meta_hash, compressed, uncompressed, True)
+
+  # Add package blobs.
+  for blob_name, blob_hash in blob_name_hashes.items():
+    extracted_blob_path = os.path.join(far_extract_dir, blob_hash)
+    compressed = GetCompressedSize(extracted_blob_path)
+    uncompressed = os.path.getsize(extracted_blob_path)
+    is_counted = os.path.basename(blob_name) not in system_files
+    blobs[blob_name] = Blob(blob_name, blob_hash, compressed, uncompressed,
+                            is_counted)
+
+  shutil.rmtree(extract_dir)
+
+  return blobs
+
+
+def GetPackageBlobs(far_files, build_out_dir):
+  """Returns a dictionary mapping package names to the blobs they contain.
+
+  Prints package blob size statistics."""
+
+  package_blobs = {}
+  for far_file in far_files:
+    package_name = FarBaseName(far_file)
+    if package_name in package_blobs:
+      raise Exception('Duplicate FAR file base name "%s".' % package_name)
+    package_blobs[package_name] = GetBlobs(far_file, build_out_dir)
+
+  # Print package blob sizes (does not count sharing).
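+  # Only blobs with is_counted=True appear in the listing; sizes are in bytes.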
+  for package_name in sorted(package_blobs.keys()):
+    print('Package blob sizes: %s' % package_name)
+    print('%-64s %12s %12s %s' %
+          ('blob hash', 'compressed', 'uncompressed', 'path'))
+    print('%s %s %s %s' % (64 * '-', 12 * '-', 12 * '-', 20 * '-'))
+    for blob_name in sorted(package_blobs[package_name].keys()):
+      blob = package_blobs[package_name][blob_name]
+      if blob.is_counted:
+        print('%64s %12d %12d %s' %
+              (blob.hash, blob.compressed, blob.uncompressed, blob.name))
+
+  return package_blobs
+
+
+def GetPackageSizes(package_blobs):
+  """Calculates compressed and uncompressed package sizes from blob sizes."""
+
+  # TODO(crbug.com/1126177): Use partial sizes for blobs shared by
+  # non-Chrome-Fuchsia packages.
+
+  # Count the number of packages sharing each blob (a count of 1 means the
+  # blob is not shared).
+  blob_counts = collections.defaultdict(int)
+  for package_name in package_blobs:
+    for blob_name in package_blobs[package_name]:
+      blob = package_blobs[package_name][blob_name]
+      blob_counts[blob.hash] += 1
+
+  # Package sizes are the sum of blob sizes divided by their share counts.
+  package_sizes = {}
+  for package_name in package_blobs:
+    compressed_total = 0
+    uncompressed_total = 0
+    for blob_name in package_blobs[package_name]:
+      blob = package_blobs[package_name][blob_name]
+      if blob.is_counted:
+        count = blob_counts[blob.hash]
+        compressed_total += blob.compressed // count
+        uncompressed_total += blob.uncompressed // count
+    package_sizes[package_name] = PackageSizes(compressed_total,
+                                               uncompressed_total)
+
+  return package_sizes
+
+
+def GetBinarySizesAndBlobs(args, sizes_config):
+  """Get binary size data and contained blobs for packages specified in args.
+
+  If "far_total_name" is set in |sizes_config|, also computes a synthetic
+  package size which is the aggregate of the sizes across all packages."""
+
+  # Calculate compressed and uncompressed package sizes.
+  package_blobs = GetPackageBlobs(sizes_config['far_files'],
+                                  args.build_out_dir)
+  package_sizes = GetPackageSizes(package_blobs)
+
+  # Optionally calculate total compressed and uncompressed package sizes.
+  if 'far_total_name' in sizes_config:
+    compressed = sum([a.compressed for a in package_sizes.values()])
+    uncompressed = sum([a.uncompressed for a in package_sizes.values()])
+    package_sizes[sizes_config['far_total_name']] = PackageSizes(
+        compressed, uncompressed)
+
+  for name, size in package_sizes.items():
+    print('%s: compressed size %d, uncompressed size %d' %
+          (name, size.compressed, size.uncompressed))
+
+  return package_sizes, package_blobs
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--build-out-dir',
+      '--output-directory',
+      type=os.path.realpath,
+      required=True,
+      help='Location of the build artifacts.',
+  )
+  parser.add_argument(
+      '--isolated-script-test-output',
+      type=os.path.realpath,
+      help='File to which simplified JSON results will be written.')
+  parser.add_argument(
+      '--size-plugin-json-path',
+      help='Optional path for JSON size data for the Gerrit binary size '
+      'plugin.',
+  )
+  parser.add_argument(
+      '--sizes-path',
+      default=os.path.join('tools', 'fuchsia', 'size_tests', 'fyi_sizes.json'),
+      help='Path to the package size limits JSON file. The path is relative '
+      'to the workspace src directory.')
+  parser.add_argument('--verbose',
+                      '-v',
+                      action='store_true',
+                      help='Enable verbose output')
+  # Accepted to conform to the isolated script interface, but ignored.
+ parser.add_argument('--isolated-script-test-filter', help=argparse.SUPPRESS) + parser.add_argument('--isolated-script-test-perf-output', + help=argparse.SUPPRESS) + args = parser.parse_args() + + if args.verbose: + print('Fuchsia binary sizes') + print('Working directory', os.getcwd()) + print('Args:') + for var in vars(args): + print(' {}: {}'.format(var, getattr(args, var) or '')) + + if not os.path.isdir(args.build_out_dir): + raise Exception('Could not find build output directory "%s".' % + args.build_out_dir) + + with open(os.path.join(DIR_SRC_ROOT, args.sizes_path)) as sizes_file: + sizes_config = json.load(sizes_file) + + if args.verbose: + print('Sizes Config:') + print(json.dumps(sizes_config)) + + for far_rel_path in sizes_config['far_files']: + far_abs_path = os.path.join(args.build_out_dir, far_rel_path) + if not os.path.isfile(far_abs_path): + raise Exception('Could not find FAR file "%s".' % far_abs_path) + + test_name = 'sizes' + timestamp = time.time() + test_completed = False + all_tests_passed = False + test_status = {} + package_sizes = {} + package_blobs = {} + sizes_histogram = [] + + results_directory = None + if args.isolated_script_test_output: + results_directory = os.path.join( + os.path.dirname(args.isolated_script_test_output), test_name) + if not os.path.exists(results_directory): + os.makedirs(results_directory) + + try: + package_sizes, package_blobs = GetBinarySizesAndBlobs(args, sizes_config) + sizes_histogram = CreateSizesHistogram(package_sizes) + test_completed = True + except: + _, value, trace = sys.exc_info() + traceback.print_tb(trace) + print(str(value)) + finally: + all_tests_passed, test_status = GetTestStatus(package_sizes, sizes_config, + test_completed) + + if results_directory: + WriteTestResults(os.path.join(results_directory, 'test_results.json'), + test_completed, test_status, timestamp) + with open(os.path.join(results_directory, 'perf_results.json'), 'w') as f: + json.dump(sizes_histogram, f) + WritePackageBlobsJson( + os.path.join(results_directory, PACKAGES_BLOBS_FILE), package_blobs) + WritePackageSizesJson( + os.path.join(results_directory, PACKAGES_SIZES_FILE), package_sizes) + + if args.isolated_script_test_output: + WriteTestResults(args.isolated_script_test_output, test_completed, + test_status, timestamp) + + if args.size_plugin_json_path: + WriteGerritPluginSizeData(args.size_plugin_json_path, package_sizes) + + return 0 if all_tests_passed else 1 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/fuchsia/binary_sizes_test.py b/fuchsia/binary_sizes_test.py new file mode 100755 index 000000000000..2f9dcf2177f5 --- /dev/null +++ b/fuchsia/binary_sizes_test.py @@ -0,0 +1,132 @@ +#!/usr/bin/env vpython3 +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
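+"""Tests for binary_sizes.py."""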
+
+import json
+import os
+import shutil
+import tempfile
+import unittest
+
+import binary_sizes
+
+
+_EXAMPLE_BLOBS = """
+{
+  "web_engine": [
+    {
+      "merkle": "77e876447dd2daaaab7048d646e87fe8b6d9fecef6cbfcc4af30b8fbfa50b881",
+      "path": "locales/ta.pak",
+      "bytes": 17916,
+      "is_counted": true,
+      "size": 16384
+    },
+    {
+      "merkle": "5f1932b8c9fe954f3c3fdb34ab2089d2af34e5a0cef90cad41a1cd37d92234bf",
+      "path": "lib/libEGL.so",
+      "bytes": 226960,
+      "is_counted": true,
+      "size": 90112
+    },
+    {
+      "merkle": "9822fc0dd95cdd1cc46b5c6632a928a6ad19b76ed0157397d82a2f908946fc34",
+      "path": "meta.far",
+      "bytes": 24576,
+      "is_counted": true,
+      "size": 16384
+    },
+    {
+      "merkle": "090aed4593c4f7d04a3ad80e9971c0532dd5b1d2bdf4754202cde510a88fd220",
+      "path": "locales/ru.pak",
+      "bytes": 11903,
+      "is_counted": true,
+      "size": 16384
+    }
+  ]
+}
+"""
+
+
+class TestBinarySizes(unittest.TestCase):
+  tmpdir = None
+
+  @classmethod
+  def setUpClass(cls):
+    cls.tmpdir = tempfile.mkdtemp()
+
+  @classmethod
+  def tearDownClass(cls):
+    shutil.rmtree(cls.tmpdir)
+
+  def testReadAndWritePackageBlobs(self):
+    # TODO(crbug.com/1309977): Disabled on Windows because Windows doesn't
+    # allow opening a NamedTemporaryFile by name.
+    if os.name == 'nt':
+      return
+    with tempfile.NamedTemporaryFile(mode='w') as tmp_file:
+      tmp_file.write(_EXAMPLE_BLOBS)
+      tmp_file.flush()
+
+      package_blobs = binary_sizes.ReadPackageBlobsJson(tmp_file.name)
+
+    tmp_package_file = tempfile.NamedTemporaryFile(mode='w', delete=False)
+    tmp_package_file.close()
+    try:
+      binary_sizes.WritePackageBlobsJson(tmp_package_file.name, package_blobs)
+
+      self.assertEqual(
+          binary_sizes.ReadPackageBlobsJson(tmp_package_file.name),
+          package_blobs)
+    finally:
+      os.remove(tmp_package_file.name)
+
+  def testReadAndWritePackageSizes(self):
+    # TODO(crbug.com/1309977): Disabled on Windows because Windows doesn't
+    # allow opening a NamedTemporaryFile by name.
+    if os.name == 'nt':
+      return
+    with tempfile.NamedTemporaryFile(mode='w') as tmp_file:
+      tmp_file.write(_EXAMPLE_BLOBS)
+      tmp_file.flush()
+      blobs = binary_sizes.ReadPackageBlobsJson(tmp_file.name)
+
+    sizes = binary_sizes.GetPackageSizes(blobs)
+
+    new_sizes = {}
+    with tempfile.NamedTemporaryFile(mode='w') as tmp_file:
+      binary_sizes.WritePackageSizesJson(tmp_file.name, sizes)
+      new_sizes = binary_sizes.ReadPackageSizesJson(tmp_file.name)
+      self.assertEqual(new_sizes, sizes)
+      self.assertIn('web_engine', new_sizes)
+
+  def testGetPackageSizesUsesBlobMerklesForCount(self):
+    # TODO(crbug.com/1309977): Disabled on Windows because Windows doesn't
+    # allow opening a NamedTemporaryFile by name.
+    if os.name == 'nt':
+      return
+    blobs = json.loads(_EXAMPLE_BLOBS)
+
+    # Make a duplicate of the last blob.
+    last_blob = dict(blobs['web_engine'][-1])
+    blobs['cast_runner'] = []
+    last_blob['path'] = 'foo'  # Give it a nonsense name, but keep the merkle.
+
+    # Because the merkle is unchanged, the blob's share count increases to 2,
+    # which effectively halves the blob's size contribution in each of the
+    # two packages, even though it appears under different names.
+    blobs['cast_runner'].append(last_blob)
+
+    with tempfile.NamedTemporaryFile(mode='w') as tmp_file:
+      tmp_file.write(json.dumps(blobs))
+      tmp_file.flush()
+      blobs = binary_sizes.ReadPackageBlobsJson(tmp_file.name)
+
+    sizes = binary_sizes.GetPackageSizes(blobs)
+
+    self.assertEqual(sizes['cast_runner'].compressed, last_blob['size'] / 2)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/fuchsia/cipd/BUILD.gn b/fuchsia/cipd/BUILD.gn
new file mode 100644
index 000000000000..0019b8645b8d
--- /dev/null
+++ b/fuchsia/cipd/BUILD.gn
@@ -0,0 +1,436 @@
+# Copyright 2019 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Build targets for constructing CIPD release archives.
+
+assert(is_fuchsia)
+
+import("//build/cipd/cipd.gni")
+import("//build/config/chrome_build.gni")
+import("//build/util/process_version.gni")
+import("//third_party/fuchsia-sdk/sdk/build/build_id_dir.gni")
+import("//third_party/fuchsia-sdk/sdk/build/cipd.gni")
+
+visibility = [ ":*" ]
+
+# Allows a builder to explicitly declare the CIPD path. The base path is what
+# comes after `.../p/` in the CIPD URL.
+declare_args() {
+  fuchsia_cipd_package_base_path = ""
+}
+
+# TODO(zijiehe): Eliminate the use of 'package_base_path' during the
+# refactoring.
+if (fuchsia_cipd_package_base_path == "") {
+  if (is_chrome_branded) {
+    package_base_path = "chrome_internal/fuchsia"
+  } else {
+    package_base_path = "chromium/fuchsia"
+  }
+} else {
+  package_base_path = fuchsia_cipd_package_base_path
}
+
+# Archives related specifically to `fuchsia.web`.
+_web_engine_directory = "web_engine"
+
+# Archives related specifically to the Chrome browser.
+_chrome_directory = "chrome"
+
+# Archives of tools intended to be run on a Linux/Mac host rather than the
+# Fuchsia device.
+_host_tools_directory = "host_tools"
+
+_archive_suffix = "_archive"
+
+# Extracts the numeric Chrome version and writes it to a file in the output
+# directory.
+#
+# To check out the repository on the commit where the version was generated,
+# simply call `git checkout <version>`, and Git will check out the commit
+# associated with the tag.
+process_version("version") {
+  template_file = "version.template"
+  sources = [ "//chrome/VERSION" ]
+  output = "${target_gen_dir}/VERSION"
+  process_only = true
+}
+
+if (target_cpu == "x64") {
+  targetarch = "amd64"
+} else {
+  targetarch = "arm64"
+}
+
+# Prepares a CIPD archive, produces corresponding LICENSE and
+# LICENSE.spdx.json files, and generates a manifest file.
+#
+# Parameters:
+#   package_subdirectory: Specify the subdirectory relative to
+#     |package_base_path| in which the package is put.
+#   description: Sets the "description" field in the CIPD package definition.
+#
+# Optional parameters used directly by the fuchsia_cipd_package template:
+#   "install_mode",
+#   "sources",
+#   "data",
+#   "data_deps",
+#   "deps",
+#   "testonly",
+
+template("cipd_archive") {
+  forward_variables_from(invoker,
+                         [
+                           "package_subdirectory",
+                           "description",
+                           "install_mode",
+                           "sources",
+                           "data",
+                           "data_deps",
+                           "deps",
+                           "testonly",
+                         ])
+
+  _license_path = "${target_gen_dir}/${target_name}/LICENSE"
+  _invoker_dir = get_label_info(":${invoker.target_name}", "dir")
+  _license_target = "${_invoker_dir}:${invoker.target_name}${_archive_suffix}"
+
+  # GN is used by the script and is thus an input.
+ if (host_os == "mac") { + _gn_path = "//buildtools/mac/gn" + } else if (host_os == "linux") { + _gn_path = "//buildtools/linux64/gn" + } + + # Produces a consolidated license file. + action("${target_name}_license") { + script = "//tools/licenses/licenses.py" + inputs = [ "$_gn_path" ] + outputs = [ _license_path ] + args = [ + "license_file", + rebase_path(_license_path, root_build_dir), + "--gn-target", + _license_target, + "--gn-out-dir", + ".", + ] + } + + # Produces a consolidated license file in spdx format. + action("${target_name}_license_spdx") { + _license_path_spdx = "${_license_path}.spdx.json" + + script = "//tools/licenses/licenses.py" + inputs = [ "$_gn_path" ] + outputs = [ _license_path_spdx ] + args = [ + "license_file", + rebase_path(_license_path_spdx, root_build_dir), + "--gn-target", + _license_target, + "--gn-out-dir", + ".", + "--format", + "spdx", + "--spdx-doc-name", + "${invoker.target_name}", + ] + } + + if (!defined(deps)) { + deps = [] + } + deps += [ + ":${target_name}_license", + ":${target_name}_license_spdx", + ":version", + ] + + if (!defined(sources)) { + sources = [] + } + sources += get_target_outputs(":${target_name}_license") + + get_target_outputs(":${target_name}_license_spdx") + + [ "${target_gen_dir}/VERSION" ] + + fuchsia_cipd_package("${target_name}${_archive_suffix}") { + package = "${package_base_path}/${package_subdirectory}/${targetarch}/${invoker.target_name}" + package_root = "${target_gen_dir}/${invoker.target_name}" + package_definition_name = "${invoker.target_name}.yaml" + + # Always use absolute path. + use_absolute_root_path = true + } +} + +# Prepares a CIPD test archive, which is a regular CIPD archive that generates +# test manifests for a given list of test_sets. +# +# Parameters: +# test_sets: A list of scopes for which test manifests will be created. Each +# set contains: +# manifest_path: The path to the generated manifest JSON file. +# far_sources: An optional list of CFv2 test component .far files. +# +# Required parameters used by the cipd_archive template: +# "package_subdirectory", +# +# Optional parameters used by the cipd_archive template: +# "description" +# "install_mode", +# "data", +# "data_deps" +# "deps", +# "testonly", + +template("cipd_test_archive") { + forward_variables_from(invoker, + [ + "package_subdirectory", + "description", + "install_mode", + "data", + "data_deps", + "deps", + "testonly", + "test_sets", + ]) + + assert(defined(test_sets) && defined(testonly) && testonly == true) + + cipd_archive(target_name) { + # Build JSON manifests for each suite of tests and include them in the + # archive. + sources = [] + foreach(test_set, test_sets) { + assert(defined(test_set.far_sources)) + sources += [ test_set.manifest_path ] + _manifest_contents = [] + if (defined(test_set.far_sources)) { + foreach(source, test_set.far_sources) { + package_name = get_path_info(source, "name") + + _manifest_contents += [ + { + package = package_name + component_name = package_name + ".cm" + }, + ] + } + sources += test_set.far_sources + } + write_file(test_set.manifest_path, _manifest_contents, "json") + } + } +} + +cipd_archive("web_engine") { + package_subdirectory = _web_engine_directory + description = "Prebuilt WebEngine binaries for Fuchsia." 
+ + deps = [ "//fuchsia_web/webengine:web_engine" ] + sources = + [ "${root_gen_dir}/fuchsia_web/webengine/web_engine/web_engine.far" ] +} + +cipd_archive("cast_runner") { + package_subdirectory = _web_engine_directory + description = "Prebuilt Cast application Runner binaries for Fuchsia." + + deps = [ "//fuchsia_web/runners:cast_runner_pkg" ] + sources = + [ "${root_gen_dir}/fuchsia_web/runners/cast_runner/cast_runner.far" ] +} + +cipd_archive("web_engine_shell") { + package_subdirectory = _web_engine_directory + description = "Simple command-line embedder for WebEngine." + testonly = true + + deps = [ "//fuchsia_web/shell:web_engine_shell_pkg" ] + sources = [ + "${root_gen_dir}/fuchsia_web/shell/web_engine_shell/web_engine_shell.far", + ] +} + +_stripped_chromedriver_file = "${root_out_dir}/clang_x64/stripped/chromedriver" + +action("strip_chromedriver_binary") { + testonly = true + + prog_name = "${root_out_dir}/clang_x64/chromedriver" + + deps = [ "//chrome/test/chromedriver:chromedriver_server($host_toolchain)" ] + script = "//build/gn_run_binary.py" + sources = [ + "//buildtools/third_party/eu-strip/bin/eu-strip", + prog_name, + ] + outputs = [ _stripped_chromedriver_file ] + args = [ + rebase_path("//buildtools/third_party/eu-strip/bin/eu-strip", + root_build_dir), + "-o", + rebase_path(_stripped_chromedriver_file, root_build_dir), + rebase_path(prog_name, root_build_dir), + ] +} + +cipd_archive("chromedriver") { + package_subdirectory = "${_host_tools_directory}/\${os}" + description = "Prebuilt Chromedriver binary for Fuchsia host." + install_mode = "copy" + testonly = true + + deps = [ ":strip_chromedriver_binary" ] + sources = [ _stripped_chromedriver_file ] +} + +cipd_test_archive("tests") { + package_subdirectory = _web_engine_directory + description = "Prebuilt Chromium tests for Fuchsia." 
+ testonly = true + + deps = [ + "//base:base_unittests_pkg", + "//fuchsia_web/runners:cast_runner_integration_tests_pkg", + "//fuchsia_web/webengine:web_engine_integration_tests_pkg", + "//ipc:ipc_tests_pkg", + "//media:media_unittests_pkg", + "//mojo:mojo_unittests_pkg", + "//skia:skia_unittests_pkg", + "//third_party/blink/common:blink_common_unittests_pkg", + ] + + test_sets = [ + { + manifest_path = "${target_gen_dir}/test_manifest.json" + far_sources = [ + "${root_gen_dir}/base/base_unittests/base_unittests.far", + "${root_gen_dir}/fuchsia_web/runners/cast_runner_integration_tests/cast_runner_integration_tests.far", + "${root_gen_dir}/fuchsia_web/webengine/web_engine_integration_tests/web_engine_integration_tests.far", + "${root_gen_dir}/ipc/ipc_tests/ipc_tests.far", + "${root_gen_dir}/media/media_unittests/media_unittests.far", + "${root_gen_dir}/mojo/mojo_unittests/mojo_unittests.far", + "${root_gen_dir}/skia/skia_unittests/skia_unittests.far", + "${root_gen_dir}/third_party/blink/common/blink_common_unittests/blink_common_unittests.far", + ] + }, + { + manifest_path = "${target_gen_dir}/common_tests_manifest.json" + far_sources = [ + "${root_gen_dir}/base/base_unittests/base_unittests.far", + "${root_gen_dir}/ipc/ipc_tests/ipc_tests.far", + "${root_gen_dir}/media/media_unittests/media_unittests.far", + "${root_gen_dir}/mojo/mojo_unittests/mojo_unittests.far", + "${root_gen_dir}/skia/skia_unittests/skia_unittests.far", + "${root_gen_dir}/third_party/blink/common/blink_common_unittests/blink_common_unittests.far", + ] + }, + { + manifest_path = "${target_gen_dir}/web_engine_tests_manifest.json" + far_sources = [ "${root_gen_dir}/fuchsia_web/webengine/web_engine_integration_tests/web_engine_integration_tests.far" ] + }, + { + manifest_path = "${target_gen_dir}/cast_runner_tests_manifest.json" + far_sources = [ "${root_gen_dir}/fuchsia_web/runners/cast_runner_integration_tests/cast_runner_integration_tests.far" ] + }, + ] +} + +# Construct a consolidated directory of web_engine debugging symbols using the +# GNU .build_id structure for CIPD archival. +_web_engine_build_ids_target = "web_engine_debug_symbol_directory" +_web_engine_debug_symbols_archive_name = "web_engine_debug_symbols" +_web_engine_debug_symbols_outdir = "${target_gen_dir}/${_web_engine_debug_symbols_archive_name}/${_web_engine_build_ids_target}" + +build_id_dir(_web_engine_build_ids_target) { + testonly = true # Some of the archives contain test packages. + output_path = _web_engine_debug_symbols_outdir + deps = [ ":web_engine_archives_with_tests" ] +} + +fuchsia_cipd_package(_web_engine_debug_symbols_archive_name) { + testonly = true + package = "${package_base_path}/${_web_engine_directory}/${targetarch}/debug-symbols" + package_root = _web_engine_debug_symbols_outdir + package_definition_name = "${target_name}.yaml" + package_definition_dir = "${target_gen_dir}/${target_name}" + description = "Debugging symbols for prebuilt binaries from Chromium." + use_absolute_root_path = true + + directories = [ "." ] + deps = [ ":${_web_engine_build_ids_target}" ] +} + +cipd_archive("chrome") { + package_subdirectory = _chrome_directory + description = "Prebuilt Chrome browser package." 
+ + deps = [ "//chrome/app:chrome_pkg" ] + sources = [ "${root_gen_dir}/chrome/app/chrome/chrome.far" ] +} + +_chrome_build_ids_target = "chrome_debug_symbol_directory" +_chrome_debug_symbols_archive_name = "chrome_debug_symbols" +_chrome_debug_symbols_outdir = "${target_gen_dir}/${_chrome_debug_symbols_archive_name}/${_chrome_build_ids_target}" + +build_id_dir(_chrome_build_ids_target) { + testonly = true # Some of the archives contain test packages. + output_path = _chrome_debug_symbols_outdir + deps = [ ":chrome${_archive_suffix}" ] +} + +fuchsia_cipd_package(_chrome_debug_symbols_archive_name) { + testonly = true + package = + "${package_base_path}/${_chrome_directory}/${targetarch}/debug-symbols" + package_root = _chrome_debug_symbols_outdir + package_definition_name = "${target_name}.yaml" + package_definition_dir = "${target_gen_dir}/${target_name}" + description = "Debugging symbols for prebuilt binaries from Chromium." + use_absolute_root_path = true + + directories = [ "." ] + deps = [ ":${_chrome_build_ids_target}" ] +} + +# A group for production archives to ensure nothing is testonly. +group("web_engine_production_archives") { + deps = [ + ":cast_runner${_archive_suffix}", + ":web_engine${_archive_suffix}", + ] +} + +# Used by both the main group as well as :debug_symbols. +group("web_engine_archives_with_tests") { + testonly = true # tests and web_engine_shell are testonly. + deps = [ + ":tests${_archive_suffix}", + ":web_engine_production_archives", + ":web_engine_shell${_archive_suffix}", + ] +} + +# TODO(zijiehe): Rename to "cipd_yaml" when possible. +# This target only creates yaml files and related archives for cipd rather +# than executing the cipd instance to upload them. +# Currently it's named as "cipd" to match the folder name which introduces +# confusions. +group("cipd") { + testonly = true # Some archives are testonly. + deps = [ + ":chrome${_archive_suffix}", + ":chrome_debug_symbols", + ":web_engine_archives_with_tests", + + # Symbols are not uploaded for the following. + ":chromedriver${_archive_suffix}", + ":web_engine_debug_symbols", + ] + visibility = [] # Required to replace the file default. + visibility = [ "//:gn_all" ] +} diff --git a/fuchsia/cipd/DIR_METADATA b/fuchsia/cipd/DIR_METADATA new file mode 100644 index 000000000000..210aa6a954b8 --- /dev/null +++ b/fuchsia/cipd/DIR_METADATA @@ -0,0 +1 @@ +mixins: "//build/fuchsia/COMMON_METADATA" diff --git a/fuchsia/cipd/README.md b/fuchsia/cipd/README.md new file mode 100644 index 000000000000..c0de38b3c015 --- /dev/null +++ b/fuchsia/cipd/README.md @@ -0,0 +1,11 @@ +# CIPD recipes + +The `//build/fuchsia/cipd` target generates a number of YAML files that are used to +produce archives that are uploaded to CIPD. The generated YAML files are stored +in the output directory under the path `gen/build/fuchsia/cipd/`. 
+
+## Example usage
+
+The most recent package can be discovered by searching for the "canary" ref:
+
+`$ cipd describe chromium/fuchsia/$PACKAGE_NAME-$TARGET_ARCH -version canary`
diff --git a/fuchsia/cipd/version.template b/fuchsia/cipd/version.template
new file mode 100644
index 000000000000..32a49a4aef83
--- /dev/null
+++ b/fuchsia/cipd/version.template
@@ -0,0 +1 @@
+@MAJOR@.@MINOR@.@BUILD@.@PATCH@
\ No newline at end of file
diff --git a/fuchsia/gcs_download.py b/fuchsia/gcs_download.py
new file mode 100644
index 000000000000..534091bce9f6
--- /dev/null
+++ b/fuchsia/gcs_download.py
@@ -0,0 +1,51 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import subprocess
+import sys
+import tarfile
+import tempfile
+
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                             'test')))
+
+from common import DIR_SRC_ROOT
+
+sys.path.append(os.path.join(DIR_SRC_ROOT, 'build'))
+import find_depot_tools
+
+
+def DownloadAndUnpackFromCloudStorage(url, output_dir):
+  """Fetches a tarball from GCS and uncompresses it to |output_dir|."""
+
+  # Download the tarball into a temporary directory, then extract it to
+  # |output_dir|. The temporary copy is deleted when the context exits.
+  tmp_file = 'image.tgz'
+  with tempfile.TemporaryDirectory() as tmp_d:
+    tmp_file_location = os.path.join(tmp_d, tmp_file)
+    cmd = [
+        sys.executable,
+        os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gsutil.py'), 'cp',
+        url, tmp_file_location
+    ]
+
+    logging.debug('Running "%s"', ' '.join(cmd))
+    task = subprocess.run(cmd,
+                          stderr=subprocess.PIPE,
+                          stdout=subprocess.PIPE,
+                          check=True,
+                          encoding='utf-8')
+
+    try:
+      tarfile.open(name=tmp_file_location,
+                   mode='r|gz').extractall(path=output_dir)
+    except tarfile.ReadError as exc:
+      # |task| is a CompletedProcess whose stderr was already captured as
+      # text above, so report it directly; CompletedProcess has no
+      # communicate() method.
+      stderr_data = task.stderr
+      raise subprocess.CalledProcessError(
+          task.returncode, cmd,
+          'Failed to read a tarfile from gsutil.py.\n{}'.format(
+              stderr_data if stderr_data else '')) from exc
diff --git a/fuchsia/gcs_download_test.py b/fuchsia/gcs_download_test.py
new file mode 100755
index 000000000000..50b2bf1a0a5d
--- /dev/null
+++ b/fuchsia/gcs_download_test.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env vpython3
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
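+"""Tests for gcs_download.py."""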
+
+import os
+import subprocess
+import tarfile
+import unittest
+from unittest import mock
+
+from gcs_download import DownloadAndUnpackFromCloudStorage
+
+
+def _mock_task(status_code: int = 0, stderr: str = '') -> mock.Mock:
+  task_mock = mock.Mock()
+  attrs = {
+      'returncode': status_code,
+      'wait.return_value': status_code,
+      # DownloadAndUnpackFromCloudStorage reads CompletedProcess.stderr as
+      # text, so expose the canned stderr the same way.
+      'stderr': stderr,
+  }
+  task_mock.configure_mock(**attrs)
+
+  return task_mock
+
+
+@mock.patch('tempfile.TemporaryDirectory')
+@mock.patch('subprocess.run')
+@mock.patch('tarfile.open')
+@unittest.skipIf(os.name == 'nt', 'Fuchsia tests not supported on Windows')
+class TestDownloadAndUnpackFromCloudStorage(unittest.TestCase):
+  def testHappyPath(self, mock_tarfile, mock_run, mock_tmp_dir):
+    mock_run.return_value = _mock_task()
+
+    tmp_dir = os.path.join('some', 'tmp', 'dir')
+    mock_tmp_dir.return_value.__enter__.return_value = tmp_dir
+
+    mock_seq = mock.Mock()
+    mock_seq.attach_mock(mock_run, 'Run')
+    mock_seq.attach_mock(mock_tarfile, 'Untar')
+    mock_seq.attach_mock(mock_tmp_dir, 'MkTmpD')
+
+    output_dir = os.path.join('output', 'dir')
+    DownloadAndUnpackFromCloudStorage('gs://some/url', output_dir)
+
+    image_tgz_path = os.path.join(tmp_dir, 'image.tgz')
+    mock_seq.assert_has_calls([
+        mock.call.MkTmpD(),
+        mock.call.MkTmpD().__enter__(),
+        mock.call.Run(mock.ANY,
+                      stderr=subprocess.PIPE,
+                      stdout=subprocess.PIPE,
+                      check=True,
+                      encoding='utf-8'),
+        mock.call.Untar(name=image_tgz_path, mode='r|gz'),
+        mock.call.Untar().extractall(path=output_dir),
+        mock.call.MkTmpD().__exit__(None, None, None)
+    ],
+                              any_order=False)
+
+    # Verify cmd.
+    cmd = ' '.join(mock_run.call_args[0][0])
+    self.assertRegex(
+        cmd, r'.*python3?\s.*gsutil.py\s+cp\s+gs://some/url\s+' +
+        image_tgz_path)
+
+  def testFailedTarOpen(self, mock_tarfile, mock_run, mock_tmp_dir):
+    mock_run.return_value = _mock_task(stderr='some error')
+    mock_tarfile.side_effect = tarfile.ReadError()
+
+    with self.assertRaises(subprocess.CalledProcessError):
+      DownloadAndUnpackFromCloudStorage('', '')
+    mock_tmp_dir.assert_called_once()
+    mock_run.assert_called_once()
+    mock_tarfile.assert_called_once()
+
+  def testBadTaskStatusCode(self, mock_tarfile, mock_run, mock_tmp_dir):
+    mock_run.side_effect = subprocess.CalledProcessError(cmd='some/command',
+                                                         returncode=1)
+
+    with self.assertRaises(subprocess.CalledProcessError):
+      DownloadAndUnpackFromCloudStorage('', '')
+    mock_run.assert_called_once()
+    mock_tarfile.assert_not_called()
+    mock_tmp_dir.assert_called_once()
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/fuchsia/linux_internal.sdk.sha1 b/fuchsia/linux_internal.sdk.sha1
new file mode 100644
index 000000000000..d9ca1d93be3c
--- /dev/null
+++ b/fuchsia/linux_internal.sdk.sha1
@@ -0,0 +1 @@
+12.20230425.1.1
diff --git a/fuchsia/sdk-bucket.txt b/fuchsia/sdk-bucket.txt
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/fuchsia/sdk-hash-files.list b/fuchsia/sdk-hash-files.list
new file mode 100644
index 000000000000..6f37bcd9f775
--- /dev/null
+++ b/fuchsia/sdk-hash-files.list
@@ -0,0 +1 @@
+{platform}.sdk.sha1
diff --git a/fuchsia/test/.coveragerc b/fuchsia/test/.coveragerc
new file mode 100644
index 000000000000..815fd4b5face
--- /dev/null
+++ b/fuchsia/test/.coveragerc
@@ -0,0 +1,8 @@
+# .coveragerc to control coverage.py
+
+[report]
+# Regexes for lines to exclude from consideration
+exclude_lines =
+    # Don't complain if non-runnable code isn't run:
+    if __name__ == .__main__.:
+
diff --git a/fuchsia/test/.style.yapf
b/fuchsia/test/.style.yapf new file mode 100644 index 000000000000..557fa7bf84c0 --- /dev/null +++ b/fuchsia/test/.style.yapf @@ -0,0 +1,2 @@ +[style] +based_on_style = pep8 diff --git a/fuchsia/test/OWNERS b/fuchsia/test/OWNERS new file mode 100644 index 000000000000..90b7846a25b6 --- /dev/null +++ b/fuchsia/test/OWNERS @@ -0,0 +1,3 @@ +chonggu@google.com +rohpavone@chromium.org +zijiehe@google.com diff --git a/fuchsia/test/PRESUBMIT.py b/fuchsia/test/PRESUBMIT.py new file mode 100644 index 000000000000..fc5dcfe8f029 --- /dev/null +++ b/fuchsia/test/PRESUBMIT.py @@ -0,0 +1,51 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Top-level presubmit script for build/fuchsia/test. + +See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts +for more details about the presubmit API built into depot_tools. +""" + +USE_PYTHON3 = True + +_EXTRA_PATHS_COMPONENTS = [('testing', )] + +# pylint: disable=invalid-name,missing-function-docstring +def CommonChecks(input_api, output_api): + # Neither running nor linting Fuchsia tests is supported on Windows. + if input_api.is_windows: + return [] + + tests = [] + + chromium_src_path = input_api.os_path.realpath( + input_api.os_path.join(input_api.PresubmitLocalPath(), '..', '..', + '..')) + pylint_extra_paths = [ + input_api.os_path.join(chromium_src_path, *component) + for component in _EXTRA_PATHS_COMPONENTS + ] + tests.extend( + input_api.canned_checks.GetPylint(input_api, + output_api, + extra_paths_list=pylint_extra_paths, + pylintrc='pylintrc', + version='2.7')) + + # coveragetest.py is responsible for running unit tests in this directory + tests.append( + input_api.Command( + name='coveragetest', + cmd=[input_api.python3_executable, 'coveragetest.py'], + kwargs={}, + message=output_api.PresubmitError)) + return input_api.RunTests(tests) + + +def CheckChangeOnUpload(input_api, output_api): + return CommonChecks(input_api, output_api) + + +def CheckChangeOnCommit(input_api, output_api): + return CommonChecks(input_api, output_api) diff --git a/fuchsia/test/base_ermine_ctl.py b/fuchsia/test/base_ermine_ctl.py new file mode 100644 index 000000000000..c7519867869f --- /dev/null +++ b/fuchsia/test/base_ermine_ctl.py @@ -0,0 +1,201 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Adds python interface to erminectl tools on workstation products.""" + +import logging +import subprocess +import time +from typing import List, Tuple + + +class BaseErmineCtl: + """Compatible class for automating control of Ermine and its OOBE. + + Must be used after checking if the tool exists. + + Usage: + ctl = base_ermine_ctl.BaseErmineCtl(some_target) + if ctl.exists: + ctl.take_to_shell() + + logging.info('In the shell') + else: + logging.info('Tool does not exist!') + + This is only necessary after a target reboot or provision (IE pave). + """ + + _OOBE_PASSWORD = 'workstation_test_password' + _TOOL = 'erminectl' + _OOBE_SUBTOOL = 'oobe' + _MAX_STATE_TRANSITIONS = 5 + + # Mapping between the current state and the next command to run + # to move it to the next state. 
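+    # For example, from the "SetPassword" state the full invocation becomes
+    # "erminectl oobe set_password workstation_test_password", since
+    # _execute_tool_async prepends _TOOL and _OOBE_SUBTOOL to each command.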
+ _STATE_TO_NEXT = { + 'SetPassword': ['set_password', _OOBE_PASSWORD], + 'Unknown': ['skip'], + 'Shell': [], + 'Login': ['login', _OOBE_PASSWORD], + } + _COMPLETE_STATE = 'Shell' + + _READY_TIMEOUT = 10 + _WAIT_ATTEMPTS = 10 + _WAIT_FOR_READY_SLEEP_SEC = 3 + + def __init__(self): + self._ermine_exists = False + self._ermine_exists_check = False + + # pylint: disable=no-self-use + # Overridable method to determine how command gets executed. + def execute_command_async(self, args: List[str]) -> subprocess.Popen: + """Executes command asynchronously, returning immediately.""" + raise NotImplementedError + + # pylint: enable=no-self-use + + @property + def exists(self) -> bool: + """Returns the existence of the tool. + + Checks whether the tool exists on and caches the result. + + Returns: + True if the tool exists, False if not. + """ + if not self._ermine_exists_check: + self._ermine_exists = self._execute_tool(['--help'], + can_fail=True) == 0 + self._ermine_exists_check = True + logging.debug('erminectl exists: %s', + ('true' if self._ermine_exists else 'false')) + return self._ermine_exists + + @property + def status(self) -> Tuple[int, str]: + """Returns the status of ermine. + + Note that if the tool times out or does not exist, a non-zero code + is returned. + + Returns: + Tuple of (return code, status as string). -1 for timeout. + Raises: + AssertionError: if the tool does not exist. + """ + assert self.exists, (f'Tool {self._TOOL} cannot have a status if' + ' it does not exist') + # Executes base command, which returns status. + proc = self._execute_tool_async([]) + try: + proc.wait(timeout=self._READY_TIMEOUT) + except subprocess.TimeoutExpired: + logging.warning('Timed out waiting for status') + return -1, 'Timeout' + stdout, _ = proc.communicate() + return proc.returncode, stdout.strip() + + @property + def ready(self) -> bool: + """Indicates if the tool is ready for regular use. + + Returns: + False if not ready, and True if ready. + Raises: + AssertionError: if the tool does not exist. + """ + assert self.exists, (f'Tool {self._TOOL} cannot be ready if' + ' it does not exist') + return_code, _ = self.status + return return_code == 0 + + def _execute_tool_async(self, command: List[str]) -> subprocess.Popen: + """Executes a sub-command asynchronously. + + Args: + command: list of strings to compose the command. Forwards to the + command runner. + Returns: + Popen of the subprocess. + """ + full_command = [self._TOOL, self._OOBE_SUBTOOL] + full_command.extend(command) + + # Returns immediately with Popen. + return self.execute_command_async(full_command) + + def _execute_tool(self, command: List[str], can_fail: bool = False) -> int: + """Executes a sub-command of the tool synchronously. + Raises exception if non-zero returncode is given and |can_fail| = False. + + Args: + command: list of strings to compose the command. Forwards to the + command runner. + can_fail: Whether or not the command can fail. + Raises: + RuntimeError: if non-zero returncode is returned and can_fail = + False. + Returns: + Return code of command execution if |can_fail| is True. + """ + proc = self._execute_tool_async(command) + stdout, stderr = proc.communicate() + if not can_fail and proc.returncode != 0: + raise RuntimeError(f'Command {" ".join(command)} failed.' + f'\nSTDOUT: {stdout}\nSTDERR: {stderr}') + return proc.returncode + + def wait_until_ready(self) -> None: + """Waits until the tool is ready through sleep-poll. + + The tool may not be ready after a pave or restart. 
+ This checks the status and exits after its ready or Timeout. + + Raises: + TimeoutError: if tool is not ready after certain amount of attempts. + AssertionError: if tool does not exist. + """ + assert self.exists, f'Tool {self._TOOL} must exist to use it.' + for _ in range(self._WAIT_ATTEMPTS): + if self.ready: + return + time.sleep(self._WAIT_FOR_READY_SLEEP_SEC) + raise TimeoutError('Timed out waiting for a valid status to return') + + def take_to_shell(self) -> None: + """Takes device to shell after waiting for tool to be ready. + + Examines the current state of the device after waiting for it to be + ready. Once ready, goes through the states of logging in. This is: + - CreatePassword -> Skip screen -> Shell + - Login -> Shell + - Shell + + Regardless of starting state, this will exit once the shell state is + reached. + + Raises: + NotImplementedError: if an unknown state is reached. + RuntimeError: If number of state transitions exceeds the max number + that is expected. + """ + self.wait_until_ready() + _, state = self.status + max_states = self._MAX_STATE_TRANSITIONS + while state != self._COMPLETE_STATE and max_states: + max_states -= 1 + command = self._STATE_TO_NEXT.get(state) + logging.debug('Ermine state is: %s', state) + if command is None: + raise NotImplementedError('Encountered invalid state: %s' % + state) + self._execute_tool(command) + _, state = self.status + + if not max_states: + raise RuntimeError('Did not transition to shell in %d attempts.' + ' Please file a bug.' % + self._MAX_STATE_TRANSITIONS) diff --git a/fuchsia/test/base_ermine_ctl_unittests.py b/fuchsia/test/base_ermine_ctl_unittests.py new file mode 100755 index 000000000000..c0d72fe0edf5 --- /dev/null +++ b/fuchsia/test/base_ermine_ctl_unittests.py @@ -0,0 +1,236 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Tests scenarios for ermine_ctl""" +import logging +import subprocess +import time +import unittest +import unittest.mock as mock + +from base_ermine_ctl import BaseErmineCtl + + +class BaseBaseErmineCtlTest(unittest.TestCase): + """Unit tests for BaseBaseErmineCtl interface.""" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.ermine_ctl = BaseErmineCtl() + + def _set_mock_proc(self, return_value: int): + """Set |execute_command_async|'s return value to a mocked subprocess.""" + self.ermine_ctl.execute_command_async = mock.MagicMock() + mock_proc = mock.create_autospec(subprocess.Popen, instance=True) + mock_proc.communicate.return_value = 'foo', 'stderr' + mock_proc.returncode = return_value + self.ermine_ctl.execute_command_async.return_value = mock_proc + + return mock_proc + + def test_check_exists(self): + """Test |exists| returns True if tool command succeeds (returns 0).""" + self._set_mock_proc(return_value=0) + + self.assertTrue(self.ermine_ctl.exists) + + # Modifying this will not result in a change in state due to caching. 
+ self._set_mock_proc(return_value=42) + self.assertTrue(self.ermine_ctl.exists) + + def test_does_not_exist(self): + """Test |exists| returns False if tool command fails (returns != 0).""" + self._set_mock_proc(return_value=42) + + self.assertFalse(self.ermine_ctl.exists) + + def test_ready_raises_assertion_error_if_not_exist(self): + """Test |ready| raises AssertionError if tool does not exist.""" + self._set_mock_proc(return_value=42) + self.assertRaises(AssertionError, getattr, self.ermine_ctl, 'ready') + + def test_ready_returns_false_if_bad_status(self): + """Test |ready| return False if tool has a bad status.""" + with mock.patch.object( + BaseErmineCtl, 'status', + new_callable=mock.PropertyMock) as mock_status, \ + mock.patch.object(BaseErmineCtl, 'exists', + new_callable=mock.PropertyMock) as mock_exists: + mock_exists.return_value = True + mock_status.return_value = (1, 'FakeStatus') + self.assertFalse(self.ermine_ctl.ready) + + def test_ready_returns_true(self): + """Test |ready| return True if tool returns good status (rc = 0).""" + with mock.patch.object( + BaseErmineCtl, 'status', + new_callable=mock.PropertyMock) as mock_status, \ + mock.patch.object(BaseErmineCtl, 'exists', + new_callable=mock.PropertyMock) as mock_exists: + mock_exists.return_value = True + mock_status.return_value = (0, 'FakeStatus') + self.assertTrue(self.ermine_ctl.ready) + + def test_status_raises_assertion_error_if_dne(self): + """Test |status| returns |InvalidState| if tool does not exist.""" + with mock.patch.object(BaseErmineCtl, + 'exists', + new_callable=mock.PropertyMock) as mock_exists: + mock_exists.return_value = False + + self.assertRaises(AssertionError, getattr, self.ermine_ctl, + 'status') + + def test_status_returns_rc_and_stdout(self): + """Test |status| returns subprocess stdout and rc if tool exists.""" + with mock.patch.object(BaseErmineCtl, + 'exists', + new_callable=mock.PropertyMock) as _: + self._set_mock_proc(return_value=10) + + self.assertEqual(self.ermine_ctl.status, (10, 'foo')) + + def test_status_returns_timeout_state(self): + """Test |status| returns |Timeout| if exception is raised.""" + with mock.patch.object( + BaseErmineCtl, 'exists', new_callable=mock.PropertyMock) as _, \ + mock.patch.object(logging, 'warning') as _: + mock_proc = self._set_mock_proc(return_value=0) + mock_proc.wait.side_effect = subprocess.TimeoutExpired( + 'cmd', 'some timeout') + + self.assertEqual(self.ermine_ctl.status, (-1, 'Timeout')) + + def test_wait_until_ready_raises_assertion_error_if_tool_dne(self): + """Test |wait_until_ready| is returns false if tool does not exist.""" + with mock.patch.object(BaseErmineCtl, + 'exists', + new_callable=mock.PropertyMock) as mock_exists: + mock_exists.return_value = False + + self.assertRaises(AssertionError, self.ermine_ctl.wait_until_ready) + + def test_wait_until_ready_loops_until_ready(self): + """Test |wait_until_ready| loops until |ready| returns True.""" + with mock.patch.object(BaseErmineCtl, 'exists', + new_callable=mock.PropertyMock) as mock_exists, \ + mock.patch.object(time, 'sleep') as mock_sleep, \ + mock.patch.object(BaseErmineCtl, 'ready', + new_callable=mock.PropertyMock) as mock_ready: + mock_exists.return_value = True + mock_ready.side_effect = [False, False, False, True] + + self.ermine_ctl.wait_until_ready() + + self.assertEqual(mock_ready.call_count, 4) + self.assertEqual(mock_sleep.call_count, 3) + + def test_wait_until_ready_raises_assertion_error_if_attempts_exceeded( + self): + """Test |wait_until_ready| loops if |ready| is 
not True n attempts.""" + with mock.patch.object(BaseErmineCtl, 'exists', + new_callable=mock.PropertyMock) as mock_exists, \ + mock.patch.object(time, 'sleep') as mock_sleep, \ + mock.patch.object(BaseErmineCtl, 'ready', + new_callable=mock.PropertyMock) as mock_ready: + mock_exists.return_value = True + mock_ready.side_effect = [False] * 15 + [True] + + self.assertRaises(TimeoutError, self.ermine_ctl.wait_until_ready) + + self.assertEqual(mock_ready.call_count, 10) + self.assertEqual(mock_sleep.call_count, 10) + + def test_take_to_shell_raises_assertion_error_if_tool_dne(self): + """Test |take_to_shell| throws AssertionError if not ready is False.""" + with mock.patch.object(BaseErmineCtl, + 'exists', + new_callable=mock.PropertyMock) as mock_exists: + mock_exists.return_value = False + self.assertRaises(AssertionError, self.ermine_ctl.take_to_shell) + + def test_take_to_shell_exits_on_complete_state(self): + """Test |take_to_shell| exits with no calls if in completed state.""" + with mock.patch.object(BaseErmineCtl, + 'wait_until_ready') as mock_wait_ready, \ + mock.patch.object( + BaseErmineCtl, 'status', + new_callable=mock.PropertyMock) as mock_status: + mock_proc = self._set_mock_proc(return_value=52) + mock_wait_ready.return_value = True + mock_status.return_value = (0, 'Shell') + + self.ermine_ctl.take_to_shell() + + self.assertEqual(mock_proc.call_count, 0) + + def test_take_to_shell_invalid_state_raises_not_implemented_error(self): + """Test |take_to_shell| raises exception if invalid state is returned. + """ + with mock.patch.object(BaseErmineCtl, + 'wait_until_ready') as mock_wait_ready, \ + mock.patch.object( + BaseErmineCtl, 'status', + new_callable=mock.PropertyMock) as mock_status: + mock_wait_ready.return_value = True + mock_status.return_value = (0, 'SomeUnknownState') + + self.assertRaises(NotImplementedError, + self.ermine_ctl.take_to_shell) + + def test_take_to_shell_with_max_transitions_raises_runtime_error(self): + """Test |take_to_shell| raises exception on too many transitions. + + |take_to_shell| attempts to transition from one state to another. + After 5 attempts, if this does not end in the completed state, an + Exception is thrown. + """ + with mock.patch.object(BaseErmineCtl, + 'wait_until_ready') as mock_wait_ready, \ + mock.patch.object( + BaseErmineCtl, 'status', + new_callable=mock.PropertyMock) as mock_status: + mock_wait_ready.return_value = True + # Returns too many state transitions before CompleteState. + mock_status.side_effect = [(0, 'Unknown'), + (0, 'KnownWithPassword'), + (0, 'Unknown')] * 3 + [ + (0, 'CompleteState') + ] + self.assertRaises(RuntimeError, self.ermine_ctl.take_to_shell) + + def test_take_to_shell_executes_known_commands(self): + """Test |take_to_shell| executes commands if necessary. + + Some states can only be transitioned between with specific commands. + These are executed by |take_to_shell| until the final test |Shell| is + reached. 
+ """ + with mock.patch.object(BaseErmineCtl, + 'wait_until_ready') as mock_wait_ready, \ + mock.patch.object( + BaseErmineCtl, 'status', + new_callable=mock.PropertyMock) as mock_status: + self._set_mock_proc(return_value=0) + mock_wait_ready.return_value = True + mock_status.side_effect = [(0, 'Unknown'), (0, 'SetPassword'), + (0, 'Shell')] + + self.ermine_ctl.take_to_shell() + + self.assertEqual(self.ermine_ctl.execute_command_async.call_count, + 2) + self.ermine_ctl.execute_command_async.assert_has_calls([ + mock.call(['erminectl', 'oobe', 'skip']), + mock.call().communicate(), + mock.call([ + 'erminectl', 'oobe', 'set_password', + 'workstation_test_password' + ]), + mock.call().communicate() + ]) + + +if __name__ == '__main__': + unittest.main() diff --git a/fuchsia/test/common.py b/fuchsia/test/common.py new file mode 100644 index 000000000000..32785f15a6c4 --- /dev/null +++ b/fuchsia/test/common.py @@ -0,0 +1,617 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Common methods and variables used by Cr-Fuchsia testing infrastructure.""" + +import enum +import json +import logging +import os +import re +import signal +import shutil +import subprocess +import sys +import time + +from argparse import ArgumentParser +from typing import Iterable, List, Optional, Tuple + +from compatible_utils import get_ssh_prefix, get_host_arch + +DIR_SRC_ROOT = os.path.abspath( + os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir)) +IMAGES_ROOT = os.path.join(DIR_SRC_ROOT, 'third_party', 'fuchsia-sdk', + 'images') +REPO_ALIAS = 'fuchsia.com' +SDK_ROOT = os.path.join(DIR_SRC_ROOT, 'third_party', 'fuchsia-sdk', 'sdk') +SDK_TOOLS_DIR = os.path.join(SDK_ROOT, 'tools', get_host_arch()) +_ENABLE_ZEDBOOT = 'discovery.zedboot.enabled=true' +_FFX_TOOL = os.path.join(SDK_TOOLS_DIR, 'ffx') + +# This global variable is used to set the environment variable +# |FFX_ISOLATE_DIR| when running ffx commands in E2E testing scripts. +_FFX_ISOLATE_DIR = None + + +class TargetState(enum.Enum): + """State of a target.""" + UNKNOWN = enum.auto() + DISCONNECTED = enum.auto() + PRODUCT = enum.auto() + FASTBOOT = enum.auto() + ZEDBOOT = enum.auto() + + +class BootMode(enum.Enum): + """Specifies boot mode for device.""" + REGULAR = enum.auto() + RECOVERY = enum.auto() + BOOTLOADER = enum.auto() + + +_STATE_TO_BOOTMODE = { + TargetState.PRODUCT: BootMode.REGULAR, + TargetState.FASTBOOT: BootMode.BOOTLOADER, + TargetState.ZEDBOOT: BootMode.RECOVERY +} + +_BOOTMODE_TO_STATE = {value: key for key, value in _STATE_TO_BOOTMODE.items()} + + +class StateNotFoundError(Exception): + """Raised when target's state cannot be found.""" + + +class StateTransitionError(Exception): + """Raised when target does not transition to desired state.""" + + +def _state_string_to_state(state_str: str) -> TargetState: + state_str = state_str.strip().lower() + if state_str == 'product': + return TargetState.PRODUCT + if state_str == 'zedboot (r)': + return TargetState.ZEDBOOT + if state_str == 'fastboot': + return TargetState.FASTBOOT + if state_str == 'unknown': + return TargetState.UNKNOWN + if state_str == 'disconnected': + return TargetState.DISCONNECTED + + raise NotImplementedError(f'State {state_str} not supported') + + +def get_target_state(target_id: Optional[str], + serial_num: Optional[str], + num_attempts: int = 1) -> TargetState: + """Return state of target or the default target. 
+ + Args: + target_id: Optional nodename of the target. If not given, default target + is used. + serial_num: Optional serial number of target. Only usable if device is + in fastboot. + num_attempts: Optional number of times to attempt getting status. + + Returns: + TargetState of the given node, if found. + + Raises: + StateNotFoundError: If target cannot be found, or default target is not + defined if |target_id| is not given. + """ + for i in range(num_attempts): + targets = json.loads( + run_ffx_command(('target', 'list'), + check=True, + configs=[_ENABLE_ZEDBOOT], + capture_output=True, + json_out=True).stdout.strip()) + for target in targets: + if target_id is None and target['is_default']: + return _state_string_to_state(target['target_state']) + if target_id == target['nodename']: + return _state_string_to_state(target['target_state']) + if serial_num == target['serial']: + # Should only return Fastboot. + return _state_string_to_state(target['target_state']) + # Do not sleep for last attempt. + if i < num_attempts - 1: + time.sleep(10) + + # Could not find a state for given target. + error_target = target_id + if target_id is None: + error_target = 'default target' + + raise StateNotFoundError(f'Could not find state for {error_target}.') + + +def set_ffx_isolate_dir(isolate_dir: str) -> None: + """Overwrites |_FFX_ISOLATE_DIR|.""" + + global _FFX_ISOLATE_DIR # pylint: disable=global-statement + _FFX_ISOLATE_DIR = isolate_dir + + +def get_host_tool_path(tool): + """Get a tool from the SDK.""" + + return os.path.join(SDK_TOOLS_DIR, tool) + + +def get_host_os(): + """Get host operating system.""" + + host_platform = sys.platform + if host_platform.startswith('linux'): + return 'linux' + if host_platform.startswith('darwin'): + return 'mac' + raise Exception('Unsupported host platform: %s' % host_platform) + + +def make_clean_directory(directory_name): + """If the directory exists, delete it and remake with no contents.""" + + if os.path.exists(directory_name): + shutil.rmtree(directory_name) + os.mkdir(directory_name) + + +def _get_daemon_status(): + """Determines daemon status via `ffx daemon socket`. + + Returns: + dict of status of the socket. Status will have a key Running or + NotRunning to indicate if the daemon is running. + """ + status = json.loads( + run_ffx_command(('daemon', 'socket'), + check=True, + capture_output=True, + json_out=True, + suppress_repair=True).stdout.strip()) + return status.get('pid', {}).get('status', {'NotRunning': True}) + + +def _is_daemon_running(): + return 'Running' in _get_daemon_status() + + +def check_ssh_config_file() -> None: + """Checks for ssh keys and generates them if they are missing.""" + + script_path = os.path.join(SDK_ROOT, 'bin', 'fuchsia-common.sh') + check_cmd = ['bash', '-c', f'. {script_path}; check-fuchsia-ssh-config'] + subprocess.run(check_cmd, check=True) + + +def _wait_for_daemon(start=True, timeout_seconds=100): + """Waits for daemon to reach desired state in a polling loop. + + Sleeps for 5s between polls. + + Args: + start: bool. Indicates to wait for daemon to start up. If False, + indicates waiting for daemon to die. + timeout_seconds: int. Number of seconds to wait for the daemon to reach + the desired status. + Raises: + TimeoutError: if the daemon does not reach the desired state in time. 
+ """ + wanted_status = 'start' if start else 'stop' + sleep_period_seconds = 5 + attempts = int(timeout_seconds / sleep_period_seconds) + for i in range(attempts): + if _is_daemon_running() == start: + return + if i != attempts: + logging.info('Waiting for daemon to %s...', wanted_status) + time.sleep(sleep_period_seconds) + + raise TimeoutError(f'Daemon did not {wanted_status} in time.') + + +def _run_repair_command(output): + """Scans |output| for a self-repair command to run and, if found, runs it. + + Returns: + True if a repair command was found and ran successfully. False otherwise. + """ + # Check for a string along the lines of: + # "Run `ffx doctor --restart-daemon` for further diagnostics." + match = re.search('`ffx ([^`]+)`', output) + if not match or len(match.groups()) != 1: + return False # No repair command found. + args = match.groups()[0].split() + + try: + run_ffx_command(args, suppress_repair=True) + # Need the daemon to be up at the end of this. + _wait_for_daemon(start=True) + except subprocess.CalledProcessError: + return False # Repair failed. + return True # Repair succeeded. + + +def run_ffx_command(cmd: Iterable[str], + target_id: Optional[str] = None, + check: bool = True, + suppress_repair: bool = False, + configs: Optional[List[str]] = None, + json_out: bool = False, + **kwargs) -> subprocess.CompletedProcess: + """Runs `ffx` with the given arguments, waiting for it to exit. + + If `ffx` exits with a non-zero exit code, the output is scanned for a + recommended repair command (e.g., "Run `ffx doctor --restart-daemon` for + further diagnostics."). If such a command is found, it is run and then the + original command is retried. This behavior can be suppressed via the + `suppress_repair` argument. + + Args: + cmd: A sequence of arguments to ffx. + target_id: Whether to execute the command for a specific target. The + target_id could be in the form of a nodename or an address. + check: If True, CalledProcessError is raised if ffx returns a non-zero + exit code. + suppress_repair: If True, do not attempt to find and run a repair + command. + configs: A list of configs to be applied to the current command. + json_out: Have command output returned as JSON. Must be parsed by + caller. + Returns: + A CompletedProcess instance + Raises: + CalledProcessError if |check| is true. + """ + + ffx_cmd = [_FFX_TOOL] + if json_out: + ffx_cmd.extend(('--machine', 'json')) + if target_id: + ffx_cmd.extend(('--target', target_id)) + if configs: + for config in configs: + ffx_cmd.extend(('--config', config)) + ffx_cmd.extend(cmd) + env = os.environ + if _FFX_ISOLATE_DIR: + env['FFX_ISOLATE_DIR'] = _FFX_ISOLATE_DIR + + try: + if not suppress_repair: + # If we want to repair, we need to capture output in STDOUT and + # STDERR. This could conflict with expectations of the caller. + output_captured = kwargs.get('capture_output') or ( + kwargs.get('stdout') and kwargs.get('stderr')) + if not output_captured: + # Force output to combine into STDOUT. 
+ kwargs['stdout'] = subprocess.PIPE + kwargs['stderr'] = subprocess.STDOUT + return subprocess.run(ffx_cmd, + check=check, + encoding='utf-8', + env=env, + **kwargs) + except subprocess.CalledProcessError as cpe: + logging.error('%s %s failed with returncode %s.', + os.path.relpath(_FFX_TOOL), + subprocess.list2cmdline(ffx_cmd[1:]), cpe.returncode) + if cpe.output: + logging.error('stdout of the command: %s', cpe.output) + if suppress_repair or (cpe.output + and not _run_repair_command(cpe.output)): + raise + + # If the original command failed but a repair command was found and + # succeeded, try one more time with the original command. + return run_ffx_command(cmd, target_id, check, True, configs, json_out, + **kwargs) + + +def run_continuous_ffx_command(cmd: Iterable[str], + target_id: Optional[str] = None, + encoding: Optional[str] = 'utf-8', + **kwargs) -> subprocess.Popen: + """Runs an ffx command asynchronously.""" + ffx_cmd = [_FFX_TOOL] + if target_id: + ffx_cmd.extend(('--target', target_id)) + ffx_cmd.extend(cmd) + return subprocess.Popen(ffx_cmd, encoding=encoding, **kwargs) + + +def read_package_paths(out_dir: str, pkg_name: str) -> List[str]: + """ + Returns: + A list of the absolute path to all FAR files the package depends on. + """ + with open( + os.path.join(DIR_SRC_ROOT, out_dir, 'gen', 'package_metadata', + f'{pkg_name}.meta')) as meta_file: + data = json.load(meta_file) + packages = [] + for package in data['packages']: + packages.append(os.path.join(DIR_SRC_ROOT, out_dir, package)) + return packages + + +def register_common_args(parser: ArgumentParser) -> None: + """Register commonly used arguments.""" + common_args = parser.add_argument_group('common', 'common arguments') + common_args.add_argument( + '--out-dir', + '-C', + type=os.path.realpath, + help='Path to the directory in which build files are located. ') + + +def register_device_args(parser: ArgumentParser) -> None: + """Register device arguments.""" + device_args = parser.add_argument_group('device', 'device arguments') + device_args.add_argument('--target-id', + default=os.environ.get('FUCHSIA_NODENAME'), + help=('Specify the target device. This could be ' + 'a node-name (e.g. fuchsia-emulator) or an ' + 'an ip address along with an optional port ' + '(e.g. [fe80::e1c4:fd22:5ee5:878e]:22222, ' + '1.2.3.4, 1.2.3.4:33333). 
If unspecified, ' + 'the default target in ffx will be used.')) + + +def register_log_args(parser: ArgumentParser) -> None: + """Register commonly used arguments.""" + + log_args = parser.add_argument_group('logging', 'logging arguments') + log_args.add_argument('--logs-dir', + type=os.path.realpath, + help=('Directory to write logs to.')) + + +def get_component_uri(package: str) -> str: + """Retrieve the uri for a package.""" + return f'fuchsia-pkg://{REPO_ALIAS}/{package}#meta/{package}.cm' + + +def resolve_packages(packages: List[str], target_id: Optional[str]) -> None: + """Ensure that all |packages| are installed on a device.""" + + ssh_prefix = get_ssh_prefix(get_ssh_address(target_id)) + subprocess.run(ssh_prefix + ['--', 'pkgctl', 'gc'], check=False) + + for package in packages: + resolve_cmd = [ + '--', 'pkgctl', 'resolve', + 'fuchsia-pkg://%s/%s' % (REPO_ALIAS, package) + ] + retry_command(ssh_prefix + resolve_cmd) + + +def retry_command(cmd: List[str], retries: int = 2, + **kwargs) -> Optional[subprocess.CompletedProcess]: + """Helper function for retrying a subprocess.run command.""" + + for i in range(retries): + if i == retries - 1: + proc = subprocess.run(cmd, **kwargs, check=True) + return proc + proc = subprocess.run(cmd, **kwargs, check=False) + if proc.returncode == 0: + return proc + time.sleep(3) + return None + + +def get_ssh_address(target_id: Optional[str]) -> str: + """Determines SSH address for given target.""" + return run_ffx_command(('target', 'get-ssh-address'), + target_id, + capture_output=True).stdout.strip() + + +def find_in_dir(target_name: str, parent_dir: str) -> Optional[str]: + """Finds path in SDK. + + Args: + target_name: Name of target to find, as a string. + parent_dir: Directory to start search in. + + Returns: + Full path to the target, None if not found. + """ + # Doesn't make sense to look for a full path. Only extract the basename. + target_name = os.path.basename(target_name) + for root, dirs, _ in os.walk(parent_dir): + if target_name in dirs: + return os.path.abspath(os.path.join(root, target_name)) + + return None + + +def find_image_in_sdk(product_name: str) -> Optional[str]: + """Finds image dir in SDK for product given. + + Args: + product_name: Name of product's image directory to find. + + Returns: + Full path to the target, None if not found. + """ + top_image_dir = os.path.join(SDK_ROOT, os.pardir, 'images') + path = find_in_dir(product_name, parent_dir=top_image_dir) + if path: + return find_in_dir('images', parent_dir=path) + return path + + +def catch_sigterm() -> None: + """Catches the kill signal and allows the process to exit cleanly.""" + def _sigterm_handler(*_): + sys.exit(0) + + signal.signal(signal.SIGTERM, _sigterm_handler) + + +def get_system_info(target: Optional[str] = None) -> Tuple[str, str]: + """Retrieves installed OS version frm device. + + Returns: + Tuple of strings, containing {product, version number), or a pair of + empty strings to indicate an error. + """ + info_cmd = run_ffx_command(('target', 'show', '--json'), + target_id=target, + capture_output=True, + check=False) + if info_cmd.returncode == 0: + info_json = json.loads(info_cmd.stdout.strip()) + for info in info_json: + if info['title'] == 'Build': + return (info['child'][1]['value'], info['child'][0]['value']) + + # If the information was not retrieved, return empty strings to indicate + # unknown system info. 
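+    # Callers compare this tuple against an image's (product, version) pair,
+    # so empty strings are guaranteed to force a mismatch.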
+ return ('', '') + + +def boot_device(target_id: Optional[str], + mode: BootMode, + serial_num: Optional[str] = None, + must_boot: bool = False) -> None: + """Boot device into desired mode, with fallback to SSH on failure. + + Args: + target_id: Optional target_id of device. + mode: Desired boot mode. + must_boot: Forces device to boot, regardless of current state. + Raises: + StateTransitionError: When final state of device is not desired. + """ + # Skip boot call if already in the state and not skipping check. + state = get_target_state(target_id, serial_num, num_attempts=3) + wanted_state = _BOOTMODE_TO_STATE.get(mode) + if not must_boot: + logging.debug('Current state %s. Want state %s', str(state), + str(wanted_state)) + must_boot = state != wanted_state + + if not must_boot: + logging.debug('Skipping boot - already in good state') + return + + def _reboot(reboot_cmd, current_state: TargetState): + reboot_cmd() + local_state = None + # Check that we transition out of current state. + for _ in range(30): + try: + local_state = get_target_state(target_id, serial_num) + if local_state != current_state: + # Changed states - can continue + break + except StateNotFoundError: + logging.debug('Device disconnected...') + if current_state != TargetState.DISCONNECTED: + # Changed states - can continue + break + finally: + time.sleep(2) + else: + logging.warning( + 'Device did not change from initial state. Exiting early') + return local_state or TargetState.DISCONNECTED + + # Now we want to transition to the new state. + for _ in range(90): + try: + local_state = get_target_state(target_id, serial_num) + if local_state == wanted_state: + return local_state + except StateNotFoundError: + logging.warning('Could not find target state.' + ' Sleeping then retrying...') + finally: + time.sleep(2) + return local_state or TargetState.DISCONNECTED + + state = _reboot( + (lambda: _boot_device_ffx(target_id, serial_num, state, mode)), state) + + if state == TargetState.DISCONNECTED: + raise StateNotFoundError('Target could not be found!') + + if state == wanted_state: + return + + logging.warning( + 'Booting with FFX to %s did not succeed. Attempting with DM', mode) + + # Fallback to SSH, with no retry if we tried with ffx.: + state = _reboot( + (lambda: _boot_device_dm(target_id, serial_num, state, mode)), state) + + if state != wanted_state: + raise StateTransitionError( + f'Could not get device to desired state. Wanted {wanted_state},' + f' got {state}') + logging.debug('Got desired state: %s', state) + + +def _boot_device_ffx(target_id: Optional[str], serial_num: Optional[str], + current_state: TargetState, mode: BootMode): + cmd = ['target', 'reboot'] + if mode == BootMode.REGULAR: + logging.info('Triggering regular boot') + elif mode == BootMode.RECOVERY: + cmd.append('-r') + elif mode == BootMode.BOOTLOADER: + cmd.append('-b') + else: + raise NotImplementedError(f'BootMode {mode} not supported') + + logging.debug('FFX reboot with command [%s]', ' '.join(cmd)) + if current_state == TargetState.FASTBOOT: + + run_ffx_command(cmd, + configs=[_ENABLE_ZEDBOOT], + target_id=serial_num, + check=False) + else: + run_ffx_command(cmd, + configs=[_ENABLE_ZEDBOOT], + target_id=target_id, + check=False) + + +def _boot_device_dm(target_id: Optional[str], serial_num: Optional[str], + current_state: TargetState, mode: BootMode): + # Can only use DM if device is in regular boot. 
+ if current_state != TargetState.PRODUCT: + if mode == BootMode.REGULAR: + raise StateTransitionError('Cannot boot to Regular via DM - ' + 'FFX already failed to do so.') + # Boot to regular. + _boot_device_ffx(target_id, serial_num, current_state, + BootMode.REGULAR) + + ssh_prefix = get_ssh_prefix(get_ssh_address(target_id)) + + reboot_cmd = None + + if mode == BootMode.REGULAR: + reboot_cmd = 'reboot' + elif mode == BootMode.RECOVERY: + reboot_cmd = 'reboot-recovery' + elif mode == BootMode.BOOTLOADER: + reboot_cmd = 'reboot-bootloader' + else: + raise NotImplementedError(f'BootMode {mode} not supported') + + # Boot commands can fail due to SSH connections timeout. + full_cmd = ssh_prefix + ['--', 'dm', reboot_cmd] + logging.debug('DM reboot with command [%s]', ' '.join(full_cmd)) + subprocess.run(full_cmd, check=False) diff --git a/fuchsia/test/common_unittests.py b/fuchsia/test/common_unittests.py new file mode 100755 index 000000000000..4e419c902574 --- /dev/null +++ b/fuchsia/test/common_unittests.py @@ -0,0 +1,54 @@ +#!/usr/bin/env vpython3 +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""File for testing common.py.""" + +import os +import tempfile +import unittest +import unittest.mock as mock + +import common + + +@unittest.skipIf(os.name == 'nt', 'Fuchsia tests not supported on Windows') +class CommonTest(unittest.TestCase): + """Test common.py methods.""" + def test_find_in_dir_returns_file_or_dir_if_searching(self) -> None: + """Test |find_in_dir| returns files if searching for file, or None.""" + # Make the directory structure. + with tempfile.TemporaryDirectory() as tmp_dir: + with tempfile.NamedTemporaryFile(dir=tmp_dir) as tmp_file, \ + tempfile.TemporaryDirectory(dir=tmp_dir) as inner_tmp_dir: + + # Structure is now: + # temp_dir/ + # temp_dir/inner_dir1 + # temp_dir/tempfile1 + # File is not a dir, so returns None. + self.assertIsNone( + common.find_in_dir(os.path.basename(tmp_file.name), + parent_dir=tmp_dir)) + + # Repeat for directory. + self.assertEqual( + common.find_in_dir(inner_tmp_dir, parent_dir=tmp_dir), + inner_tmp_dir) + + def test_find_image_in_sdk_searches_images_in_product_bundle(self): + """Test |find_image_in_sdk| searches for 'images' if product-bundle.""" + with tempfile.TemporaryDirectory() as tmp_dir: + os.makedirs(os.path.join(tmp_dir, 'sdk'), exist_ok=True) + os.makedirs(os.path.join(tmp_dir, 'images', 'workstation-product', + 'images'), + exist_ok=True) + with mock.patch('common.SDK_ROOT', os.path.join(tmp_dir, 'sdk')): + self.assertEqual( + common.find_image_in_sdk('workstation-product'), + os.path.join(tmp_dir, 'images', 'workstation-product', + 'images')) + + +if __name__ == '__main__': + unittest.main() diff --git a/fuchsia/test/compatible_utils.py b/fuchsia/test/compatible_utils.py new file mode 100644 index 000000000000..b917a656d785 --- /dev/null +++ b/fuchsia/test/compatible_utils.py @@ -0,0 +1,207 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""Functions used in both v1 and v2 scripts.""" + +import os +import platform +import re +import stat +import subprocess + +from typing import Iterable, List, Optional, Tuple + + +# File indicating version of an image downloaded to the host +_BUILD_ARGS = "buildargs.gn" +_ARGS_FILE = 'args.gn' + +_FILTER_DIR = 'testing/buildbot/filters' +_SSH_KEYS = os.path.expanduser('~/.ssh/fuchsia_authorized_keys') + + +class VersionNotFoundError(Exception): + """Thrown when version info cannot be retrieved from device.""" + + +def get_ssh_keys() -> str: + """Returns path of Fuchsia ssh keys.""" + + return _SSH_KEYS + + +def running_unattended() -> bool: + """Returns true if running non-interactively. + + When running unattended, confirmation prompts and the like are suppressed. + """ + + # TODO(crbug/1401387): Change to mixin based approach. + return 'SWARMING_SERVER' in os.environ + + +def get_host_arch() -> str: + """Retrieve CPU architecture of the host machine. """ + host_arch = platform.machine() + # platform.machine() returns AMD64 on 64-bit Windows. + if host_arch in ['x86_64', 'AMD64']: + return 'x64' + if host_arch in ['aarch64', 'arm64']: + return 'arm64' + raise NotImplementedError('Unsupported host architecture: %s' % host_arch) + + +def add_exec_to_file(file: str) -> None: + """Add execution bits to a file. + + Args: + file: path to the file. + """ + file_stat = os.stat(file) + os.chmod(file, file_stat.st_mode | stat.S_IXUSR) + + +def _add_exec_to_pave_binaries(system_image_dir: str): + """Add exec to required pave files. + + The pave files may vary depending if a product-bundle or a prebuilt images + directory is being used. + Args: + system_image_dir: string path to the directory containing the pave files. + """ + pb_files = [ + 'pave.sh', + os.path.join(f'host_{get_host_arch()}', 'bootserver') + ] + image_files = [ + 'pave.sh', + os.path.join(f'bootserver.exe.linux-{get_host_arch()}') + ] + use_pb_files = os.path.exists(os.path.join(system_image_dir, pb_files[1])) + for f in pb_files if use_pb_files else image_files: + add_exec_to_file(os.path.join(system_image_dir, f)) + + +def pave(image_dir: str, target_id: Optional[str])\ + -> subprocess.CompletedProcess: + """"Pave a device using the pave script inside |image_dir|.""" + _add_exec_to_pave_binaries(image_dir) + pave_command = [ + os.path.join(image_dir, 'pave.sh'), '--authorized-keys', + get_ssh_keys(), '-1' + ] + if target_id: + pave_command.extend(['-n', target_id]) + return subprocess.run(pave_command, check=True, text=True, timeout=300) + + +def parse_host_port(host_port_pair: str) -> Tuple[str, int]: + """Parses a host name or IP address and a port number from a string of + any of the following forms: + - hostname:port + - IPv4addy:port + - [IPv6addy]:port + + Returns: + A tuple of the string host name/address and integer port number. + + Raises: + ValueError if `host_port_pair` does not contain a colon or if the + substring following the last colon cannot be converted to an int. + """ + + host, port = host_port_pair.rsplit(':', 1) + + # Strip the brackets if the host looks like an IPv6 address. 
+ if len(host) >= 4 and host[0] == '[' and host[-1] == ']': + host = host[1:-1] + return (host, int(port)) + + +def get_ssh_prefix(host_port_pair: str) -> List[str]: + """Get the prefix of a barebone ssh command.""" + + ssh_addr, ssh_port = parse_host_port(host_port_pair) + return [ + 'ssh', '-F', + os.path.expanduser('~/.fuchsia/sshconfig'), ssh_addr, '-p', + str(ssh_port) + ] + + +def install_symbols(package_paths: Iterable[str], + fuchsia_out_dir: str) -> None: + """Installs debug symbols for a package into the GDB-standard symbol + directory located in fuchsia_out_dir.""" + + symbol_root = os.path.join(fuchsia_out_dir, '.build-id') + for path in package_paths: + package_dir = os.path.dirname(path) + ids_txt_path = os.path.join(package_dir, 'ids.txt') + with open(ids_txt_path, 'r') as f: + for entry in f: + build_id, binary_relpath = entry.strip().split(' ') + binary_abspath = os.path.abspath( + os.path.join(package_dir, binary_relpath)) + symbol_dir = os.path.join(symbol_root, build_id[:2]) + symbol_file = os.path.join(symbol_dir, build_id[2:] + '.debug') + if not os.path.exists(symbol_dir): + os.makedirs(symbol_dir) + + if os.path.islink(symbol_file) or os.path.exists(symbol_file): + # Clobber the existing entry to ensure that the symlink's + # target is up to date. + os.unlink(symbol_file) + os.symlink(os.path.relpath(binary_abspath, symbol_dir), + symbol_file) + + +# TODO(crbug.com/1279803): Until one can send files to the device when running +# a test, filter files must be read from the test package. +def map_filter_file_to_package_file(filter_file: str) -> str: + """Returns the path to |filter_file| within the test component's package.""" + + if not _FILTER_DIR in filter_file: + raise ValueError('CFv2 tests only support registered filter files ' + 'present in the test package') + return '/pkg/' + filter_file[filter_file.index(_FILTER_DIR):] + + +def get_sdk_hash(system_image_dir: str) -> Tuple[str, str]: + """Read version of hash in pre-installed package directory. + Returns: + Tuple of (product, version) of image to be installed. + Raises: + VersionNotFoundError: if contents of buildargs.gn cannot be found or the + version number cannot be extracted. + """ + + # TODO(crbug.com/1261961): Stop processing buildargs.gn directly. + args_file = os.path.join(system_image_dir, _BUILD_ARGS) + if not os.path.exists(args_file): + args_file = os.path.join(system_image_dir, _ARGS_FILE) + + if not os.path.exists(args_file): + raise VersionNotFoundError( + f'Dir {system_image_dir} did not contain {_BUILD_ARGS} or ' + f'{_ARGS_FILE}') + + with open(args_file) as f: + contents = f.readlines() + if not contents: + raise VersionNotFoundError('Could not retrieve %s' % args_file) + version_key = 'build_info_version' + product_key = 'build_info_product' + info_keys = [product_key, version_key] + version_info = {} + for line in contents: + for key in info_keys: + match = re.match(r'%s = "(.*)"' % key, line) + if match: + version_info[key] = match.group(1) + if not (version_key in version_info and product_key in version_info): + raise VersionNotFoundError( + 'Could not extract version info from %s. 
Contents: %s' % + (args_file, contents)) + + return (version_info[product_key], version_info[version_key]) diff --git a/fuchsia/test/compatible_utils_unittests.py b/fuchsia/test/compatible_utils_unittests.py new file mode 100755 index 000000000000..02815921c2cb --- /dev/null +++ b/fuchsia/test/compatible_utils_unittests.py @@ -0,0 +1,238 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""File for testing compatible_utils.py.""" + +import io +import os +import stat +import tempfile +import unittest +import unittest.mock as mock + +import compatible_utils + + +@unittest.skipIf(os.name == 'nt', 'Fuchsia tests not supported on Windows') +class CompatibleUtilsTest(unittest.TestCase): + """Test compatible_utils.py methods.""" + + def test_running_unattended_returns_true_if_headless_set(self) -> None: + """Test |running_unattended| returns True if CHROME_HEADLESS is set.""" + with mock.patch('os.environ', {'SWARMING_SERVER': 0}): + self.assertTrue(compatible_utils.running_unattended()) + + with mock.patch('os.environ', {'FOO_HEADLESS': 0}): + self.assertFalse(compatible_utils.running_unattended()) + + def test_get_host_arch(self) -> None: + """Test |get_host_arch| gets the host architecture and throws + exceptions on errors.""" + supported_arches = ['x86_64', 'AMD64', 'aarch64'] + with mock.patch('platform.machine', side_effect=supported_arches): + self.assertEqual(compatible_utils.get_host_arch(), 'x64') + self.assertEqual(compatible_utils.get_host_arch(), 'x64') + self.assertEqual(compatible_utils.get_host_arch(), 'arm64') + + with mock.patch('platform.machine', return_value=['fake-arch']), \ + self.assertRaises(NotImplementedError): + compatible_utils.get_host_arch() + + def test_add_exec_to_file(self) -> None: + """Test |add_exec_to_file| adds executable bit to file.""" + with tempfile.NamedTemporaryFile() as f: + original_stat = os.stat(f.name).st_mode + self.assertFalse(original_stat & stat.S_IXUSR) + + compatible_utils.add_exec_to_file(f.name) + + new_stat = os.stat(f.name).st_mode + self.assertTrue(new_stat & stat.S_IXUSR) + + # pylint: disable=no-self-use + def test_pave_adds_exec_to_binary_files(self) -> None: + """Test |pave| calls |add_exec_to_file| on necessary files.""" + with mock.patch('os.path.exists', return_value=True), \ + mock.patch('compatible_utils.add_exec_to_file') as mock_exec, \ + mock.patch('platform.machine', return_value='x86_64'), \ + mock.patch('subprocess.run'): + compatible_utils.pave('some/path/to/dir', 'some-target') + + mock_exec.assert_has_calls([ + mock.call('some/path/to/dir/pave.sh'), + mock.call('some/path/to/dir/host_x64/bootserver') + ], + any_order=True) + + def test_pave_adds_exec_to_binary_files_if_pb_set_not_found(self) -> None: + """Test |pave| calls |add_exec_to_file| on necessary files. + + Checks if current product-bundle files exist. If not, defaults to + prebuilt-images set. 
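+
+        The prebuilt-images layout keeps bootserver.exe.linux-<arch> at the
+        top level instead of under host_<arch>/.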
+ """ + with mock.patch('os.path.exists', return_value=False), \ + mock.patch('compatible_utils.add_exec_to_file') as mock_exec, \ + mock.patch('platform.machine', return_value='x86_64'), \ + mock.patch('subprocess.run'): + compatible_utils.pave('some/path/to/dir', 'some-target') + + mock_exec.assert_has_calls([ + mock.call('some/path/to/dir/pave.sh'), + mock.call('some/path/to/dir/bootserver.exe.linux-x64') + ], + any_order=True) + + def test_pave_adds_target_id_if_given(self) -> None: + """Test |pave| adds target-id to the arguments.""" + with mock.patch('os.path.exists', return_value=False), \ + mock.patch('compatible_utils.add_exec_to_file'), \ + mock.patch('platform.machine', return_value='x86_64'), \ + mock.patch('compatible_utils.get_ssh_keys', + return_value='authorized-keys-file'), \ + mock.patch('subprocess.run') as mock_subproc: + mock_subproc.reset_mock() + compatible_utils.pave('some/path/to/dir', 'some-target') + + mock_subproc.assert_called_once_with([ + 'some/path/to/dir/pave.sh', '--authorized-keys', + 'authorized-keys-file', '-1', '-n', 'some-target' + ], + check=True, + text=True, + timeout=300) + + # pylint: disable=no-self-use + + def test_parse_host_port_splits_address_and_strips_brackets(self) -> None: + """Test |parse_host_port| splits ipv4 and ipv6 addresses correctly.""" + self.assertEqual(compatible_utils.parse_host_port('hostname:55'), + ('hostname', 55)) + self.assertEqual(compatible_utils.parse_host_port('192.168.42.40:443'), + ('192.168.42.40', 443)) + self.assertEqual( + compatible_utils.parse_host_port('[2001:db8::1]:8080'), + ('2001:db8::1', 8080)) + + def test_map_filter_filter_file_throws_value_error_if_wrong_path(self + ) -> None: + """Test |map_filter_file| throws ValueError if path is missing + FILTER_DIR.""" + with self.assertRaises(ValueError): + compatible_utils.map_filter_file_to_package_file('foo') + + with self.assertRaises(ValueError): + compatible_utils.map_filter_file_to_package_file('some/other/path') + + with self.assertRaises(ValueError): + compatible_utils.map_filter_file_to_package_file('filters/file') + + # No error. + compatible_utils.map_filter_file_to_package_file( + 'testing/buildbot/filters/some.filter') + + def test_map_filter_filter_replaces_filter_dir_with_pkg_path(self) -> None: + """Test |map_filter_file| throws ValueError if path is missing + FILTER_DIR.""" + self.assertEqual( + '/pkg/testing/buildbot/filters/some.filter', + compatible_utils.map_filter_file_to_package_file( + 'foo/testing/buildbot/filters/some.filter')) + + def test_get_sdk_hash_fallsback_to_args_file_if_buildargs_dne(self + ) -> None: + """Test |get_sdk_hash| checks if buildargs.gn exists. + + If it does not, fallsback to args.gn. This should raise an exception + as it does not exist. 
+ """ + with mock.patch('os.path.exists', return_value=False) as mock_exists, \ + self.assertRaises(compatible_utils.VersionNotFoundError): + compatible_utils.get_sdk_hash('some/image/dir') + mock_exists.assert_has_calls([ + mock.call('some/image/dir/buildargs.gn'), + mock.call('some/image/dir/args.gn') + ]) + + def test_get_sdk_hash_parse_contents_of_args_file(self) -> None: + """Test |get_sdk_hash| parses buildargs contents correctly.""" + build_args_test_contents = """ +build_info_board = "chromebook-x64" +build_info_product = "workstation_eng" +build_info_version = "10.20221114.2.1" +universe_package_labels += [] +""" + with mock.patch('os.path.exists', return_value=True), \ + mock.patch('builtins.open', + return_value=io.StringIO(build_args_test_contents)): + self.assertEqual(compatible_utils.get_sdk_hash('some/dir'), + ('workstation_eng', '10.20221114.2.1')) + + def test_get_sdk_hash_raises_error_if_keys_missing(self) -> None: + """Test |get_sdk_hash| raises VersionNotFoundError if missing keys""" + build_args_test_contents = """ +import("//boards/chromebook-x64.gni") +import("//products/workstation_eng.gni") +cxx_rbe_enable = true +host_labels += [ "//bundles/infra/build" ] +universe_package_labels += [] +""" + with mock.patch('os.path.exists', return_value=True), \ + mock.patch( + 'builtins.open', + return_value=io.StringIO(build_args_test_contents)), \ + self.assertRaises(compatible_utils.VersionNotFoundError): + compatible_utils.get_sdk_hash('some/dir') + + def test_get_sdk_hash_raises_error_if_contents_empty(self) -> None: + """Test |get_sdk_hash| raises VersionNotFoundError if no contents.""" + with mock.patch('os.path.exists', return_value=True), \ + mock.patch('builtins.open', return_value=io.StringIO("")), \ + self.assertRaises(compatible_utils.VersionNotFoundError): + compatible_utils.get_sdk_hash('some/dir') + + def trim_noop_prefixes(self, path): + """Helper function to trim no-op path name prefixes that are + introduced by os.path.realpath on some platforms. These break + the unit tests, but have no actual effect on behavior.""" + # These must all end in the path separator character for the + # string length computation to be correct on all platforms. 
+ noop_prefixes = ['/private/'] + for prefix in noop_prefixes: + if path.startswith(prefix): + return path[len(prefix) - 1:] + return path + + def test_install_symbols(self): + + """Test |install_symbols|.""" + + with tempfile.TemporaryDirectory() as fuchsia_out_dir: + build_id = 'test_build_id' + symbol_file = os.path.join(fuchsia_out_dir, '.build-id', + build_id[:2], build_id[2:] + '.debug') + id_path = os.path.join(fuchsia_out_dir, 'ids.txt') + try: + binary_relpath = 'path/to/binary' + with open(id_path, 'w') as f: + f.write(f'{build_id} {binary_relpath}') + compatible_utils.install_symbols([id_path], fuchsia_out_dir) + self.assertTrue(os.path.islink(symbol_file)) + self.assertEqual( + self.trim_noop_prefixes(os.path.realpath(symbol_file)), + os.path.join(fuchsia_out_dir, binary_relpath)) + + new_binary_relpath = 'path/to/new/binary' + with open(id_path, 'w') as f: + f.write(f'{build_id} {new_binary_relpath}') + compatible_utils.install_symbols([id_path], fuchsia_out_dir) + self.assertTrue(os.path.islink(symbol_file)) + self.assertEqual( + self.trim_noop_prefixes(os.path.realpath(symbol_file)), + os.path.join(fuchsia_out_dir, new_binary_relpath)) + finally: + os.remove(id_path) + + +if __name__ == '__main__': + unittest.main() diff --git a/fuchsia/test/coveragetest.py b/fuchsia/test/coveragetest.py new file mode 100755 index 000000000000..3a82e53c2902 --- /dev/null +++ b/fuchsia/test/coveragetest.py @@ -0,0 +1,59 @@ +#!/usr/bin/env vpython3 +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Ensure files in the directory are thoroughly tested.""" + +import importlib +import io +import os +import sys +import unittest + +import coverage # pylint: disable=import-error + +# The files need to have sufficient coverages. +COVERED_FILES = [ + 'compatible_utils.py', 'deploy_to_fuchsia.py', 'flash_device.py', + 'log_manager.py', 'publish_package.py', 'serve_repo.py', 'test_server.py' +] + +# The files will be tested without coverage requirements. +TESTED_FILES = ['common.py', 'ffx_emulator.py'] + + +def main(): + """Gather coverage data, ensure included files are 100% covered.""" + + # Fuchsia tests not supported on Windows + if os.name == 'nt': + return 0 + + cov = coverage.coverage(data_file=None, + include=COVERED_FILES, + config_file=True) + cov.start() + + for file in COVERED_FILES + TESTED_FILES: + print('Testing ' + file + ' ...') + # pylint: disable=import-outside-toplevel + # import tests after coverage start to also cover definition lines. + module = importlib.import_module(file.replace('.py', '_unittests')) + # pylint: enable=import-outside-toplevel + + tests = unittest.TestLoader().loadTestsFromModule(module) + if not unittest.TextTestRunner().run(tests).wasSuccessful(): + return 1 + + cov.stop() + outf = io.StringIO() + percentage = cov.report(file=outf, show_missing=True) + if int(percentage) != 100: + print(outf.getvalue()) + print('FATAL: Insufficient coverage (%.f%%)' % int(percentage)) + return 1 + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/fuchsia/test/deploy_to_fuchsia.py b/fuchsia/test/deploy_to_fuchsia.py new file mode 100755 index 000000000000..41b92aac07d6 --- /dev/null +++ b/fuchsia/test/deploy_to_fuchsia.py @@ -0,0 +1,44 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""A script for deploying Chrome binaries to a Fuchsia checkout.""" + +import argparse +import os +import sys + +from common import read_package_paths, register_common_args +from compatible_utils import install_symbols +from publish_package import publish_packages + + +def register_fuchsia_args(parser: argparse.ArgumentParser) -> None: + """Register common arguments for deploying to Fuchsia.""" + + fuchsia_args = parser.add_argument_group( + 'fuchsia', 'Arguments for working with Fuchsia checkout.') + fuchsia_args.add_argument('--fuchsia-out-dir', + help='Path to output directory of a local ' + 'Fuchsia checkout.') + + +def main(): + """Stand-alone program for deploying to the output directory of a local + Fuchsia checkout.""" + + parser = argparse.ArgumentParser() + parser.add_argument('package', help='The package to deploy to Fuchsia.') + register_common_args(parser) + register_fuchsia_args(parser) + args = parser.parse_args() + + fuchsia_out_dir = os.path.expanduser(args.fuchsia_out_dir) + package_paths = read_package_paths(args.out_dir, args.package) + publish_packages(package_paths, os.path.join(fuchsia_out_dir, + 'amber-files')) + install_symbols(package_paths, fuchsia_out_dir) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/fuchsia/test/deploy_to_fuchsia_unittests.py b/fuchsia/test/deploy_to_fuchsia_unittests.py new file mode 100755 index 000000000000..7635b46297ca --- /dev/null +++ b/fuchsia/test/deploy_to_fuchsia_unittests.py @@ -0,0 +1,38 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""File for testing deploy_to_fuchsia.py.""" + +import os +import unittest +import unittest.mock as mock + +import deploy_to_fuchsia + + +class DeployToFuchsiaTest(unittest.TestCase): + """Unittests for deploy_to_fuchsia.py.""" + + @mock.patch('deploy_to_fuchsia.read_package_paths', return_value=[]) + @mock.patch('deploy_to_fuchsia.publish_packages') + @mock.patch('deploy_to_fuchsia.install_symbols') + def test_main(self, mock_install, mock_publish, mock_read) -> None: + """Tests |main|.""" + + test_package = 'test_package' + fuchsia_out_dir = 'out/fuchsia' + with mock.patch('sys.argv', [ + 'deploy_to_fuchsia.py', test_package, '-C', 'out/chromium', + '--fuchsia-out-dir', fuchsia_out_dir + ]): + deploy_to_fuchsia.main() + self.assertEqual(mock_read.call_args_list[0][0][1], test_package) + self.assertEqual(mock_publish.call_args_list[0][0][1], + os.path.join(fuchsia_out_dir, 'amber-files')) + self.assertEqual(mock_install.call_args_list[0][0][1], + fuchsia_out_dir) + + +if __name__ == '__main__': + unittest.main() diff --git a/fuchsia/test/ermine_ctl.py b/fuchsia/test/ermine_ctl.py new file mode 100644 index 000000000000..66253891a2b6 --- /dev/null +++ b/fuchsia/test/ermine_ctl.py @@ -0,0 +1,25 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""Defines erminctl interface compatible with modern scripts.""" + +import subprocess +from typing import List + +from compatible_utils import get_ssh_prefix +from common import get_ssh_address +import base_ermine_ctl + + +class ErmineCtl(base_ermine_ctl.BaseErmineCtl): + """ErmineCtl adaptation for modern scripts.""" + + def __init__(self, target_id: str): + super().__init__() + self._ssh_prefix = get_ssh_prefix(get_ssh_address(target_id)) + + def execute_command_async(self, args: List[str]) -> subprocess.Popen: + return subprocess.Popen(self._ssh_prefix + args, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + encoding='utf-8') diff --git a/fuchsia/test/ffx_emulator.py b/fuchsia/test/ffx_emulator.py new file mode 100644 index 000000000000..be473ccb920d --- /dev/null +++ b/fuchsia/test/ffx_emulator.py @@ -0,0 +1,162 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Provide helpers for running Fuchsia's `ffx emu`.""" + +import argparse +import ast +import logging +import os +import json +import random +import subprocess + +from contextlib import AbstractContextManager + +from common import check_ssh_config_file, find_image_in_sdk, get_system_info, \ + run_ffx_command, SDK_ROOT +from compatible_utils import get_host_arch, get_sdk_hash + +_EMU_COMMAND_RETRIES = 3 + + +class FfxEmulator(AbstractContextManager): + """A helper for managing emulators.""" + def __init__(self, args: argparse.Namespace) -> None: + if args.product_bundle: + self._product_bundle = args.product_bundle + else: + self._product_bundle = 'terminal.qemu-' + get_host_arch() + + self._enable_graphics = args.enable_graphics + self._hardware_gpu = args.hardware_gpu + self._logs_dir = args.logs_dir + self._with_network = args.with_network + if args.everlasting: + # Do not change the name, it will break the logic. + # ffx has a prefix-matching logic, so 'fuchsia-emulator' is not + # usable to avoid breaking local development workflow. I.e. + # developers can create an everlasting emulator and an ephemeral one + # without interfering each other. + self._node_name = 'fuchsia-everlasting-emulator' + assert self._everlasting() + else: + self._node_name = 'fuchsia-emulator-' + str(random.randint( + 1, 9999)) + + # Set the download path parallel to Fuchsia SDK directory + # permanently so that scripts can always find the product bundles. + run_ffx_command(('config', 'set', 'pbms.storage.path', + os.path.join(SDK_ROOT, os.pardir, 'images'))) + + def _everlasting(self) -> bool: + return self._node_name == 'fuchsia-everlasting-emulator' + + def _start_emulator(self) -> None: + """Start the emulator.""" + logging.info('Starting emulator %s', self._node_name) + check_ssh_config_file() + emu_command = [ + 'emu', 'start', self._product_bundle, '--name', self._node_name + ] + if not self._enable_graphics: + emu_command.append('-H') + if self._hardware_gpu: + emu_command.append('--gpu') + if self._logs_dir: + emu_command.extend( + ('-l', os.path.join(self._logs_dir, 'emulator_log'))) + if self._with_network: + emu_command.extend(('--net', 'tap')) + + # TODO(https://crbug.com/1336776): remove when ffx has native support + # for starting emulator on arm64 host. + if get_host_arch() == 'arm64': + + arm64_qemu_dir = os.path.join(SDK_ROOT, 'tools', 'arm64', + 'qemu_internal') + + # The arm64 emulator binaries are downloaded separately, so add + # a symlink to the expected location inside the SDK. 
+ if not os.path.isdir(arm64_qemu_dir): + os.symlink( + os.path.join(SDK_ROOT, '..', '..', 'qemu-linux-arm64'), + arm64_qemu_dir) + + # Add the arm64 emulator binaries to the SDK's manifest.json file. + sdk_manifest = os.path.join(SDK_ROOT, 'meta', 'manifest.json') + with open(sdk_manifest, 'r+') as f: + data = json.load(f) + for part in data['parts']: + if part['meta'] == 'tools/x64/qemu_internal-meta.json': + part['meta'] = 'tools/arm64/qemu_internal-meta.json' + break + f.seek(0) + json.dump(data, f) + f.truncate() + + # Generate a meta file for the arm64 emulator binaries using its + # x64 counterpart. + qemu_arm64_meta_file = os.path.join(SDK_ROOT, 'tools', 'arm64', + 'qemu_internal-meta.json') + qemu_x64_meta_file = os.path.join(SDK_ROOT, 'tools', 'x64', + 'qemu_internal-meta.json') + with open(qemu_x64_meta_file) as f: + data = str(json.load(f)) + qemu_arm64_meta = data.replace(r'tools/x64', 'tools/arm64') + with open(qemu_arm64_meta_file, "w+") as f: + json.dump(ast.literal_eval(qemu_arm64_meta), f) + emu_command.extend(['--engine', 'qemu']) + + for i in range(_EMU_COMMAND_RETRIES): + + # If the ffx daemon fails to establish a connection with + # the emulator after 85 seconds, that means the emulator + # failed to be brought up and a retry is needed. + # TODO(fxb/103540): Remove retry when start up issue is fixed. + try: + # TODO(fxb/125872): Debug is added for examining flakiness. + configs = ['emu.start.timeout=90'] + if i > 0: + logging.warning( + 'Emulator failed to start. Turning on debug') + configs.append('log.level=debug') + run_ffx_command(emu_command, timeout=85, configs=configs) + break + except (subprocess.TimeoutExpired, subprocess.CalledProcessError): + run_ffx_command(('emu', 'stop')) + + def _shutdown_emulator(self) -> None: + """Shutdown the emulator.""" + + logging.info('Stopping the emulator %s', self._node_name) + # The emulator might have shut down unexpectedly, so this command + # might fail. + run_ffx_command(('emu', 'stop', self._node_name), check=False) + + def __enter__(self) -> str: + """Start the emulator if necessary. + + Returns: + The node name of the emulator. + """ + + if self._everlasting(): + sdk_hash = get_sdk_hash(find_image_in_sdk(self._product_bundle)) + sys_info = get_system_info(self._node_name) + if sdk_hash == sys_info: + return self._node_name + logging.info( + ('The emulator version [%s] does not match the SDK [%s], ' + 'updating...'), sys_info, sdk_hash) + + self._start_emulator() + return self._node_name + + def __exit__(self, exc_type, exc_value, traceback) -> bool: + """Shutdown the emulator if necessary.""" + + if not self._everlasting(): + self._shutdown_emulator() + # Do not suppress exceptions. + return False diff --git a/fuchsia/test/ffx_emulator_unittests.py b/fuchsia/test/ffx_emulator_unittests.py new file mode 100755 index 000000000000..e12f13aa9b83 --- /dev/null +++ b/fuchsia/test/ffx_emulator_unittests.py @@ -0,0 +1,49 @@ +#!/usr/bin/env vpython3 +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""File for testing ffx_emulator.py.""" + +import argparse +import unittest + +from ffx_emulator import FfxEmulator + + +class FfxEmulatorTest(unittest.TestCase): + """Unittests for ffx_emulator.py""" + def test_use_fixed_node_name(self) -> None: + """FfxEmulator should use a fixed node name.""" + # Allowing the test case to access FfxEmulator._node_name directly. 
+ # pylint: disable=protected-access + self.assertEqual( + FfxEmulator( + argparse.Namespace( + **{ + 'product_bundle': None, + 'enable_graphics': False, + 'hardware_gpu': False, + 'logs_dir': '.', + 'with_network': False, + 'everlasting': True + }))._node_name, 'fuchsia-everlasting-emulator') + + def test_use_random_node_name(self) -> None: + """FfxEmulator should not use a fixed node name.""" + # Allowing the test case to access FfxEmulator._node_name directly. + # pylint: disable=protected-access + self.assertNotEqual( + FfxEmulator( + argparse.Namespace( + **{ + 'product_bundle': None, + 'enable_graphics': False, + 'hardware_gpu': False, + 'logs_dir': '.', + 'with_network': False, + 'everlasting': False + }))._node_name, 'fuchsia-everlasting-emulator') + + +if __name__ == '__main__': + unittest.main() diff --git a/fuchsia/test/ffx_integration.py b/fuchsia/test/ffx_integration.py new file mode 100644 index 000000000000..9385e93f9fea --- /dev/null +++ b/fuchsia/test/ffx_integration.py @@ -0,0 +1,236 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Provide helpers for running Fuchsia's `ffx`.""" + +import logging +import os +import json +import subprocess +import sys +import tempfile + +from contextlib import AbstractContextManager +from typing import IO, Iterable, List, Optional + +from common import run_continuous_ffx_command, run_ffx_command, SDK_ROOT + +RUN_SUMMARY_SCHEMA = \ + 'https://fuchsia.dev/schema/ffx_test/run_summary-8d1dd964.json' + + +def get_config(name: str) -> Optional[str]: + """Run a ffx config get command to retrieve the config value.""" + + try: + return run_ffx_command(['config', 'get', name], + capture_output=True).stdout.strip() + except subprocess.CalledProcessError as cpe: + # A return code of 2 indicates no previous value set. + if cpe.returncode == 2: + return None + raise + + +class ScopedFfxConfig(AbstractContextManager): + """Temporarily overrides `ffx` configuration. Restores the previous value + upon exit.""" + + def __init__(self, name: str, value: str) -> None: + """ + Args: + name: The name of the property to set. + value: The value to associate with `name`. + """ + self._old_value = None + self._new_value = value + self._name = name + + def __enter__(self): + """Override the configuration.""" + + # Cache the old value. + self._old_value = get_config(self._name) + if self._new_value != self._old_value: + run_ffx_command(['config', 'set', self._name, self._new_value]) + return self + + def __exit__(self, exc_type, exc_val, exc_tb) -> bool: + if self._new_value == self._old_value: + return False + + # Allow removal of config to fail. + remove_cmd = run_ffx_command(['config', 'remove', self._name], + check=False) + if remove_cmd.returncode != 0: + logging.warning('Error when removing ffx config %s', self._name) + + # Explicitly set the value back only if removing the new value doesn't + # already restore the old value. + if self._old_value is not None and \ + self._old_value != get_config(self._name): + run_ffx_command(['config', 'set', self._name, self._old_value]) + + # Do not suppress exceptions. + return False + + +def test_connection(target_id: Optional[str]) -> None: + """Run an echo test to verify that the device can be connected to.""" + + run_ffx_command(('target', 'echo'), target_id) + + +class FfxTestRunner(AbstractContextManager): + """A context manager that manages a session for running a test via `ffx`. 
+ + Upon entry, an instance of this class configures `ffx` to retrieve files + generated by a test and prepares a directory to hold these files either in a + specified directory or in tmp. On exit, any previous configuration of + `ffx` is restored and the temporary directory, if used, is deleted. + + The prepared directory is used when invoking `ffx test run`. + """ + + def __init__(self, results_dir: Optional[str] = None) -> None: + """ + Args: + results_dir: Directory on the host where results should be stored. + """ + self._results_dir = results_dir + self._custom_artifact_directory = None + self._temp_results_dir = None + self._debug_data_directory = None + + def __enter__(self): + if self._results_dir: + os.makedirs(self._results_dir, exist_ok=True) + else: + self._temp_results_dir = tempfile.TemporaryDirectory() + self._results_dir = self._temp_results_dir.__enter__() + return self + + def __exit__(self, exc_type, exc_val, exc_tb) -> bool: + if self._temp_results_dir: + self._temp_results_dir.__exit__(exc_type, exc_val, exc_tb) + self._temp_results_dir = None + + # Do not suppress exceptions. + return False + + def run_test(self, + component_uri: str, + test_args: Optional[Iterable[str]] = None, + node_name: Optional[str] = None) -> subprocess.Popen: + """Starts a subprocess to run a test on a target. + Args: + component_uri: The test component URI. + test_args: Arguments to the test package, if any. + node_name: The target on which to run the test. + Returns: + A subprocess.Popen object. + """ + command = [ + 'test', 'run', '--output-directory', self._results_dir, + component_uri + ] + if test_args: + command.append('--') + command.extend(test_args) + return run_continuous_ffx_command(command, + node_name, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + + def _parse_test_outputs(self): + """Parses the output files generated by the test runner. + + The instance's `_custom_artifact_directory` member is set to the + directory holding output files emitted by the test. + + This function is idempotent, and performs no work if it has already been + called. + """ + if self._custom_artifact_directory: + return + + run_summary_path = os.path.join(self._results_dir, 'run_summary.json') + try: + with open(run_summary_path) as run_summary_file: + run_summary = json.load(run_summary_file) + except IOError: + logging.exception('Error reading run summary file.') + return + except ValueError: + logging.exception('Error parsing run summary file %s', + run_summary_path) + return + + assert run_summary['schema_id'] == RUN_SUMMARY_SCHEMA, \ + 'Unsupported version found in %s' % run_summary_path + + run_artifact_dir = run_summary.get('data', {})['artifact_dir'] + for artifact_path, artifact in run_summary.get( + 'data', {})['artifacts'].items(): + if artifact['artifact_type'] == 'DEBUG': + self._debug_data_directory = os.path.join( + self._results_dir, run_artifact_dir, artifact_path) + break + + if run_summary['data']['outcome'] == "NOT_STARTED": + logging.critical('Test execution was interrupted. Either the ' + 'emulator crashed while the tests were still ' + 'running or connection to the device was lost.') + sys.exit(1) + + # There should be precisely one suite for the test that ran. + suites_list = run_summary.get('data', {}).get('suites') + if not suites_list: + logging.error('Missing or empty list of suites in %s', + run_summary_path) + return + suite_summary = suites_list[0] + + # Get the top-level directory holding all artifacts for this suite. 
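+        # This is distinct from the run-level artifact_dir used for the
+        # DEBUG artifacts above.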
+ artifact_dir = suite_summary.get('artifact_dir') + if not artifact_dir: + logging.error('Failed to find suite\'s artifact_dir in %s', + run_summary_path) + return + + # Get the path corresponding to artifacts + for artifact_path, artifact in suite_summary['artifacts'].items(): + if artifact['artifact_type'] == 'CUSTOM': + self._custom_artifact_directory = os.path.join( + self._results_dir, artifact_dir, artifact_path) + break + + def get_custom_artifact_directory(self) -> str: + """Returns the full path to the directory holding custom artifacts + emitted by the test or None if the directory could not be discovered. + """ + self._parse_test_outputs() + return self._custom_artifact_directory + + def get_debug_data_directory(self): + """Returns the full path to the directory holding debug data + emitted by the test, or None if the path cannot be determined. + """ + self._parse_test_outputs() + return self._debug_data_directory + + +def run_symbolizer(symbol_paths: List[str], input_fd: IO, + output_fd: IO) -> subprocess.Popen: + """Runs symbolizer that symbolizes |input| and outputs to |output|.""" + + symbolize_cmd = ([ + 'debug', 'symbolize', '--', '--omit-module-lines', '--build-id-dir', + os.path.join(SDK_ROOT, '.build-id') + ]) + for path in symbol_paths: + symbolize_cmd.extend(['--ids-txt', path]) + return run_continuous_ffx_command(symbolize_cmd, + stdin=input_fd, + stdout=output_fd, + stderr=subprocess.STDOUT) diff --git a/fuchsia/test/flash_device.py b/fuchsia/test/flash_device.py new file mode 100755 index 000000000000..291f6e90fb8e --- /dev/null +++ b/fuchsia/test/flash_device.py @@ -0,0 +1,243 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Implements commands for flashing a Fuchsia device.""" + +import argparse +import logging +import os +import subprocess +import sys +import time + +from typing import Optional, Tuple + +import common +from common import BootMode, boot_device, check_ssh_config_file, \ + get_system_info, find_image_in_sdk, register_device_args +from compatible_utils import get_sdk_hash, get_ssh_keys, pave, \ + running_unattended, add_exec_to_file, get_host_arch +from lockfile import lock + +# Flash-file lock. Used to restrict number of flash operations per host. +# File lock should be marked as stale after 15 mins. +_FF_LOCK = os.path.join('/tmp', 'flash.lock') +_FF_LOCK_STALE_SECS = 60 * 15 +_FF_LOCK_ACQ_TIMEOUT = _FF_LOCK_STALE_SECS + + +def _get_system_info(target: Optional[str], + serial_num: Optional[str]) -> Tuple[str, str]: + """Retrieves installed OS version from device. + + Args: + target: Target to get system info of. + serial_num: Serial number of device to get system info of. + Returns: + Tuple of strings, containing (product, version number). + """ + + # TODO(b/242191374): Remove when devices in swarming are no longer booted + # into zedboot. + if running_unattended(): + try: + boot_device(target, BootMode.REGULAR, serial_num) + except (subprocess.CalledProcessError, common.StateTransitionError): + logging.warning('Could not boot device. 
Assuming in ZEDBOOT') + return ('', '') + wait_cmd = common.run_ffx_command(('target', 'wait', '-t', '180'), + target, + check=False) + if wait_cmd.returncode != 0: + return ('', '') + + return get_system_info(target) + + +def update_required( + os_check, + system_image_dir: Optional[str], + target: Optional[str], + serial_num: Optional[str] = None) -> Tuple[bool, Optional[str]]: + """Returns True if a system update is required and path to image dir.""" + + if os_check == 'ignore': + return False, system_image_dir + if not system_image_dir: + raise ValueError('System image directory must be specified.') + if not os.path.exists(system_image_dir): + logging.warning( + 'System image directory does not exist. Assuming it\'s ' + 'a product-bundle name and dynamically searching for ' + 'image directory') + path = find_image_in_sdk(system_image_dir) + if not path: + raise FileNotFoundError( + f'System image directory {system_image_dir} could not' + 'be found') + system_image_dir = path + if (os_check == 'check' + and get_sdk_hash(system_image_dir) == _get_system_info( + target, serial_num)): + return False, system_image_dir + return True, system_image_dir + + +def _add_exec_to_flash_binaries(system_image_dir: str) -> None: + """Add exec to required flash files. + + The flash files may vary depending if a product-bundle or a prebuilt images + directory is being used. + Args: + system_image_dir: string path to the directory containing the flash files. + """ + pb_files = [ + 'flash.sh', + os.path.join(f'host_{get_host_arch()}', 'fastboot') + ] + image_files = ['flash.sh', f'fastboot.exe.linux-{get_host_arch()}'] + use_pb_files = os.path.exists(os.path.join(system_image_dir, pb_files[1])) + for f in pb_files if use_pb_files else image_files: + add_exec_to_file(os.path.join(system_image_dir, f)) + + +def _run_flash_command(system_image_dir: str, target_id: Optional[str]): + """Helper function for running `ffx target flash`.""" + + _add_exec_to_flash_binaries(system_image_dir) + # TODO(fxb/91843): Remove workaround when ffx has stable support for + # multiple hardware devices connected via USB. + if running_unattended(): + flash_cmd = [ + os.path.join(system_image_dir, 'flash.sh'), + '--ssh-key=%s' % get_ssh_keys() + ] + # Target ID could be the nodename or the Serial number. + if target_id: + flash_cmd.extend(('-s', target_id)) + subprocess.run(flash_cmd, check=True, timeout=240) + return + + manifest = os.path.join(system_image_dir, 'flash-manifest.manifest') + common.run_ffx_command( + ('target', 'flash', manifest, '--no-bootloader-reboot'), + target_id=target_id, + configs=[ + 'fastboot.usb.disabled=true', 'ffx.fastboot.inline_target=true', + 'fastboot.reboot.reconnect_timeout=120' + ]) + + +def flash(system_image_dir: str, + target: Optional[str], + serial_num: Optional[str] = None) -> None: + """Flash the device.""" + # Flash only with a file lock acquired. + # This prevents multiple fastboot binaries from flashing concurrently, + # which should increase the odds of flashing success. 
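+    # The lock is marked stale after 15 minutes (_FF_LOCK_STALE_SECS), so a
+    # crashed flash attempt does not block the host indefinitely.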
+ with lock(_FF_LOCK, timeout=_FF_LOCK_ACQ_TIMEOUT): + if serial_num: + boot_device(target, BootMode.BOOTLOADER, serial_num) + for _ in range(10): + time.sleep(10) + if common.run_ffx_command(('target', 'list', serial_num), + check=False).returncode == 0: + break + _run_flash_command(system_image_dir, serial_num) + else: + _run_flash_command(system_image_dir, target) + + +def update(system_image_dir: str, + os_check: str, + target: Optional[str], + serial_num: Optional[str] = None, + should_pave: Optional[bool] = True) -> None: + """Conditionally updates target given. + + Args: + system_image_dir: string, path to image directory. + os_check: , which decides how to update the device. + target: Node-name string indicating device that should be updated. + serial_num: String of serial number of device that should be updated. + should_pave: Optional bool on whether or not to pave or flash. + """ + needs_update, actual_image_dir = update_required(os_check, + system_image_dir, target, + serial_num) + + system_image_dir = actual_image_dir + if needs_update: + check_ssh_config_file() + if should_pave: + if running_unattended(): + assert target, ('Target ID must be specified on swarming when' + ' paving.') + # TODO(crbug.com/1405525): We should check the device state + # before and after rebooting it to avoid unnecessary reboot or + # undesired state. + boot_device(target, BootMode.RECOVERY, serial_num) + try: + pave(system_image_dir, target) + except subprocess.TimeoutExpired: + # Fallback to flashing, just in case it might work. + # This could recover the device and make it usable. + # If it fails, device is unpaveable anyway, and should be taken + # out of fleet - this will do that. + flash(system_image_dir, target, serial_num) + else: + flash(system_image_dir, target, serial_num) + # Always sleep after all updates. + time.sleep(180) + + +def register_update_args(arg_parser: argparse.ArgumentParser, + default_os_check: Optional[str] = 'check', + default_pave: Optional[bool] = True) -> None: + """Register common arguments for device updating.""" + serve_args = arg_parser.add_argument_group('update', + 'device updating arguments') + serve_args.add_argument('--system-image-dir', + help='Specify the directory that contains the ' + 'Fuchsia image used to pave the device. Only ' + 'needs to be specified if "os_check" is not ' + '"ignore".') + serve_args.add_argument('--serial-num', + default=os.environ.get('FUCHSIA_FASTBOOT_SERNUM'), + help='Serial number of the device. Should be ' + 'specified for devices that do not have an image ' + 'flashed.') + serve_args.add_argument('--os-check', + choices=['check', 'update', 'ignore'], + default=default_os_check, + help='Sets the OS version enforcement policy. If ' + '"check", then the deployment process will halt ' + 'if the target\'s version does not match. If ' + '"update", then the target device will ' + 'be reflashed. If "ignore", then the OS version ' + 'will not be checked.') + serve_args.add_argument('--pave', + action='store_true', + help='Performs a pave instead of a flash. 
+                            'Device must already be in Zedboot')
+    serve_args.add_argument('--no-pave',
+                            action='store_false',
+                            dest='pave',
+                            help='Performs a flash instead of a pave '
+                            '(experimental).')
+    serve_args.set_defaults(pave=default_pave)
+
+
+def main():
+    """Stand-alone function for flashing a device."""
+    parser = argparse.ArgumentParser()
+    register_device_args(parser)
+    register_update_args(parser, default_os_check='update', default_pave=False)
+    args = parser.parse_args()
+    update(args.system_image_dir, args.os_check, args.target_id,
+           args.serial_num, args.pave)
+
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/fuchsia/test/flash_device_unittests.py b/fuchsia/test/flash_device_unittests.py
new file mode 100755
index 000000000000..0233ba9660d3
--- /dev/null
+++ b/fuchsia/test/flash_device_unittests.py
@@ -0,0 +1,349 @@
+#!/usr/bin/env vpython3
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""File for testing flash_device.py."""
+
+import os
+import subprocess
+import unittest
+import unittest.mock as mock
+
+import common
+import flash_device
+
+_TEST_IMAGE_DIR = 'test/image/dir'
+_TEST_PRODUCT = 'test_product'
+_TEST_VERSION = 'test.version'
+
+
+# pylint: disable=too-many-public-methods,protected-access
+class FlashDeviceTest(unittest.TestCase):
+    """Unittests for flash_device.py."""
+
+    def setUp(self) -> None:
+        context_mock = mock.Mock()
+        context_mock.__enter__ = mock.Mock(return_value=None)
+        context_mock.__exit__ = mock.Mock(return_value=None)
+        ffx_mock = mock.Mock()
+        ffx_mock.returncode = 0
+        ffx_patcher = mock.patch('common.run_ffx_command',
+                                 return_value=ffx_mock)
+        sdk_hash_patcher = mock.patch('flash_device.get_sdk_hash',
+                                      return_value=(_TEST_PRODUCT,
+                                                    _TEST_VERSION))
+        swarming_patcher = mock.patch('flash_device.running_unattended',
+                                      return_value=False)
+        check_patcher = mock.patch('flash_device.check_ssh_config_file')
+        time_sleep = mock.patch('time.sleep')
+        self._ffx_mock = ffx_patcher.start()
+        self._sdk_hash_mock = sdk_hash_patcher.start()
+        self._check_patcher_mock = check_patcher.start()
+        self._swarming_mock = swarming_patcher.start()
+        self._time_sleep = time_sleep.start()
+        # Register the patchers, not the mocks they return, for cleanup;
+        # calling .stop on the returned mock is a no-op that would leak the
+        # patches across tests.
+        self.addCleanup(ffx_patcher.stop)
+        self.addCleanup(sdk_hash_patcher.stop)
+        self.addCleanup(check_patcher.stop)
+        self.addCleanup(swarming_patcher.stop)
+        self.addCleanup(time_sleep.stop)
+
+    def test_update_required_on_ignore_returns_immediately(self) -> None:
+        """Test |os_check|='ignore' skips all checks."""
+        result, new_image_dir = flash_device.update_required(
+            'ignore', 'some-image-dir', None)
+
+        self.assertFalse(result)
+        self.assertEqual(new_image_dir, 'some-image-dir')
+
+    def test_update_required_raises_value_error_if_no_image_dir(self) -> None:
+        """Test |os_check|!='ignore' checks that image dir is non-falsy."""
+        with self.assertRaises(ValueError):
+            flash_device.update_required('update', None, None)
+
+    def test_update_required_logs_missing_image_dir(self) -> None:
+        """Test |os_check|!='ignore' warns if image dir does not exist."""
+        with mock.patch('os.path.exists', return_value=False), \
+                mock.patch('flash_device.find_image_in_sdk'), \
+                mock.patch('flash_device._get_system_info'), \
+                self.assertLogs() as logger:
+            flash_device.update_required('update', 'some/image/dir', None)
+            self.assertIn('image directory does not exist', logger.output[0])
+
+    def test_update_required_searches_and_returns_sdk_if_image_found(
+            self) -> None:
+
"""Test |os_check|!='ignore' searches for image dir in SDK.""" + with mock.patch('os.path.exists', return_value=False), \ + mock.patch('flash_device.find_image_in_sdk') as mock_find, \ + mock.patch('flash_device._get_system_info'), \ + mock.patch('common.SDK_ROOT', 'path/to/sdk/dir'), \ + self.assertLogs(): + mock_find.return_value = 'path/to/image/dir' + update_required, new_image_dir = flash_device.update_required( + 'update', 'product-bundle', None, None) + self.assertTrue(update_required) + self.assertEqual(new_image_dir, 'path/to/image/dir') + mock_find.assert_called_once_with('product-bundle') + + def test_update_required_raises_file_not_found_error(self) -> None: + """Test |os_check|!='ignore' raises FileNotFoundError if no path.""" + with mock.patch('os.path.exists', return_value=False), \ + mock.patch('flash_device.find_image_in_sdk', + return_value=None), \ + mock.patch('common.SDK_ROOT', 'path/to/sdk/dir'), \ + self.assertLogs(), \ + self.assertRaises(FileNotFoundError): + flash_device.update_required('update', 'product-bundle', None) + + def test_update_ignore(self) -> None: + """Test setting |os_check| to 'ignore'.""" + + flash_device.update(_TEST_IMAGE_DIR, 'ignore', None) + self.assertEqual(self._ffx_mock.call_count, 0) + self.assertEqual(self._sdk_hash_mock.call_count, 0) + + def test_dir_unspecified_value_error(self) -> None: + """Test ValueError raised when system_image_dir unspecified.""" + + with self.assertRaises(ValueError): + flash_device.update(None, 'check', None) + + def test_update_system_info_match(self) -> None: + """Test no update when |os_check| is 'check' and system info matches.""" + + with mock.patch('os.path.exists', return_value=True): + self._ffx_mock.return_value.stdout = \ + '[{"title": "Build", "child": [{"value": "%s"}, ' \ + '{"value": "%s"}]}]' % (_TEST_VERSION, _TEST_PRODUCT) + flash_device.update(_TEST_IMAGE_DIR, 'check', None) + self.assertEqual(self._ffx_mock.call_count, 1) + self.assertEqual(self._sdk_hash_mock.call_count, 1) + + def test_update_system_info_catches_boot_failure(self) -> None: + """Test update when |os_check=check| catches boot_device exceptions.""" + + self._swarming_mock.return_value = True + with mock.patch('os.path.exists', return_value=True), \ + mock.patch('flash_device._add_exec_to_flash_binaries'), \ + mock.patch('flash_device.boot_device') as mock_boot, \ + mock.patch('flash_device.get_system_info') as mock_sys_info, \ + mock.patch('flash_device.subprocess.run'): + mock_boot.side_effect = common.StateTransitionError( + 'Incorrect state') + self._ffx_mock.return_value.stdout = \ + '[{"title": "Build", "child": [{"value": "wrong.version"}, ' \ + '{"value": "wrong_product"}]}]' + flash_device.update(_TEST_IMAGE_DIR, + 'check', + None, + should_pave=False) + # Regular boot is to check the versions. + mock_boot.assert_called_once_with(mock.ANY, + common.BootMode.REGULAR, None) + self.assertEqual(self._ffx_mock.call_count, 0) + + # get_system_info should not even be called due to early exit. 
+ mock_sys_info.assert_not_called() + + def test_update_system_info_mismatch(self) -> None: + """Test update when |os_check| is 'check' and system info does not + match.""" + + self._swarming_mock.return_value = True + with mock.patch('os.path.exists', return_value=True), \ + mock.patch('flash_device._add_exec_to_flash_binaries'), \ + mock.patch('flash_device.boot_device') as mock_boot, \ + mock.patch('flash_device.subprocess.run'): + self._ffx_mock.return_value.stdout = \ + '[{"title": "Build", "child": [{"value": "wrong.version"}, ' \ + '{"value": "wrong_product"}]}]' + flash_device.update(_TEST_IMAGE_DIR, + 'check', + None, + should_pave=False) + # Regular boot is to check the versions. + mock_boot.assert_called_once_with(mock.ANY, + common.BootMode.REGULAR, None) + self.assertEqual(self._ffx_mock.call_count, 2) + + def test_update_system_info_mismatch_adds_exec_to_flash_binaries(self + ) -> None: + """Test update adds exec bit to flash binaries if flashing.""" + + with mock.patch('os.path.exists', return_value=True), \ + mock.patch('flash_device.get_host_arch', + return_value='foo_arch'), \ + mock.patch('flash_device.add_exec_to_file') as add_exec: + self._ffx_mock.return_value.stdout = \ + '[{"title": "Build", "child": [{"value": "wrong.version"}, ' \ + '{"value": "wrong_product"}]}]' + flash_device.update(_TEST_IMAGE_DIR, + 'check', + None, + should_pave=False) + add_exec.assert_has_calls([ + mock.call(os.path.join(_TEST_IMAGE_DIR, 'flash.sh')), + mock.call( + os.path.join(_TEST_IMAGE_DIR, 'host_foo_arch', 'fastboot')) + ], + any_order=True) + + def test_update_adds_exec_to_flash_binaries_depending_on_location( + self) -> None: + """Test update adds exec bit to flash binaries if flashing.""" + + # First exists is for image dir, second is for fastboot binary. + # Missing this fastboot binary means that the test will default to a + # different path. 
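+        # With side_effect=[True, False], the probe for host_<arch>/fastboot
+        # fails, so _add_exec_to_flash_binaries() is expected to fall back to
+        # the prebuilt-image layout (fastboot.exe.linux-<arch>), which the
+        # assertion below verifies.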
+        with mock.patch('os.path.exists', side_effect=[True, False]), \
+                mock.patch('flash_device.get_host_arch',
+                           return_value='foo_arch'), \
+                mock.patch('flash_device.add_exec_to_file') as add_exec:
+            self._ffx_mock.return_value.stdout = \
+                '[{"title": "Build", "child": [{"value": "wrong.version"}, ' \
+                '{"value": "wrong_product"}]}]'
+            flash_device.update(_TEST_IMAGE_DIR,
+                                'check',
+                                None,
+                                should_pave=False)
+            add_exec.assert_has_calls([
+                mock.call(os.path.join(_TEST_IMAGE_DIR, 'flash.sh')),
+                mock.call(
+                    os.path.join(_TEST_IMAGE_DIR,
+                                 'fastboot.exe.linux-foo_arch'))
+            ], any_order=True)
+
+    def test_incorrect_target_info(self) -> None:
+        """Test update when |os_check| is 'check' and system info was not
+        retrieved."""
+        with mock.patch('os.path.exists', return_value=True), \
+                mock.patch('flash_device._add_exec_to_flash_binaries'):
+            self._ffx_mock.return_value.stdout = '[{"title": "badtitle"}]'
+            flash_device.update(_TEST_IMAGE_DIR,
+                                'check',
+                                None,
+                                should_pave=False)
+        self.assertEqual(self._ffx_mock.call_count, 2)
+
+    def test_update_with_serial_num(self) -> None:
+        """Test update when |serial_num| is specified."""
+
+        with mock.patch('time.sleep'), \
+                mock.patch('os.path.exists', return_value=True), \
+                mock.patch('flash_device.boot_device') as mock_boot, \
+                mock.patch('flash_device._add_exec_to_flash_binaries'):
+            flash_device.update(_TEST_IMAGE_DIR,
+                                'update',
+                                None,
+                                'test_serial',
+                                should_pave=False)
+        mock_boot.assert_called_once_with(mock.ANY,
+                                          common.BootMode.BOOTLOADER,
+                                          'test_serial')
+        self.assertEqual(self._ffx_mock.call_count, 2)
+
+    def test_reboot_failure(self) -> None:
+        """Test |update_required| reports that an update is needed when the
+        device fails to come back up after a reboot."""
+        self._ffx_mock.return_value.returncode = 1
+        with mock.patch('time.sleep'), \
+                mock.patch('os.path.exists', return_value=True), \
+                mock.patch('flash_device.running_unattended',
+                           return_value=True), \
+                mock.patch('flash_device.boot_device'):
+            required, _ = flash_device.update_required('check',
+                                                       _TEST_IMAGE_DIR, None)
+        self.assertEqual(required, True)
+
+    # pylint: disable=no-self-use
+    def test_update_calls_paving_if_specified(self) -> None:
+        """Test update calls pave if specified."""
+        with mock.patch('time.sleep'), \
+                mock.patch('os.path.exists', return_value=True), \
+                mock.patch('flash_device.running_unattended',
+                           return_value=True), \
+                mock.patch('flash_device.boot_device') as mock_boot, \
+                mock.patch('flash_device.pave') as mock_pave:
+            flash_device.update(_TEST_IMAGE_DIR,
+                                'update',
+                                'some-target-id',
+                                should_pave=True)
+
+            mock_boot.assert_called_once_with('some-target-id',
+                                              common.BootMode.RECOVERY, None)
+            mock_pave.assert_called_once_with(_TEST_IMAGE_DIR,
+                                              'some-target-id')
+
+    # pylint: enable=no-self-use
+
+    def test_update_raises_error_if_unattended_with_no_target(self) -> None:
+        """Test update raises error if no target specified."""
+
+        self._swarming_mock.return_value = True
+        with mock.patch('time.sleep'), \
+                mock.patch('flash_device.pave'), \
+                mock.patch('os.path.exists', return_value=True):
+            self.assertRaises(AssertionError,
+                              flash_device.update,
+                              _TEST_IMAGE_DIR,
+                              'update',
+                              None,
+                              should_pave=True)
+
+    def test_update_on_swarming(self) -> None:
+        """Test update on swarming bots."""
+
+        self._swarming_mock.return_value = True
+        with mock.patch('time.sleep'), \
+                mock.patch('os.path.exists', return_value=True), \
+                mock.patch('flash_device._add_exec_to_flash_binaries'), \
+                mock.patch('flash_device.boot_device') as mock_boot, \
+                mock.patch('subprocess.run'):
+
flash_device.update(_TEST_IMAGE_DIR, + 'update', + None, + 'test_serial', + should_pave=False) + mock_boot.assert_called_once_with(mock.ANY, + common.BootMode.BOOTLOADER, + 'test_serial') + self.assertEqual(self._ffx_mock.call_count, 1) + + # pylint: disable=no-self-use + def test_update_with_pave_timeout_defaults_to_flash(self) -> None: + """Test update falls back to flash if pave fails.""" + with mock.patch('time.sleep'), \ + mock.patch('os.path.exists', return_value=True), \ + mock.patch('flash_device.running_unattended', + return_value=True), \ + mock.patch('flash_device.pave') as mock_pave, \ + mock.patch('flash_device.boot_device'), \ + mock.patch('flash_device.flash') as mock_flash: + mock_pave.side_effect = subprocess.TimeoutExpired( + cmd='/some/cmd', + timeout=0, + ) + flash_device.update(_TEST_IMAGE_DIR, + 'update', + 'some-target-id', + should_pave=True) + mock_pave.assert_called_once_with(_TEST_IMAGE_DIR, + 'some-target-id') + mock_flash.assert_called_once_with(_TEST_IMAGE_DIR, + 'some-target-id', None) + + # pylint: enable=no-self-use + + def test_main(self) -> None: + """Tests |main| function.""" + + with mock.patch( + 'sys.argv', + ['flash_device.py', '--os-check', 'ignore', '--no-pave']): + with mock.patch.dict(os.environ, {}): + flash_device.main() + self.assertEqual(self._ffx_mock.call_count, 0) +# pylint: enable=too-many-public-methods,protected-access + + +if __name__ == '__main__': + unittest.main() diff --git a/fuchsia/test/lockfile.py b/fuchsia/test/lockfile.py new file mode 100644 index 000000000000..422cfe4c2403 --- /dev/null +++ b/fuchsia/test/lockfile.py @@ -0,0 +1,79 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Exclusive filelocking for all supported platforms. + +Copied from third_party/depot_tools/lockfile.py. +""" + +import contextlib +import fcntl +import logging +import os +import time + + +class LockError(Exception): + """Error raised if timeout or lock (without timeout) fails.""" + + +def _open_file(lockfile): + open_flags = (os.O_CREAT | os.O_WRONLY) + return os.open(lockfile, open_flags, 0o644) + + +def _close_file(file_descriptor): + os.close(file_descriptor) + + +def _lock_file(file_descriptor): + fcntl.flock(file_descriptor, fcntl.LOCK_EX | fcntl.LOCK_NB) + + +def _try_lock(lockfile): + f = _open_file(lockfile) + try: + _lock_file(f) + except Exception: + _close_file(f) + raise + return lambda: _close_file(f) + + +def _lock(path, timeout=0): + """_lock returns function to release the lock if locking was successful. + + _lock also implements simple retry logic.""" + elapsed = 0 + while True: + try: + return _try_lock(path + '.locked') + except (OSError, IOError) as error: + if elapsed < timeout: + sleep_time = min(10, timeout - elapsed) + logging.info( + 'Could not create lockfile; will retry after sleep(%d).', + sleep_time) + elapsed += sleep_time + time.sleep(sleep_time) + continue + raise LockError("Error locking %s (err: %s)" % + (path, str(error))) from error + + +@contextlib.contextmanager +def lock(path, timeout=0): + """Get exclusive lock to path. 
+ + Usage: + import lockfile + with lockfile.lock(path, timeout): + # Do something + pass + + """ + release_fn = _lock(path, timeout) + try: + yield + finally: + release_fn() diff --git a/fuchsia/test/log_manager.py b/fuchsia/test/log_manager.py new file mode 100755 index 000000000000..98b711d57969 --- /dev/null +++ b/fuchsia/test/log_manager.py @@ -0,0 +1,160 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Reads log data from a device.""" + +import argparse +import os +import subprocess +import sys +import time + +from contextlib import AbstractContextManager +from typing import Iterable, Optional, TextIO + +from common import catch_sigterm, read_package_paths, register_common_args, \ + register_device_args, run_continuous_ffx_command, \ + run_ffx_command +from ffx_integration import ScopedFfxConfig, run_symbolizer + + +class LogManager(AbstractContextManager): + """Handles opening and closing file streams for logging purposes.""" + + def __init__(self, logs_dir: Optional[str]) -> None: + self._logs_dir = logs_dir + + # A dictionary with the log file path as the key and a file stream as + # value. + self._log_files = {} + self._log_procs = [] + self._scoped_ffx_log = None + + if self._logs_dir: + self._scoped_ffx_log = ScopedFfxConfig('log.dir', self._logs_dir) + + def __enter__(self): + if self._scoped_ffx_log: + self._scoped_ffx_log.__enter__() + run_ffx_command(('daemon', 'stop'), check=False) + + return self + + def is_logging_enabled(self) -> bool: + """Check whether logging is turned on.""" + + return self._logs_dir is not None + + def add_log_process(self, process: subprocess.Popen) -> None: + """Register a logging process to LogManager to be killed at LogManager + teardown.""" + + self._log_procs.append(process) + + def open_log_file(self, log_file_name: str) -> TextIO: + """Open a file stream with log_file_name in the logs directory.""" + + if not self._logs_dir: + raise Exception('Logging directory is not specified.') + log_file_path = os.path.join(self._logs_dir, log_file_name) + log_file = open(log_file_path, 'w', buffering=1) + self._log_files[log_file_path] = log_file + return log_file + + def stop(self): + """Stop all active logging instances.""" + + for proc in self._log_procs: + proc.kill() + for log in self._log_files.values(): + log.close() + + def __exit__(self, exc_type, exc_value, traceback): + self.stop() + if self._scoped_ffx_log: + self._scoped_ffx_log.__exit__(exc_type, exc_value, traceback) + + # Allow command to fail while ffx team investigates the issue. + run_ffx_command(('daemon', 'stop'), check=False) + + +def start_system_log(log_manager: LogManager, + log_to_stdout: bool, + pkg_paths: Optional[Iterable[str]] = None, + log_args: Optional[Iterable[str]] = None, + target_id: Optional[str] = None) -> None: + """ + Start system logging. + + Args: + log_manager: A LogManager class that manages the log file and process. + log_to_stdout: If set to True, print logs directly to stdout. + pkg_paths: Path to the packages + log_args: Arguments forwarded to `ffx log` command. + target_id: Specify a target to use. + """ + + if not log_manager.is_logging_enabled() and not log_to_stdout: + return + symbol_paths = None + if pkg_paths: + symbol_paths = [] + + # Locate debug symbols for each package. 
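+        # By convention, each package's ids.txt lives next to its .far
+        # archive in the out directory and maps build IDs to unstripped
+        # binaries; run_symbolizer() consumes these paths below.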
+        for pkg_path in pkg_paths:
+            assert os.path.isfile(pkg_path), '%s does not exist' % pkg_path
+            symbol_paths.append(
+                os.path.join(os.path.dirname(pkg_path), 'ids.txt'))
+
+    if log_to_stdout:
+        system_log = sys.stdout
+    else:
+        system_log = log_manager.open_log_file('system_log')
+    log_cmd = ['log', '--raw']
+    if log_args:
+        log_cmd.extend(log_args)
+    if symbol_paths:
+        log_proc = run_continuous_ffx_command(log_cmd,
+                                              target_id,
+                                              stdout=subprocess.PIPE)
+        log_manager.add_log_process(log_proc)
+        log_manager.add_log_process(
+            run_symbolizer(symbol_paths, log_proc.stdout, system_log))
+    else:
+        log_manager.add_log_process(
+            run_continuous_ffx_command(log_cmd, target_id, stdout=system_log))
+
+
+def main():
+    """Stand-alone function for fetching system logs and printing them to
+    the terminal. Runs until the process is killed or interrupted (e.g. the
+    user presses Ctrl-C).
+    """
+
+    catch_sigterm()
+    parser = argparse.ArgumentParser()
+    register_common_args(parser)
+    register_device_args(parser)
+    parser.add_argument('--packages',
+                        action='append',
+                        help='Name of the packages to symbolize.')
+    manager_args, system_log_args = parser.parse_known_args()
+    if manager_args.packages and not manager_args.out_dir:
+        raise ValueError('--out-dir must be specified to symbolize packages.')
+    package_paths = []
+    if manager_args.packages:
+        for package in manager_args.packages:
+            package_paths.extend(
+                read_package_paths(manager_args.out_dir, package))
+    with LogManager(None) as log_manager:
+        try:
+            start_system_log(log_manager, True, package_paths,
+                             system_log_args, manager_args.target_id)
+            while True:
+                time.sleep(10000)
+        except (KeyboardInterrupt, SystemExit):
+            pass
+
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/fuchsia/test/log_manager_unittests.py b/fuchsia/test/log_manager_unittests.py
new file mode 100755
index 000000000000..66830a836a14
--- /dev/null
+++ b/fuchsia/test/log_manager_unittests.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env vpython3
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
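As a usage sketch of the log_manager.py API above (a sketch only: it assumes ffx is configured and a target is reachable, and mirrors what log_manager.main() does when no log directory is given):

    from log_manager import LogManager, start_system_log

    # With no logs directory, LogManager is a pass-through and output goes
    # to stdout; '--since now' is forwarded verbatim to `ffx log`.
    with LogManager(None) as logs:
        start_system_log(logs, True, log_args=['--since', 'now'])
        # ... block here (e.g. time.sleep) while logs stream; the log
        # process is killed when the with-block exits.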
+"""File for testing log_manager.py.""" + +import sys +import unittest +import unittest.mock as mock + +import log_manager + +_LOGS_DIR = 'test_logs_dir' + + +class LogManagerTest(unittest.TestCase): + """Unittests for log_manager.py.""" + + @mock.patch('log_manager.run_continuous_ffx_command') + def test_no_logs(self, mock_ffx) -> None: + """Test |start_system_log| does nothing when logging is off.""" + + log = log_manager.LogManager(None) + log_manager.start_system_log(log, False) + self.assertEqual(mock_ffx.call_count, 0) + + @mock.patch('log_manager.run_continuous_ffx_command') + def test_log_to_stdout(self, mock_ffx) -> None: + """Test |start_system_log| logs to stdout when log manager is off.""" + + log = log_manager.LogManager(None) + log_manager.start_system_log(log, True) + self.assertEqual(mock_ffx.call_args_list[0][1]['stdout'], sys.stdout) + self.assertEqual(mock_ffx.call_count, 1) + + @mock.patch('log_manager.run_continuous_ffx_command') + @mock.patch('builtins.open') + def test_log_to_file(self, mock_open, mock_ffx) -> None: + """Test |start_system_log| logs to log file when log manager is on.""" + + log = log_manager.LogManager(_LOGS_DIR) + log_manager.start_system_log(log, False) + self.assertEqual(mock_ffx.call_args_list[0][1]['stdout'], + mock_open.return_value) + self.assertEqual(mock_ffx.call_count, 1) + + @mock.patch('log_manager.run_continuous_ffx_command') + def test_log_with_log_args(self, mock_ffx) -> None: + """Test log args are used when passed in to |start_system_log|.""" + + log = log_manager.LogManager(None) + log_manager.start_system_log(log, True, log_args=['test_log_args']) + self.assertEqual(mock_ffx.call_args_list[0][0][0], + ['log', '--raw', 'test_log_args']) + self.assertEqual(mock_ffx.call_count, 1) + + @mock.patch('log_manager.run_continuous_ffx_command') + def test_log_with_symbols(self, mock_ffx) -> None: + """Test symbols are used when pkg_paths are set.""" + + log = log_manager.LogManager(_LOGS_DIR) + with mock.patch('os.path.isfile', return_value=True), \ + mock.patch('builtins.open'), \ + mock.patch('log_manager.run_symbolizer'): + log_manager.start_system_log(log, False, pkg_paths=['test_pkg']) + log.stop() + self.assertEqual(mock_ffx.call_count, 1) + self.assertEqual(mock_ffx.call_args_list[0][0][0], ['log', '--raw']) + + def test_no_logging_dir_exception(self) -> None: + """Tests empty LogManager throws an exception on |open_log_file|.""" + + log = log_manager.LogManager(None) + with self.assertRaises(Exception): + log.open_log_file('test_log_file') + + @mock.patch('log_manager.ScopedFfxConfig') + @mock.patch('log_manager.run_ffx_command') + def test_log_manager(self, mock_ffx, mock_scoped_config) -> None: + """Tests LogManager as a context manager.""" + + context_mock = mock.Mock() + mock_scoped_config.return_value = context_mock + context_mock.__enter__ = mock.Mock(return_value=None) + context_mock.__exit__ = mock.Mock(return_value=None) + with log_manager.LogManager(_LOGS_DIR): + pass + self.assertEqual(mock_ffx.call_count, 2) + + def test_main_exception(self) -> None: + """Tests |main| function to throw exception on incompatible flags.""" + + with mock.patch('sys.argv', + ['log_manager.py', '--packages', 'test_package']): + with self.assertRaises(ValueError): + log_manager.main() + + @mock.patch('log_manager.read_package_paths') + @mock.patch('log_manager.start_system_log') + def test_main(self, mock_system_log, mock_read_paths) -> None: + """Tests |main| function.""" + + with mock.patch('sys.argv', [ + 'log_manager.py', '--packages', 
'test_package', '--out-dir', + 'test_out_dir' + ]): + with mock.patch('log_manager.time.sleep', + side_effect=KeyboardInterrupt): + log_manager.main() + self.assertEqual(mock_system_log.call_count, 1) + self.assertEqual(mock_read_paths.call_count, 1) + + +if __name__ == '__main__': + unittest.main() diff --git a/fuchsia/test/publish_package.py b/fuchsia/test/publish_package.py new file mode 100755 index 000000000000..5c566544af93 --- /dev/null +++ b/fuchsia/test/publish_package.py @@ -0,0 +1,68 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Implements commands for managing Fuchsia repos via the pm tool.""" + +import argparse +import os +import subprocess +import sys + +from typing import Iterable + +from common import SDK_TOOLS_DIR, read_package_paths, register_common_args + +_pm_tool = os.path.join(SDK_TOOLS_DIR, 'pm') + + +def publish_packages(packages: Iterable[str], + repo: str, + new_repo: bool = False) -> None: + """Publish packages to a repo directory, initializing it if necessary.""" + if new_repo: + subprocess.run([_pm_tool, 'newrepo', '-repo', repo], check=True) + for package in packages: + subprocess.run([_pm_tool, 'publish', '-a', '-r', repo, '-f', package], + check=True) + + +def register_package_args(parser: argparse.ArgumentParser, + allow_temp_repo: bool = False) -> None: + """Register common arguments for package publishing.""" + package_args = parser.add_argument_group( + 'package', 'Arguments for package publishing.') + package_args.add_argument('--packages', + action='append', + help='Paths of the package archives to install') + package_args.add_argument('--repo', + help='Directory packages will be published to.') + if allow_temp_repo: + package_args.add_argument( + '--no-repo-init', + action='store_true', + default=False, + help='Do not initialize the package repository.') + + +def main(): + """Stand-alone function for publishing packages.""" + parser = argparse.ArgumentParser() + register_package_args(parser) + register_common_args(parser) + args = parser.parse_args() + if not args.repo: + raise ValueError('Must specify directory to publish packages.') + if not args.packages: + raise ValueError('Must specify packages to publish.') + if args.out_dir: + package_paths = [] + for package in args.packages: + package_paths.extend(read_package_paths(args.out_dir, package)) + else: + package_paths = args.packages + publish_packages(package_paths, args.repo) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/fuchsia/test/publish_package_unittests.py b/fuchsia/test/publish_package_unittests.py new file mode 100755 index 000000000000..2bb22da963c9 --- /dev/null +++ b/fuchsia/test/publish_package_unittests.py @@ -0,0 +1,103 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
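For context, a minimal usage sketch of the publish_package.py helper above (the .far path and repo directory are hypothetical):

    import publish_package

    # Runs `pm newrepo -repo out/fuchsia/repo` once, then
    # `pm publish -a -r out/fuchsia/repo -f out/fuchsia/base.far`.
    publish_package.publish_packages(['out/fuchsia/base.far'],
                                     'out/fuchsia/repo',
                                     new_repo=True)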
+"""File for testing publish_package.py.""" + +import argparse +import unittest +import unittest.mock as mock + +from io import StringIO + +import publish_package + +_PACKAGES = ['test_package'] +_REPO = 'test_repo' + + +class PublishPackageTest(unittest.TestCase): + """Unittests for publish_package.py.""" + + def setUp(self) -> None: + self._subprocess_patcher = mock.patch('publish_package.subprocess.run') + self._subprocess_mock = self._subprocess_patcher.start() + self.addCleanup(self._subprocess_mock.stop) + + def test_new_repo(self) -> None: + """Test setting |new_repo| to True in |publish_packages|.""" + + publish_package.publish_packages(_PACKAGES, _REPO, True) + self.assertEqual(self._subprocess_mock.call_count, 2) + first_call = self._subprocess_mock.call_args_list[0] + self.assertEqual(['newrepo', '-repo', _REPO], first_call[0][0][1:]) + second_call = self._subprocess_mock.call_args_list[1] + self.assertEqual(['publish', '-a', '-r', _REPO, '-f', _PACKAGES[0]], + second_call[0][0][1:]) + + def test_no_new_repo(self) -> None: + """Test setting |new_repo| to False in |publish_packages|.""" + + publish_package.publish_packages(['test_package'], 'test_repo', False) + self.assertEqual(self._subprocess_mock.call_count, 1) + + + def test_allow_temp_repo(self) -> None: + """Test setting |allow_temp_repo| to True in |register_package_args|.""" + + parser = argparse.ArgumentParser() + publish_package.register_package_args(parser, True) + args = parser.parse_args(['--no-repo-init']) + self.assertEqual(args.no_repo_init, True) + + @mock.patch('sys.stderr', new_callable=StringIO) + def test_not_allow_temp_repo(self, mock_stderr) -> None: + """Test setting |allow_temp_repo| to False in + |register_package_args|.""" + + parser = argparse.ArgumentParser() + publish_package.register_package_args(parser) + with self.assertRaises(SystemExit): + parser.parse_args(['--no-repo-init']) + self.assertRegex(mock_stderr.getvalue(), 'unrecognized arguments') + + def test_main_no_repo_flag(self) -> None: + """Tests that not specifying packages raise a ValueError.""" + + with mock.patch('sys.argv', ['publish_package.py', '--repo', _REPO]): + with self.assertRaises(ValueError): + publish_package.main() + + def test_main_no_packages_flag(self) -> None: + """Tests that not specifying directory raise a ValueError.""" + + with mock.patch('sys.argv', + ['publish_package.py', '--packages', _PACKAGES[0]]): + with self.assertRaises(ValueError): + publish_package.main() + + def test_main_no_out_dir_flag(self) -> None: + """Tests |main| with `out_dir` omitted.""" + + with mock.patch('sys.argv', [ + 'publish_package.py', '--packages', _PACKAGES[0], '--repo', + _REPO + ]): + publish_package.main() + self.assertEqual(self._subprocess_mock.call_count, 1) + + @mock.patch('publish_package.read_package_paths') + def test_main(self, read_mock) -> None: + """Tests |main|.""" + + read_mock.return_value = ['out/test/package/path'] + with mock.patch('sys.argv', [ + 'publish_package.py', '--packages', _PACKAGES[0], '--repo', + _REPO, '--out-dir', 'out/test' + ]): + publish_package.main() + self.assertEqual(self._subprocess_mock.call_count, 1) + + +if __name__ == '__main__': + unittest.main() diff --git a/fuchsia/test/pylintrc b/fuchsia/test/pylintrc new file mode 100644 index 000000000000..a144b8066660 --- /dev/null +++ b/fuchsia/test/pylintrc @@ -0,0 +1,26 @@ +[MESSAGES CONTROL] + +# Disable the message, report, category or checker with the given id(s). 
+disable=fixme, + +# fixme +# This complains about TODOs, which are perfectly valid to have. + +# Suppression for invalid-name error for PRESUBMIT.py file. +good-names=i,j,k,f,PRESUBMIT + +[REPORTS] + +reports=no + +[DESIGN] + +# Maximum number of arguments for function / method +max-args=6 + +# Maximum number of instance attributes +max-attributes=10 + +[FORMAT] + +max-line-length=80 diff --git a/fuchsia/test/run_blink_test.py b/fuchsia/test/run_blink_test.py new file mode 100644 index 000000000000..ba71aa69ea38 --- /dev/null +++ b/fuchsia/test/run_blink_test.py @@ -0,0 +1,36 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Implements commands for running blink web tests.""" + +import os +import subprocess + +from argparse import Namespace +from typing import Optional + +from common import DIR_SRC_ROOT +from test_runner import TestRunner + +_BLINK_TEST_SCRIPT = os.path.join(DIR_SRC_ROOT, 'third_party', 'blink', + 'tools', 'run_web_tests.py') + + +class BlinkTestRunner(TestRunner): + """Test runner for running blink web tests.""" + + def __init__(self, out_dir: str, test_args: Namespace, + target_id: Optional[str]) -> None: + super().__init__(out_dir, test_args, ['content_shell'], target_id) + + # TODO(crbug.com/1278939): Remove when blink tests use CFv2 content_shell. + @staticmethod + def is_cfv2() -> bool: + return False + + def run_test(self): + test_cmd = [_BLINK_TEST_SCRIPT, '-t', os.path.basename(self._out_dir)] + + if self._test_args: + test_cmd.extend(self._test_args) + return subprocess.run(test_cmd, check=True) diff --git a/fuchsia/test/run_executable_test.py b/fuchsia/test/run_executable_test.py new file mode 100755 index 000000000000..7c6772be7e27 --- /dev/null +++ b/fuchsia/test/run_executable_test.py @@ -0,0 +1,263 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Implements commands for standalone CFv2 test executables.""" + +import argparse +import logging +import os +import shutil +import subprocess +import sys + +from typing import List, Optional + +from common import get_component_uri, get_host_arch, \ + register_common_args, register_device_args, \ + register_log_args +from compatible_utils import map_filter_file_to_package_file +from ffx_integration import FfxTestRunner, run_symbolizer +from test_runner import TestRunner +from test_server import setup_test_server + +DEFAULT_TEST_SERVER_CONCURRENCY = 4 + + +def _copy_custom_output_file(test_runner: FfxTestRunner, file: str, + dest: str) -> None: + """Copy custom test output file from the device to the host.""" + + artifact_dir = test_runner.get_custom_artifact_directory() + if not artifact_dir: + logging.error( + 'Failed to parse custom artifact directory from test summary ' + 'output files. Not copying %s from the device', file) + return + shutil.copy(os.path.join(artifact_dir, file), dest) + + +def _copy_coverage_files(test_runner: FfxTestRunner, dest: str) -> None: + """Copy debug data file from the device to the host if it exists.""" + + coverage_dir = test_runner.get_debug_data_directory() + if not coverage_dir: + logging.info( + 'Failed to parse coverage data directory from test summary ' + 'output files. 
Not copying coverage files from the device.') + return + shutil.copytree(coverage_dir, dest, dirs_exist_ok=True) + + +def _get_vulkan_args(use_vulkan: Optional[str]) -> List[str]: + """Helper function to set vulkan related flag.""" + + vulkan_args = [] + if not use_vulkan: + if get_host_arch() == 'x64': + # TODO(crbug.com/1261646) Remove once Vulkan is enabled by + # default. + use_vulkan = 'native' + else: + # Use swiftshader on arm64 by default because most arm64 bots + # currently don't support Vulkan emulation. + use_vulkan = 'swiftshader' + vulkan_args.append('--ozone-platform=headless') + vulkan_args.append(f'--use-vulkan={use_vulkan}') + return vulkan_args + + +class ExecutableTestRunner(TestRunner): + """Test runner for running standalone test executables.""" + + def __init__( # pylint: disable=too-many-arguments + self, + out_dir: str, + test_args: List[str], + test_name: str, + target_id: Optional[str], + code_coverage_dir: str, + logs_dir: Optional[str] = None) -> None: + super().__init__(out_dir, test_args, [test_name], target_id) + if not self._test_args: + self._test_args = [] + self._test_name = test_name + self._code_coverage_dir = os.path.basename(code_coverage_dir) + self._custom_artifact_directory = None + self._isolated_script_test_output = None + self._isolated_script_test_perf_output = None + self._logs_dir = logs_dir + self._test_launcher_summary_output = None + self._test_server = None + + def _get_args(self) -> List[str]: + parser = argparse.ArgumentParser() + parser.add_argument( + '--isolated-script-test-output', + help='If present, store test results on this path.') + parser.add_argument('--isolated-script-test-perf-output', + help='If present, store chartjson results on this ' + 'path.') + parser.add_argument( + '--test-launcher-shard-index', + type=int, + default=os.environ.get('GTEST_SHARD_INDEX'), + help='Index of this instance amongst swarming shards.') + parser.add_argument( + '--test-launcher-summary-output', + help='Where the test launcher will output its json.') + parser.add_argument( + '--test-launcher-total-shards', + type=int, + default=os.environ.get('GTEST_TOTAL_SHARDS'), + help='Total number of swarming shards of this suite.') + parser.add_argument( + '--test-launcher-filter-file', + help='Filter file(s) passed to target test process. Use ";" to ' + 'separate multiple filter files.') + parser.add_argument('--test-launcher-jobs', + type=int, + help='Sets the number of parallel test jobs.') + parser.add_argument('--enable-test-server', + action='store_true', + default=False, + help='Enable Chrome test server spawner.') + parser.add_argument('--test-arg', + dest='test_args', + action='append', + help='Legacy flag to pass in arguments for ' + 'the test process. 
These arguments can now be ' + 'passed in without a preceding "--" flag.') + parser.add_argument('--use-vulkan', + help='\'native\', \'swiftshader\' or \'none\'.') + args, child_args = parser.parse_known_args(self._test_args) + if args.isolated_script_test_output: + self._isolated_script_test_output = args.isolated_script_test_output + child_args.append( + '--isolated-script-test-output=/custom_artifacts/%s' % + os.path.basename(self._isolated_script_test_output)) + if args.isolated_script_test_perf_output: + self._isolated_script_test_perf_output = \ + args.isolated_script_test_perf_output + child_args.append( + '--isolated-script-test-perf-output=/custom_artifacts/%s' % + os.path.basename(self._isolated_script_test_perf_output)) + if args.test_launcher_shard_index is not None: + child_args.append('--test-launcher-shard-index=%d' % + args.test_launcher_shard_index) + if args.test_launcher_total_shards is not None: + child_args.append('--test-launcher-total-shards=%d' % + args.test_launcher_total_shards) + if args.test_launcher_summary_output: + self._test_launcher_summary_output = \ + args.test_launcher_summary_output + child_args.append( + '--test-launcher-summary-output=/custom_artifacts/%s' % + os.path.basename(self._test_launcher_summary_output)) + if args.test_launcher_filter_file: + test_launcher_filter_files = map( + map_filter_file_to_package_file, + args.test_launcher_filter_file.split(';')) + child_args.append('--test-launcher-filter-file=' + + ';'.join(test_launcher_filter_files)) + if args.test_launcher_jobs is not None: + test_concurrency = args.test_launcher_jobs + else: + test_concurrency = DEFAULT_TEST_SERVER_CONCURRENCY + if args.enable_test_server: + self._test_server, spawner_url_base = setup_test_server( + self._target_id, test_concurrency) + child_args.append('--remote-test-server-spawner-url-base=%s' % + spawner_url_base) + child_args.extend(_get_vulkan_args(args.use_vulkan)) + if args.test_args: + child_args.extend(args.test_args) + return child_args + + def _postprocess(self, test_runner: FfxTestRunner) -> None: + if self._test_server: + self._test_server.Stop() + if self._test_launcher_summary_output: + _copy_custom_output_file( + test_runner, + os.path.basename(self._test_launcher_summary_output), + self._test_launcher_summary_output) + if self._isolated_script_test_output: + _copy_custom_output_file( + test_runner, + os.path.basename(self._isolated_script_test_output), + self._isolated_script_test_output) + if self._isolated_script_test_perf_output: + _copy_custom_output_file( + test_runner, + os.path.basename(self._isolated_script_test_perf_output), + self._isolated_script_test_perf_output) + _copy_coverage_files(test_runner, self._code_coverage_dir) + + def run_test(self) -> subprocess.Popen: + test_args = self._get_args() + with FfxTestRunner(self._logs_dir) as test_runner: + test_proc = test_runner.run_test( + get_component_uri(self._test_name), test_args, self._target_id) + + symbol_paths = [] + for pkg_path in self._package_deps.values(): + symbol_paths.append( + os.path.join(os.path.dirname(pkg_path), 'ids.txt')) + # Symbolize output from test process and print to terminal. + symbolizer_proc = run_symbolizer(symbol_paths, test_proc.stdout, + sys.stdout) + symbolizer_proc.communicate() + + if test_proc.wait() == 0: + logging.info('Process exited normally with status code 0.') + else: + # The test runner returns an error status code if *any* + # tests fail, so we should proceed anyway. 
+ logging.warning('Process exited with status code %d.', + test_proc.returncode) + self._postprocess(test_runner) + return test_proc + + +def create_executable_test_runner(runner_args: argparse.Namespace, + test_args: List[str]): + """Helper for creating an ExecutableTestRunner.""" + + return ExecutableTestRunner(runner_args.out_dir, test_args, + runner_args.test_type, runner_args.target_id, + runner_args.code_coverage_dir, + runner_args.logs_dir) + + +def register_executable_test_args(parser: argparse.ArgumentParser) -> None: + """Register common arguments for ExecutableTestRunner.""" + + test_args = parser.add_argument_group('test', 'arguments for test running') + test_args.add_argument('--code-coverage-dir', + default=os.getcwd(), + help='Directory to place code coverage ' + 'information. Only relevant when the target was ' + 'built with |fuchsia_code_coverage| set to true. ' + 'Defaults to current directory.') + test_args.add_argument('--test-name', + dest='test_type', + help='Name of the test package (e.g. ' + 'unit_tests).') + + +def main(): + """Stand-alone function for running executable tests.""" + + parser = argparse.ArgumentParser() + register_common_args(parser) + register_device_args(parser) + register_log_args(parser) + register_executable_test_args(parser) + runner_args, test_args = parser.parse_known_args() + runner = create_executable_test_runner(runner_args, test_args) + return runner.run_test().returncode + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/fuchsia/test/run_pytype.py b/fuchsia/test/run_pytype.py new file mode 100755 index 000000000000..8e603313ca15 --- /dev/null +++ b/fuchsia/test/run_pytype.py @@ -0,0 +1,42 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Simple helper script to run pytype on //build/fuchsia/test code.""" + +import os +import sys + +from coveragetest import COVERED_FILES + +FUCHSIA_TEST_DIR = os.path.abspath(os.path.dirname(__file__)) +DIR_SRC_DIR = os.path.realpath(os.path.join(FUCHSIA_TEST_DIR, '..', '..', + '..')) + +sys.path.append(os.path.join(FUCHSIA_TEST_DIR, '..', '..', '..', 'testing')) + +from pytype_common import pytype_runner # pylint: disable=wrong-import-position + +EXTRA_PATHS_COMPONENTS = [ + ('build', 'util', 'lib', 'common'), +] +EXTRA_PATHS = [os.path.join(DIR_SRC_DIR, *p) for p in EXTRA_PATHS_COMPONENTS] +EXTRA_PATHS.append(FUCHSIA_TEST_DIR) + +FILES_AND_DIRECTORIES_TO_CHECK = [ + os.path.join(FUCHSIA_TEST_DIR, f) for f in COVERED_FILES +] +TEST_NAME = 'fuchsia_pytype' +TEST_LOCATION = "//build/fuchsia/test/run_pytype.py" + + +def main() -> int: + """Run pytype check.""" + + return pytype_runner.run_pytype(TEST_NAME, TEST_LOCATION, + FILES_AND_DIRECTORIES_TO_CHECK, + EXTRA_PATHS, FUCHSIA_TEST_DIR) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/fuchsia/test/run_telemetry_test.py b/fuchsia/test/run_telemetry_test.py new file mode 100644 index 000000000000..7556b815cb28 --- /dev/null +++ b/fuchsia/test/run_telemetry_test.py @@ -0,0 +1,61 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
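One detail of run_executable_test.py above worth spelling out: _get_args() re-roots host output paths under /custom_artifacts in the child arguments, keeping the original host path so _postprocess() can copy the results back off the device. A small sketch with a hypothetical path:

    import os

    host_path = '/tmp/summary.json'  # --test-launcher-summary-output value
    child_arg = ('--test-launcher-summary-output=/custom_artifacts/%s' %
                 os.path.basename(host_path))
    # child_arg is what the test component sees; after the run,
    # _copy_custom_output_file() copies summary.json from the device's
    # custom-artifact directory back to /tmp/summary.json.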
+"""Implements commands for running GPU tests.""" + +import argparse +import os +import subprocess + +from typing import List, Optional + +from common import DIR_SRC_ROOT +from test_runner import TestRunner + +_GPU_TEST_SCRIPT = os.path.join(DIR_SRC_ROOT, 'content', 'test', 'gpu', + 'run_gpu_integration_test.py') +_PERF_TEST_SCRIPT = os.path.join(DIR_SRC_ROOT, 'tools', 'perf', + 'run_benchmark') + + +class TelemetryTestRunner(TestRunner): + """Test runner for running GPU tests.""" + + def __init__(self, test_type: str, out_dir: str, test_args: List[str], + target_id: Optional[str]) -> None: + parser = argparse.ArgumentParser() + parser.add_argument( + '--browser', help='The browser to use for Telemetry based tests.') + args, _ = parser.parse_known_args(test_args) + + if args.browser == 'web-engine-shell': + packages = ['web_engine_shell'] + elif args.browser == 'fuchsia-chrome': + packages = ['chrome'] + elif args.browser == 'cast-streaming-shell': + packages = ['cast_streaming_shell'] + else: + raise Exception('Unknown browser %s' % args.browser) + + if test_type == 'gpu': + self._test_script = _GPU_TEST_SCRIPT + elif test_type == 'perf': + self._test_script = _PERF_TEST_SCRIPT + else: + raise ValueError('Test type can only be |gpu| or |perf|.') + + super().__init__(out_dir, test_args, packages, target_id) + + # TODO(crbug.com/1345390): Remove when Telemetry tests use CFv2 components. + @staticmethod + def is_cfv2() -> bool: + return False + + def run_test(self): + test_cmd = [self._test_script] + if self._test_args: + test_cmd.extend(self._test_args) + test_cmd.extend(['--chromium-output-directory', self._out_dir]) + if self._target_id: + test_cmd.extend(['--fuchsia-target-id', self._target_id]) + return subprocess.run(test_cmd, check=True) diff --git a/fuchsia/test/run_test.py b/fuchsia/test/run_test.py new file mode 100755 index 000000000000..3fc3ac91d253 --- /dev/null +++ b/fuchsia/test/run_test.py @@ -0,0 +1,127 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""Implements commands for running tests E2E on a Fuchsia device.""" + +import argparse +import sys +import tempfile + +from contextlib import ExitStack +from typing import List + +from common import register_common_args, register_device_args, \ + register_log_args, resolve_packages, run_ffx_command, \ + set_ffx_isolate_dir +from compatible_utils import running_unattended +from ffx_integration import ScopedFfxConfig, test_connection +from flash_device import register_update_args, update +from log_manager import LogManager, start_system_log +from publish_package import publish_packages, register_package_args +from run_blink_test import BlinkTestRunner +from run_executable_test import create_executable_test_runner, \ + register_executable_test_args +from run_telemetry_test import TelemetryTestRunner +from run_webpage_test import WebpageTestRunner +from serve_repo import register_serve_args, serve_repository +from start_emulator import create_emulator_from_args, register_emulator_args +from test_runner import TestRunner +from ermine_ctl import ErmineCtl + + +def _get_test_runner(runner_args: argparse.Namespace, + test_args: List[str]) -> TestRunner: + """Initialize a suitable TestRunner class.""" + + if runner_args.test_type == 'blink': + return BlinkTestRunner(runner_args.out_dir, test_args, + runner_args.target_id) + if runner_args.test_type in ['gpu', 'perf']: + return TelemetryTestRunner(runner_args.test_type, runner_args.out_dir, + test_args, runner_args.target_id) + if runner_args.test_type in ['webpage']: + return WebpageTestRunner(runner_args.out_dir, test_args, + runner_args.target_id) + return create_executable_test_runner(runner_args, test_args) + + +def main(): + """E2E method for installing packages and running a test.""" + parser = argparse.ArgumentParser() + parser.add_argument( + 'test_type', + help='The type of test to run. Options include \'blink\', \'gpu\', ' + 'or in the case of executable tests, the test name.') + parser.add_argument('--device', + '-d', + action='store_true', + default=False, + help='Use an existing device.') + + # Register arguments + register_common_args(parser) + register_device_args(parser) + register_emulator_args(parser) + register_executable_test_args(parser) + register_update_args(parser, default_os_check='ignore', default_pave=False) + register_log_args(parser) + register_package_args(parser, allow_temp_repo=True) + register_serve_args(parser) + + # Treat unrecognized arguments as test specific arguments. + runner_args, test_args = parser.parse_known_args() + + if not runner_args.out_dir: + raise ValueError('--out-dir must be specified.') + + if runner_args.target_id: + runner_args.device = True + + with ExitStack() as stack: + if running_unattended(): + set_ffx_isolate_dir( + stack.enter_context(tempfile.TemporaryDirectory())) + run_ffx_command(('daemon', 'stop'), check=False) + if running_unattended(): + stack.enter_context( + ScopedFfxConfig('repository.server.listen', '"[::]:0"')) + log_manager = stack.enter_context(LogManager(runner_args.logs_dir)) + if runner_args.device: + update(runner_args.system_image_dir, runner_args.os_check, + runner_args.target_id, runner_args.serial_num, + runner_args.pave) + else: + runner_args.target_id = stack.enter_context( + create_emulator_from_args(runner_args)) + + test_connection(runner_args.target_id) + + test_runner = _get_test_runner(runner_args, test_args) + package_deps = test_runner.package_deps + + if not runner_args.repo: + # Create a directory that serves as a temporary repository. 
+            runner_args.repo = stack.enter_context(
+                tempfile.TemporaryDirectory())
+
+        publish_packages(package_deps.values(), runner_args.repo,
+                         not runner_args.no_repo_init)
+
+        stack.enter_context(serve_repository(runner_args))
+
+        # Start system logging, after all possible restarts of the ffx daemon
+        # so that logging will not be interrupted.
+        start_system_log(log_manager, False, package_deps.values(),
+                         ('--since', 'now'), runner_args.target_id)
+
+        ermine = ErmineCtl(runner_args.target_id)
+        if ermine.exists:
+            ermine.take_to_shell()
+
+        resolve_packages(package_deps.keys(), runner_args.target_id)
+        return test_runner.run_test().returncode
+
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/fuchsia/test/run_webpage_test.py b/fuchsia/test/run_webpage_test.py
new file mode 100644
index 000000000000..31fa0a32ca48
--- /dev/null
+++ b/fuchsia/test/run_webpage_test.py
@@ -0,0 +1,60 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Implements commands for running webpage tests."""
+
+import argparse
+import logging
+import time
+
+from typing import List, Optional
+
+from common import catch_sigterm, run_continuous_ffx_command
+from test_runner import TestRunner
+
+
+class WebpageTestRunner(TestRunner):
+    """Test runner that keeps a webpage open on the device."""
+
+    def __init__(self, out_dir: str, test_args: List[str],
+                 target_id: Optional[str]) -> None:
+        parser = argparse.ArgumentParser()
+        parser.add_argument(
+            '--browser',
+            choices=['web-engine-shell', 'chrome'],
+            help='The browser to run the webpage in.')
+        args, _ = parser.parse_known_args(test_args)
+
+        if args.browser == 'web-engine-shell':
+            packages = ['web_engine_shell']
+        else:
+            packages = ['chrome']
+
+        super().__init__(out_dir, test_args, packages, target_id)
+
+    def run_test(self):
+        catch_sigterm()
+        browser_cmd = [
+            'test',
+            'run',
+            '-t',
+            '3600',  # Keep the webpage running for an hour.
+            f'fuchsia-pkg://fuchsia.com/{self._packages[0]}#meta/'
+            f'{self._packages[0]}.cm'
+        ]
+        browser_cmd.extend(
+            ['--', '--web-engine-package-name=web_engine_with_webui'])
+        if self._test_args:
+            browser_cmd.extend(self._test_args)
+        logging.info('Starting %s', self._packages[0])
+        try:
+            browser_proc = run_continuous_ffx_command(browser_cmd)
+            while True:
+                time.sleep(10000)
+        except KeyboardInterrupt:
+            logging.info('Ctrl-C received; shutting down the webpage.')
+            browser_proc.kill()
+        except SystemExit:
+            logging.info('SIGTERM received; shutting down the webpage.')
+            browser_proc.kill()
+        return browser_proc
diff --git a/fuchsia/test/serve_repo.py b/fuchsia/test/serve_repo.py
new file mode 100755
index 000000000000..7270bb9ba778
--- /dev/null
+++ b/fuchsia/test/serve_repo.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env vpython3
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Implements commands for serving a TUF repository."""
+
+import argparse
+import contextlib
+import sys
+
+from typing import Iterator, Optional
+
+from common import REPO_ALIAS, register_device_args, run_ffx_command
+
+_REPO_NAME = 'chromium-test-package-server'
+
+
+def _stop_serving(repo_name: str, target: Optional[str]) -> None:
+    """Stop serving a repository."""
+
+    # Attempt to clean up.
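+    # All three commands run with check=False: the registration, repo entry,
+    # or server may already be gone, and cleanup should continue through the
+    # remaining steps regardless.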
+ run_ffx_command(['target', 'repository', 'deregister', '-r', repo_name], + target, + check=False) + run_ffx_command(['repository', 'remove', repo_name], check=False) + run_ffx_command(['repository', 'server', 'stop'], check=False) + + +def _start_serving(repo_dir: str, repo_name: str, + target: Optional[str]) -> None: + """Start serving a repository to a target device. + + Args: + repo_dir: directory the repository is served from. + repo_name: repository name. + target: Fuchsia device the repository is served to. + """ + + run_ffx_command(('config', 'set', 'repository.server.mode', '\"ffx\"')) + + run_ffx_command(['repository', 'server', 'start']) + run_ffx_command(['repository', 'add-from-pm', repo_dir, '-r', repo_name]) + run_ffx_command([ + 'target', 'repository', 'register', '-r', repo_name, '--alias', + REPO_ALIAS + ], target) + + +def register_serve_args(arg_parser: argparse.ArgumentParser) -> None: + """Register common arguments for repository serving.""" + + serve_args = arg_parser.add_argument_group('serve', + 'repo serving arguments') + serve_args.add_argument('--serve-repo', + dest='repo', + help='Directory the repository is served from.') + serve_args.add_argument('--repo-name', + default=_REPO_NAME, + help='Name of the repository.') + + +def run_serve_cmd(cmd: str, args: argparse.Namespace) -> None: + """Helper for running serve commands.""" + + if cmd == 'start': + _start_serving(args.repo, args.repo_name, args.target_id) + else: + _stop_serving(args.repo_name, args.target_id) + + +@contextlib.contextmanager +def serve_repository(args: argparse.Namespace) -> Iterator[None]: + """Context manager for serving a repository.""" + run_serve_cmd('start', args) + try: + yield None + finally: + run_serve_cmd('stop', args) + + +def main(): + """Stand-alone function for serving a repository.""" + + parser = argparse.ArgumentParser() + parser.add_argument('cmd', + choices=['start', 'stop'], + help='Choose to start|stop repository serving.') + register_device_args(parser) + register_serve_args(parser) + args = parser.parse_args() + if args.cmd == 'start' and not args.repo: + raise ValueError('Directory the repository is serving from needs ' + 'to be specified.') + run_serve_cmd(args.cmd, args) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/fuchsia/test/serve_repo_unittests.py b/fuchsia/test/serve_repo_unittests.py new file mode 100755 index 000000000000..de3fa62cca61 --- /dev/null +++ b/fuchsia/test/serve_repo_unittests.py @@ -0,0 +1,89 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
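A usage sketch of the serve_repository() context manager above (values are hypothetical, mirroring the flags registered by register_serve_args() and register_device_args()):

    import argparse

    import serve_repo

    args = argparse.Namespace(repo='out/fuchsia/repo',
                              repo_name='chromium-test-package-server',
                              target_id=None)
    with serve_repo.serve_repository(args):
        pass  # packages in out/fuchsia/repo resolve on the target here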
+"""File for testing serve_repo.py.""" + +import argparse +import unittest +import unittest.mock as mock + +import serve_repo + +from common import REPO_ALIAS + +_REPO_DIR = 'test_repo_dir' +_REPO_NAME = 'test_repo_name' +_TARGET = 'test_target' + + +class ServeRepoTest(unittest.TestCase): + """Unittests for serve_repo.py.""" + + def setUp(self) -> None: + self._namespace = argparse.Namespace(repo=_REPO_DIR, + repo_name=_REPO_NAME, + target_id=_TARGET) + + @mock.patch('serve_repo.run_ffx_command') + def test_run_serve_cmd_start(self, mock_ffx) -> None: + """Test |run_serve_cmd| function for start.""" + + serve_repo.run_serve_cmd('start', self._namespace) + self.assertEqual(mock_ffx.call_count, 4) + second_call = mock_ffx.call_args_list[1] + self.assertEqual(['repository', 'server', 'start'], second_call[0][0]) + third_call = mock_ffx.call_args_list[2] + self.assertEqual( + ['repository', 'add-from-pm', _REPO_DIR, '-r', _REPO_NAME], + third_call[0][0]) + fourth_call = mock_ffx.call_args_list[3] + self.assertEqual([ + 'target', 'repository', 'register', '-r', _REPO_NAME, '--alias', + REPO_ALIAS + ], fourth_call[0][0]) + self.assertEqual(_TARGET, fourth_call[0][1]) + + @mock.patch('serve_repo.run_ffx_command') + def test_run_serve_cmd_stop(self, mock_ffx) -> None: + """Test |run_serve_cmd| function for stop.""" + + serve_repo.run_serve_cmd('stop', self._namespace) + self.assertEqual(mock_ffx.call_count, 3) + first_call = mock_ffx.call_args_list[0] + self.assertEqual( + ['target', 'repository', 'deregister', '-r', _REPO_NAME], + first_call[0][0]) + self.assertEqual(_TARGET, first_call[0][1]) + second_call = mock_ffx.call_args_list[1] + self.assertEqual(['repository', 'remove', _REPO_NAME], + second_call[0][0]) + third_call = mock_ffx.call_args_list[2] + self.assertEqual(['repository', 'server', 'stop'], third_call[0][0]) + + @mock.patch('serve_repo.run_serve_cmd') + def test_serve_repository(self, mock_serve) -> None: + """Tests |serve_repository| context manager.""" + + with serve_repo.serve_repository(self._namespace): + self.assertEqual(mock_serve.call_count, 1) + self.assertEqual(mock_serve.call_count, 2) + + def test_main_start_no_serve_repo_flag(self) -> None: + """Tests not specifying directory for start raises a ValueError.""" + + with mock.patch('sys.argv', ['serve_repo.py', 'start']): + with self.assertRaises(ValueError): + serve_repo.main() + + @mock.patch('serve_repo.run_serve_cmd') + def test_main_stop(self, mock_serve) -> None: + """Tests |main| function.""" + + with mock.patch('sys.argv', ['serve_repo.py', 'stop']): + serve_repo.main() + self.assertEqual(mock_serve.call_count, 1) + + +if __name__ == '__main__': + unittest.main() diff --git a/fuchsia/test/start_emulator.py b/fuchsia/test/start_emulator.py new file mode 100755 index 000000000000..cd16505f47a7 --- /dev/null +++ b/fuchsia/test/start_emulator.py @@ -0,0 +1,83 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+"""Provides a class for managing emulators.""" + +import argparse +import logging +import sys +import time + +from contextlib import AbstractContextManager + +from common import catch_sigterm, register_log_args +from ffx_emulator import FfxEmulator + + +def register_emulator_args(parser: argparse.ArgumentParser, + enable_graphics: bool = False) -> None: + """Register emulator specific arguments.""" + femu_args = parser.add_argument_group('emulator', + 'emulator startup arguments.') + femu_args.add_argument('--custom-image', + dest='product_bundle', + help='Backwards compatible flag that specifies an ' + 'image used for booting up the emulator.') + if enable_graphics: + femu_args.add_argument('--disable-graphics', + action='store_false', + dest='enable_graphics', + help='Start emulator in headless mode.') + else: + femu_args.add_argument('--enable-graphics', + action='store_true', + help='Start emulator with graphics.') + femu_args.add_argument( + '--hardware-gpu', + action='store_true', + help='Use host GPU hardware instead of Swiftshader.') + femu_args.add_argument( + '--product-bundle', + help='Specify a product bundle used for booting the ' + 'emulator. Defaults to the terminal product.') + femu_args.add_argument('--with-network', + action='store_true', + help='Run emulator with emulated nic via tun/tap.') + femu_args.add_argument('--everlasting', + action='store_true', + help='If the emulator should be long-living.') + + +def create_emulator_from_args( + args: argparse.Namespace) -> AbstractContextManager: + """Helper method for initializing an FfxEmulator class with parsed + arguments.""" + return FfxEmulator(args) + + +def main(): + """Stand-alone function for starting an emulator.""" + + catch_sigterm() + logging.basicConfig(level=logging.INFO) + parser = argparse.ArgumentParser() + register_emulator_args(parser, True) + register_log_args(parser) + args = parser.parse_args() + with create_emulator_from_args(args) as target_id: + logging.info( + 'Emulator successfully started. You can now run Chrome ' + 'Fuchsia tests with --target-id=%s to target this emulator.', + target_id) + try: + while True: + time.sleep(10000) + except KeyboardInterrupt: + logging.info('Ctrl-C received; shutting down the emulator.') + except SystemExit: + logging.info('SIGTERM received; shutting down the emulator.') + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/fuchsia/test/test_runner.py b/fuchsia/test/test_runner.py new file mode 100644 index 000000000000..a4a2f5bf3d30 --- /dev/null +++ b/fuchsia/test/test_runner.py @@ -0,0 +1,74 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Provides a base class for test running.""" + +import os +import subprocess + +from abc import ABC, abstractmethod +from argparse import Namespace +from typing import Dict, List, Optional + +from common import read_package_paths + + +class TestRunner(ABC): + """Base class that handles running a test.""" + + def __init__(self, + out_dir: str, + test_args: Namespace, + packages: List[str], + target_id: Optional[str] = None) -> None: + self._target_id = target_id + self._out_dir = out_dir + self._test_args = test_args + self._packages = packages + self._package_deps = None + + # TODO(crbug.com/1256503): Remove when all tests are converted to CFv2. + @staticmethod + def is_cfv2() -> bool: + """ + Returns True if packages are CFv2, False otherwise. Subclasses can + override this and return False if needed. 
+ """ + + return True + + @property + def package_deps(self) -> Dict[str, str]: + """ + Returns: + A dictionary of packages that |self._packages| depend on, with + mapping from the package name to the local path to its far file. + """ + + if not self._package_deps: + self._populate_package_deps() + return self._package_deps + + def _populate_package_deps(self) -> None: + """Retrieve information for all packages |self._packages| depend on. + """ + + package_deps = {} + + package_paths = [] + for package in self._packages: + package_paths.extend(read_package_paths(self._out_dir, package)) + + for path in package_paths: + package_name = os.path.basename(path).replace('.far', '') + if package_name in package_deps: + assert path == package_deps[package_name] + package_deps[package_name] = path + self._package_deps = package_deps + + @abstractmethod + def run_test(self) -> subprocess.Popen: + """ + Returns: + A subprocess.Popen object that ran the test command. + """ diff --git a/fuchsia/test/test_server.py b/fuchsia/test/test_server.py new file mode 100644 index 000000000000..c2ed3d23584a --- /dev/null +++ b/fuchsia/test/test_server.py @@ -0,0 +1,130 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Test server set up.""" + +import logging +import os +import sys +import subprocess + +from typing import List, Optional, Tuple + +from common import DIR_SRC_ROOT, run_ffx_command +from compatible_utils import get_ssh_prefix + +sys.path.append(os.path.join(DIR_SRC_ROOT, 'build', 'util', 'lib', 'common')) +# pylint: disable=import-error,wrong-import-position +import chrome_test_server_spawner +# pylint: enable=import-error,wrong-import-position + + +def port_forward(host_port_pair: str, host_port: int) -> int: + """Establishes a port forwarding SSH task to a localhost TCP endpoint + hosted at port |local_port|. Blocks until port forwarding is established. + + Returns the remote port number.""" + + ssh_prefix = get_ssh_prefix(host_port_pair) + + # Allow a tunnel to be established. + subprocess.run(ssh_prefix + ['echo', 'true'], check=True) + + forward_cmd = [ + '-O', + 'forward', # Send SSH mux control signal. + '-R', + '0:localhost:%d' % host_port, + '-v', # Get forwarded port info from stderr. + '-NT' # Don't execute command; don't allocate terminal. + ] + forward_proc = subprocess.run(ssh_prefix + forward_cmd, + capture_output=True, + check=False, + text=True) + if forward_proc.returncode != 0: + raise Exception( + 'Got an error code when requesting port forwarding: %d' % + forward_proc.returncode) + + output = forward_proc.stdout + parsed_port = int(output.splitlines()[0].strip()) + logging.debug('Port forwarding established (local=%d, device=%d)', + host_port, parsed_port) + return parsed_port + + +# Disable pylint errors since the subclass is not from this directory. +# pylint: disable=invalid-name,missing-function-docstring +class SSHPortForwarder(chrome_test_server_spawner.PortForwarder): + """Implementation of chrome_test_server_spawner.PortForwarder that uses + SSH's remote port forwarding feature to forward ports.""" + + def __init__(self, host_port_pair: str) -> None: + self._host_port_pair = host_port_pair + + # Maps the host (server) port to the device port number. 
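+        # Editor's example: after Map([(80, 8080)]) this holds
+        # {8080: <device port reported by the remote sshd>}, which is what
+        # GetDevicePortForHostPort(8080) hands back. (Map() below ignores
+        # the first element of each pair and keys on the host port.)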
+ self._port_mapping = {} + + def Map(self, port_pairs: List[Tuple[int, int]]) -> None: + for p in port_pairs: + _, host_port = p + self._port_mapping[host_port] = \ + port_forward(self._host_port_pair, host_port) + + def GetDevicePortForHostPort(self, host_port: int) -> int: + return self._port_mapping[host_port] + + def Unmap(self, device_port: int) -> None: + for host_port, entry in self._port_mapping.items(): + if entry == device_port: + ssh_prefix = get_ssh_prefix(self._host_port_pair) + unmap_cmd = [ + '-NT', '-O', 'cancel', '-R', + '0:localhost:%d' % host_port + ] + ssh_proc = subprocess.run(ssh_prefix + unmap_cmd, check=False) + if ssh_proc.returncode != 0: + raise Exception('Error %d when unmapping port %d' % + (ssh_proc.returncode, device_port)) + del self._port_mapping[host_port] + return + + raise Exception('Unmap called for unknown port: %d' % device_port) + + +# pylint: enable=invalid-name,missing-function-docstring + + +def setup_test_server(target_id: Optional[str], test_concurrency: int)\ + -> Tuple[chrome_test_server_spawner.SpawningServer, str]: + """Provisions a test server and configures |target_id| to use it. + + Args: + target_id: The target to which port forwarding to the test server will + be established. + test_concurrency: The number of parallel test jobs that will be run. + + Returns a tuple of a SpawningServer object and the local url to use on + |target_id| to reach the test server.""" + + logging.debug('Starting test server.') + + host_port_pair = run_ffx_command(('target', 'get-ssh-address'), + target_id, + capture_output=True).stdout.strip() + + # The TestLauncher can launch more jobs than the limit specified with + # --test-launcher-jobs so the max number of spawned test servers is set to + # twice that limit here. See https://crbug.com/913156#c19. + spawning_server = chrome_test_server_spawner.SpawningServer( + 0, SSHPortForwarder(host_port_pair), test_concurrency * 2) + + forwarded_port = port_forward(host_port_pair, spawning_server.server_port) + spawning_server.Start() + + logging.debug('Test server listening for connections (port=%d)', + spawning_server.server_port) + logging.debug('Forwarded port is %d', forwarded_port) + + return (spawning_server, 'http://localhost:%d' % forwarded_port) diff --git a/fuchsia/test/test_server_unittests.py b/fuchsia/test/test_server_unittests.py new file mode 100755 index 000000000000..f601884956e1 --- /dev/null +++ b/fuchsia/test/test_server_unittests.py @@ -0,0 +1,84 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
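+# Editor's sketch of the caller-side flow exercised by these tests (assumed
+# shape; Start()/Stop() come from the spawner library, not this module):
+#
+#     server, url = test_server.setup_test_server(target_id, 4)
+#     ...  # point the device-side suite at |url|
+#     server.Stop()
+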
+"""File for testing test_server.py.""" + +import unittest +import unittest.mock as mock + +import test_server + +_HOST_PORT = 44444 +_HOST_PORT_PAIR = '127.0.0.1:33333' +_SERVER_PORT = 55555 + + +class TestServerTest(unittest.TestCase): + """Unittests for test_server.py.""" + + def setUp(self) -> None: + self._subprocess_patcher = mock.patch('test_server.subprocess.run') + self._log_patcher = mock.patch('test_server.logging.debug') + self._subprocess_mock = self._subprocess_patcher.start() + self._log_mock = self._log_patcher.start() + self.addCleanup(self._log_mock.stop) + self.addCleanup(self._subprocess_mock.stop) + + def test_ssh_port_forwarder(self) -> None: + """Test SSHPortForwarder.""" + + port_pair = (_HOST_PORT, _SERVER_PORT) + cmd_mock = mock.Mock() + cmd_mock.returncode = 0 + cmd_mock.stdout = str(port_pair[0]) + self._subprocess_mock.return_value = cmd_mock + + forwarder = test_server.SSHPortForwarder(_HOST_PORT_PAIR) + + # Unmap should raise an exception if no ports are mapped. + with self.assertRaises(Exception): + forwarder.Unmap(port_pair[0]) + + forwarder.Map([port_pair]) + self.assertEqual(self._subprocess_mock.call_count, 2) + self.assertEqual(forwarder.GetDevicePortForHostPort(port_pair[1]), + port_pair[0]) + + # Unmap should also raise an exception if the unmap command fails. + self._subprocess_mock.reset_mock() + cmd_mock.returncode = 1 + with self.assertRaises(Exception): + forwarder.Unmap(port_pair[0]) + self.assertEqual(self._subprocess_mock.call_count, 1) + + self._subprocess_mock.reset_mock() + cmd_mock.returncode = 0 + forwarder.Unmap(port_pair[0]) + self.assertEqual(self._subprocess_mock.call_count, 1) + + def test_port_forward_exception(self) -> None: + """Tests that exception is raised if |port_forward| command fails.""" + + cmd_mock = mock.Mock() + cmd_mock.returncode = 1 + self._subprocess_mock.return_value = cmd_mock + with self.assertRaises(Exception): + test_server.port_forward(_HOST_PORT_PAIR, _HOST_PORT) + + @mock.patch('test_server.chrome_test_server_spawner.SpawningServer') + @mock.patch('test_server.port_forward') + def test_setup_test_server(self, forward_mock, server_mock) -> None: + """Test |setup_test_server|.""" + + forward_mock.return_value = _HOST_PORT + server = test_server.chrome_test_server_spawner.SpawningServer + server.Start = mock.Mock() + server_mock.return_value = server + with mock.patch('test_server.run_ffx_command'): + _, url = test_server.setup_test_server(_HOST_PORT_PAIR, 4) + self.assertTrue(str(_HOST_PORT) in url) + + +if __name__ == '__main__': + unittest.main() diff --git a/fuchsia/update_images.py b/fuchsia/update_images.py new file mode 100755 index 000000000000..5251f98e4489 --- /dev/null +++ b/fuchsia/update_images.py @@ -0,0 +1,266 @@ +#!/usr/bin/env python3 +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Updates the Fuchsia images to the given revision. 
Should be used in a +'hooks_os' entry so that it only runs when .gclient's target_os includes +'fuchsia'.""" + +import argparse +import itertools +import logging +import os +import re +import subprocess +import sys +from typing import Dict, Optional + +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), + 'test'))) + +from common import DIR_SRC_ROOT, IMAGES_ROOT, get_host_os, \ + make_clean_directory + +from gcs_download import DownloadAndUnpackFromCloudStorage + +from update_sdk import GetSDKOverrideGCSPath + +IMAGE_SIGNATURE_FILE = '.hash' + + +# TODO(crbug.com/1138433): Investigate whether we can deprecate +# use of sdk_bucket.txt. +def GetOverrideCloudStorageBucket(): + """Read bucket entry from sdk_bucket.txt""" + return ReadFile('sdk-bucket.txt').strip() + + +def ReadFile(filename): + """Read a file in this directory.""" + with open(os.path.join(os.path.dirname(__file__), filename), 'r') as f: + return f.read() + + +def StrExpansion(): + return lambda str_value: str_value + + +def VarLookup(local_scope): + return lambda var_name: local_scope['vars'][var_name] + + +def GetImageHashList(bucket): + """Read filename entries from sdk-hash-files.list (one per line), substitute + {platform} in each entry if present, and read from each filename.""" + assert (get_host_os() == 'linux') + filenames = [ + line.strip() for line in ReadFile('sdk-hash-files.list').replace( + '{platform}', 'linux_internal').splitlines() + ] + image_hashes = [ReadFile(filename).strip() for filename in filenames] + return image_hashes + + +def ParseDepsDict(deps_content): + local_scope = {} + global_scope = { + 'Str': StrExpansion(), + 'Var': VarLookup(local_scope), + 'deps_os': {}, + } + exec(deps_content, global_scope, local_scope) + return local_scope + + +def ParseDepsFile(filename): + with open(filename, 'rb') as f: + deps_content = f.read() + return ParseDepsDict(deps_content) + + +def GetImageHash(bucket): + """Gets the hash identifier of the newest generation of images.""" + if bucket == 'fuchsia-sdk': + hashes = GetImageHashList(bucket) + return max(hashes) + deps_file = os.path.join(DIR_SRC_ROOT, 'DEPS') + return ParseDepsFile(deps_file)['vars']['fuchsia_version'].split(':')[1] + + +def GetImageSignature(image_hash, boot_images): + return 'gn:{image_hash}:{boot_images}:'.format(image_hash=image_hash, + boot_images=boot_images) + + +def GetAllImages(boot_image_names): + if not boot_image_names: + return + + all_device_types = ['generic', 'qemu'] + all_archs = ['x64', 'arm64'] + + images_to_download = set() + + for boot_image in boot_image_names.split(','): + components = boot_image.split('.') + if len(components) != 2: + continue + + device_type, arch = components + device_images = all_device_types if device_type == '*' else [device_type] + arch_images = all_archs if arch == '*' else [arch] + images_to_download.update(itertools.product(device_images, arch_images)) + return images_to_download + + +def DownloadBootImages(bucket, image_hash, boot_image_names, image_root_dir): + images_to_download = GetAllImages(boot_image_names) + for image_to_download in images_to_download: + device_type = image_to_download[0] + arch = image_to_download[1] + image_output_dir = os.path.join(image_root_dir, arch, device_type) + if os.path.exists(image_output_dir): + continue + + logging.info('Downloading Fuchsia boot images for %s.%s...', device_type, + arch) + + # Legacy images use different naming conventions. See fxbug.dev/85552. 
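+    # (Editor's example: bucket 'fuchsia' names its qemu tarball
+    # 'qemu-x64.tgz', while 'fuchsia-sdk' uses 'qemu.x64.tgz'.)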
+ legacy_delimiter_device_types = ['qemu', 'generic'] + if bucket == 'fuchsia-sdk' or \ + device_type not in legacy_delimiter_device_types: + type_arch_connector = '.' + else: + type_arch_connector = '-' + + images_tarball_url = 'gs://{bucket}/development/{image_hash}/images/'\ + '{device_type}{type_arch_connector}{arch}.tgz'.format( + bucket=bucket, image_hash=image_hash, device_type=device_type, + type_arch_connector=type_arch_connector, arch=arch) + try: + DownloadAndUnpackFromCloudStorage(images_tarball_url, image_output_dir) + except subprocess.CalledProcessError as e: + logging.exception('Failed to download image %s from URL: %s', + image_to_download, images_tarball_url) + raise e + + +def _GetImageOverrideInfo() -> Optional[Dict[str, str]]: + """Get the bucket location from sdk_override.txt.""" + location = GetSDKOverrideGCSPath() + if not location: + return None + + m = re.match(r'gs://([^/]+)/development/([^/]+)/?(?:sdk)?', location) + if not m: + raise ValueError('Badly formatted image override location %s' % location) + + return { + 'bucket': m.group(1), + 'image_hash': m.group(2), + } + + +def GetImageLocationInfo(default_bucket: str, + allow_override: bool = True) -> Dict[str, str]: + """Figures out where to pull the image from. + + Defaults to the provided default bucket and generates the hash from defaults. + If sdk_override.txt exists (and is allowed) it uses that bucket instead. + + Args: + default_bucket: a given default for what bucket to use + allow_override: allow SDK override to be used. + + Returns: + A dictionary containing the bucket and image_hash + """ + # if sdk_override.txt exists (and is allowed) use the image from that bucket. + if allow_override: + override = _GetImageOverrideInfo() + if override: + return override + + # Use the bucket in sdk-bucket.txt if an entry exists. + # Otherwise use the default bucket. + bucket = GetOverrideCloudStorageBucket() or default_bucket + return { + 'bucket': bucket, + 'image_hash': GetImageHash(bucket), + } + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--verbose', + '-v', + action='store_true', + help='Enable debug-level logging.') + parser.add_argument( + '--boot-images', + type=str, + required=True, + help='List of boot images to download, represented as a comma separated ' + 'list. Wildcards are allowed. ') + parser.add_argument( + '--default-bucket', + type=str, + default='fuchsia', + help='The Google Cloud Storage bucket in which the Fuchsia images are ' + 'stored. Entry in sdk-bucket.txt will override this flag.') + parser.add_argument( + '--image-root-dir', + default=IMAGES_ROOT, + help='Specify the root directory of the downloaded images. Optional') + parser.add_argument( + '--allow-override', + default=True, + type=bool, + help='Whether sdk_override.txt can be used for fetching the image, if ' + 'it exists.') + args = parser.parse_args() + + logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO) + + # If no boot images need to be downloaded, exit. + if not args.boot_images: + return 0 + + # Check whether there's Fuchsia support for this platform. 
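+  # (Editor's gloss: get_host_os() is expected to raise on unsupported
+  # hosts, mirroring the try/except guard in update_sdk.py, so the bare
+  # call doubles as an assertion.)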
+ get_host_os() + + image_info = GetImageLocationInfo(args.default_bucket, args.allow_override) + + bucket = image_info['bucket'] + image_hash = image_info['image_hash'] + + if not image_hash: + return 1 + + signature_filename = os.path.join(args.image_root_dir, IMAGE_SIGNATURE_FILE) + current_signature = (open(signature_filename, 'r').read().strip() + if os.path.exists(signature_filename) else '') + new_signature = GetImageSignature(image_hash, args.boot_images) + if current_signature != new_signature: + logging.info('Downloading Fuchsia images %s from bucket %s...', image_hash, + bucket) + make_clean_directory(args.image_root_dir) + + try: + DownloadBootImages(bucket, image_hash, args.boot_images, + args.image_root_dir) + with open(signature_filename, 'w') as f: + f.write(new_signature) + except subprocess.CalledProcessError as e: + logging.exception("command '%s' failed with status %d.%s", + ' '.join(e.cmd), e.returncode, + ' Details: ' + e.output if e.output else '') + raise e + else: + logging.info('Signatures matched! Got %s', new_signature) + + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/fuchsia/update_images_test.py b/fuchsia/update_images_test.py new file mode 100755 index 000000000000..f5be774cd316 --- /dev/null +++ b/fuchsia/update_images_test.py @@ -0,0 +1,97 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import unittest +from unittest import mock + +from parameterized import parameterized + +from update_images import _GetImageOverrideInfo +from update_images import GetImageLocationInfo + + +@mock.patch('update_images.GetSDKOverrideGCSPath') +class TestGetImageOverrideInfo(unittest.TestCase): + def testLocationIsNone(self, mock_sdk_loc): + mock_sdk_loc.return_value = None + + actual = _GetImageOverrideInfo() + self.assertIsNone(actual) + + def testBadLocationStr(self, mock_sdk_loc): + mock_sdk_loc.return_value = 'bad-format-string' + + with self.assertRaises(Exception): + _GetImageOverrideInfo() + + @parameterized.expand([ + ('gs://my-bucket/development/my-hash/sdk', { + 'bucket': 'my-bucket', + 'image_hash': 'my-hash' + }), + ('gs://my-bucket/development/my-hash', { + 'bucket': 'my-bucket', + 'image_hash': 'my-hash' + }), + ('gs://my-bucket/development/my-hash/', { + 'bucket': 'my-bucket', + 'image_hash': 'my-hash' + }), + ]) + def testValidLocation(self, mock_sdk_loc, in_path, expected): + mock_sdk_loc.return_value = in_path + + actual = _GetImageOverrideInfo() + self.assertEqual(actual, expected) + + +@mock.patch('update_images.GetImageHash') +@mock.patch('update_images.GetOverrideCloudStorageBucket') +@mock.patch('update_images._GetImageOverrideInfo') +class TestGetImageLocationInfo(unittest.TestCase): + def testNoOverride(self, mock_image_override, mock_override_bucket, + mock_image_hash): + mock_image_override.return_value = None + mock_override_bucket.return_value = None + mock_image_hash.return_value = 'image-hash' + + actual = GetImageLocationInfo('my-bucket') + self.assertEqual(actual, { + 'bucket': 'my-bucket', + 'image_hash': 'image-hash', + }) + + def testOverride(self, mock_image_override, mock_override_bucket, + mock_image_hash): + override_info = { + 'bucket': 'override-bucket', + 'image_hash': 'override-hash', + } + mock_image_override.return_value = override_info + mock_override_bucket.return_value = None + mock_image_hash.return_value = 'image-hash' + + actual = GetImageLocationInfo('my-bucket') + 
self.assertEqual(actual, override_info) + + def testNoAllowOverride(self, mock_image_override, mock_override_bucket, + mock_image_hash): + override_info = { + 'bucket': 'override-bucket', + 'image_hash': 'override-hash', + } + mock_image_override.return_value = override_info + mock_override_bucket.return_value = None + mock_image_hash.return_value = 'image-hash' + + actual = GetImageLocationInfo('my-bucket', allow_override=False) + self.assertEqual(actual, { + 'bucket': 'my-bucket', + 'image_hash': 'image-hash', + }) + + +if __name__ == '__main__': + unittest.main() diff --git a/fuchsia/update_product_bundles.py b/fuchsia/update_product_bundles.py new file mode 100755 index 000000000000..79ad3970964e --- /dev/null +++ b/fuchsia/update_product_bundles.py @@ -0,0 +1,359 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Updates the Fuchsia product bundles to the given revision. Should be used +in a 'hooks_os' entry so that it only runs when .gclient's target_os includes +'fuchsia'.""" + +import argparse +import json +import logging +import os +import re +import subprocess +import sys + +from contextlib import ExitStack + +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), + 'test'))) + +import common +import ffx_integration + +_PRODUCT_BUNDLES = [ + 'core.x64-dfv2', + 'terminal.qemu-arm64', + 'terminal.qemu-x64', + 'workstation_eng.chromebook-x64', + 'workstation_eng.chromebook-x64-dfv2', + 'workstation_eng.qemu-x64', + 'workstation_eng.x64', +] + +# TODO(crbug/1361089): Remove when the old scripts have been deprecated. +_IMAGE_TO_PRODUCT_BUNDLE = { + 'core.x64-dfv2-release': 'core.x64-dfv2', + 'qemu.arm64': 'terminal.qemu-arm64', + 'qemu.x64': 'terminal.qemu-x64', + 'workstation_eng.chromebook-x64-dfv2-release': + 'workstation_eng.chromebook-x64-dfv2', + 'workstation_eng.chromebook-x64-release': 'workstation_eng.chromebook-x64', + 'workstation_eng.qemu-x64-release': 'workstation_eng.qemu-x64', +} + + +_PRODUCT_BUNDLE_FIX_INSTRUCTIONS = ( + 'This could be because an earlier version of the product bundle was not ' + 'properly removed. Run |ffx product-bundle list| and |ffx repository list|,' + ' remove the available product bundles listed using ' + '|ffx product-bundle remove| and |ffx repository remove|, ' + f'remove the directory {common.IMAGES_ROOT} and rerun hooks/this script.') + + +# TODO(crbug/1361089): Remove when the old scripts have been deprecated. +def convert_to_product_bundle(images_list): + """Convert image names in the SDK to product bundle names.""" + + product_bundle_list = [] + for image in images_list: + if image in _IMAGE_TO_PRODUCT_BUNDLE: + logging.warning(f'Image name {image} has been deprecated. Use ' + f'{_IMAGE_TO_PRODUCT_BUNDLE.get(image)} instead.') + product_bundle_list.append(_IMAGE_TO_PRODUCT_BUNDLE.get(image, image)) + return product_bundle_list + + +def get_hash_from_sdk(): + """Retrieve version info from the SDK.""" + + version_file = os.path.join(common.SDK_ROOT, 'meta', 'manifest.json') + if not os.path.exists(version_file): + raise RuntimeError('Could not detect version file. Make sure the SDK has ' + 'been downloaded') + with open(version_file, 'r') as f: + return json.load(f)['id'] + + +def remove_repositories(repo_names_to_remove): + """Removes given repos from repo list. + Repo MUST be present in list to succeed. + + Args: + repo_names_to_remove: List of repo names (as strings) to remove. 
+ """ + for repo_name in repo_names_to_remove: + common.run_ffx_command(('repository', 'remove', repo_name), check=True) + + +def get_repositories(): + """Lists repositories that are available on disk. + + Also prunes repositories that are listed, but do not have an actual packages + directory. + + Returns: + List of dictionaries containing info about the repositories. They have the + following structure: + { + 'name': , + 'spec': { + 'type': , + 'path': + }, + } + """ + + repos = json.loads( + common.run_ffx_command(('--machine', 'json', 'repository', 'list'), + check=True, + capture_output=True).stdout.strip()) + to_prune = set() + sdk_root_abspath = os.path.abspath(os.path.dirname(common.SDK_ROOT)) + for repo in repos: + # Confirm the path actually exists. If not, prune list. + # Also assert the product-bundle repository is for the current repo + # (IE within the same directory). + if not os.path.exists(repo['spec']['path']): + to_prune.add(repo['name']) + + if not repo['spec']['path'].startswith(sdk_root_abspath): + to_prune.add(repo['name']) + + repos = [repo for repo in repos if repo['name'] not in to_prune] + + remove_repositories(to_prune) + return repos + + +def update_repositories_list(): + """Used to prune stale repositories.""" + get_repositories() + + +def remove_product_bundle(product_bundle): + """Removes product-bundle given.""" + common.run_ffx_command(('product-bundle', 'remove', '-f', product_bundle)) + + +def get_product_bundle_urls(): + """Retrieves URLs of available product-bundles. + + Returns: + List of dictionaries of structure, indicating whether the product-bundle + has been downloaded. + { + 'url': , + 'downloaded': + } + """ + # TODO(fxb/115328): Replaces with JSON API when available. + bundles = common.run_ffx_command(('product-bundle', 'list'), + capture_output=True).stdout.strip() + urls = [ + line.strip() for line in bundles.splitlines() if 'gs://fuchsia' in line + ] + structured_urls = [] + for url in urls: + downloaded = False + if '*' in url: + downloaded = True + url = url.split(' ')[1] + structured_urls.append({'downloaded': downloaded, 'url': url.strip()}) + return structured_urls + + +def keep_product_bundles_by_sdk_version(sdk_version): + """Prunes product bundles not containing the sdk_version given.""" + urls = get_product_bundle_urls() + for url in urls: + if url['downloaded'] and sdk_version not in url['url']: + remove_product_bundle(url['url']) + + +def get_product_bundles(): + """Lists all downloaded product-bundles for the given SDK. + + Cross-references the repositories with downloaded packages and the stated + downloaded product-bundles to validate whether or not a product-bundle is + present. Prunes invalid product-bundles with each call as well. + + Returns: + List of strings of product-bundle names downloaded and that FFX is aware + of. + """ + downloaded_bundles = [] + + for url in get_product_bundle_urls(): + if url['downloaded']: + # The product is separated by a # + product = url['url'].split('#') + downloaded_bundles.append(product[1]) + + repos = get_repositories() + + # Some repo names do not match product-bundle names due to underscores. + # Normalize them both. + repo_names = set([repo['name'].replace('-', '_') for repo in repos]) + + def bundle_is_active(name): + # Returns True if the product-bundle named `name` is present in a package + # repository (assuming it is downloaded already); otherwise, removes the + # product-bundle and returns False. 
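+    # (Editor's example: repo 'workstation-eng.chromebook-x64' matches
+    # bundle 'workstation_eng.chromebook-x64' once both are normalized
+    # to underscores.)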
+ if name.replace('-', '_') in repo_names: + return True + + remove_product_bundle(name) + return False + + return list(filter(bundle_is_active, downloaded_bundles)) + + +def download_product_bundle(product_bundle, download_config): + """Download product bundles using the SDK.""" + # This also updates the repository list, in case it is stale. + update_repositories_list() + + try: + common.run_ffx_command( + ('product-bundle', 'get', product_bundle, '--force-repo'), + configs=download_config) + except subprocess.CalledProcessError as cpe: + logging.error('Product bundle download has failed. ' + + _PRODUCT_BUNDLE_FIX_INSTRUCTIONS) + raise + + +def get_current_signature(): + """Determines the SDK version of the product-bundles associated with the SDK. + + Parses this information from the URLs of the product-bundle. + + Returns: + An SDK version string, or None if no product-bundle versions are downloaded. + """ + product_bundles = get_product_bundles() + if not product_bundles: + logging.info('No product bundles - signature will default to None') + return None + product_bundle_urls = get_product_bundle_urls() + + # Get the numbers, hope they're the same. + signatures = set() + for bundle in product_bundle_urls: + m = re.search(r'/(\d+\.\d+\.\d+.\d+|\d+)/', bundle['url']) + assert m, 'Must have a signature in each URL' + signatures.add(m.group(1)) + + if len(signatures) > 1: + raise RuntimeError('Found more than one product signature. ' + + _PRODUCT_BUNDLE_FIX_INSTRUCTIONS) + + return next(iter(signatures)) if signatures else None + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--verbose', + '-v', + action='store_true', + help='Enable debug-level logging.') + parser.add_argument( + 'product_bundles', + type=str, + help='List of product bundles to download, represented as a comma ' + 'separated list.') + args = parser.parse_args() + + logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO) + + # Check whether there's Fuchsia support for this platform. + common.get_host_os() + + new_product_bundles = convert_to_product_bundle( + args.product_bundles.split(',')) + logging.info('Searching for the following product bundles: %s', + str(new_product_bundles)) + for pb in new_product_bundles: + if pb not in _PRODUCT_BUNDLES: + raise ValueError(f'{pb} is not part of the Fuchsia product bundle.') + + if '*' in args.product_bundles: + raise ValueError('Wildcards are no longer supported, all product bundles ' + 'need to be explicitly listed. The full list can be ' + 'found in the DEPS file.') + + with ExitStack() as stack: + + # Re-set the directory to which product bundles are downloaded so that + # these bundles are located inside the Chromium codebase. 
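+    # (Editor's gloss on the comment above: 'pbms.storage.path' appears to
+    # be the ffx config key that |ffx product-bundle get| consults for its
+    # download destination; see download_product_bundle below.)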
+    common.run_ffx_command(
+        ('config', 'set', 'pbms.storage.path', common.IMAGES_ROOT))
+
+    logging.debug('Checking for override file')
+
+    # TODO(crbug/1380807): Remove when product bundles can be downloaded
+    # for custom SDKs without editing metadata
+    override_file = os.path.join(os.path.dirname(__file__), 'sdk_override.txt')
+    pb_metadata = None
+    if os.path.isfile(override_file):
+      with open(override_file) as f:
+        pb_metadata = f.read().strip().split('\n')
+        pb_metadata.append('{sdk.root}/*.json')
+        logging.debug('Applied overrides')
+
+    logging.debug('Getting new SDK hash')
+    new_sdk_hash = get_hash_from_sdk()
+    keep_product_bundles_by_sdk_version(new_sdk_hash)
+    logging.debug('Checking for current signature')
+    curr_signature = get_current_signature()
+
+    current_images = get_product_bundles()
+
+    # Compute the metadata override (if any) up front so that both download
+    # paths below share it.
+    download_config = None
+    if pb_metadata:
+      download_config = [
+          '{"pbms":{"metadata": %s}}' % json.dumps(pb_metadata)
+      ]
+
+    # If SDK versions match, remove the product bundles that are no longer
+    # needed and download missing ones.
+    if curr_signature == new_sdk_hash:
+      logging.debug('Current images: %s, desired images %s',
+                    str(current_images), str(new_product_bundles))
+      for image in current_images:
+        if image not in new_product_bundles:
+          logging.debug('Removing no longer needed Fuchsia image %s', image)
+          remove_product_bundle(image)
+
+      bundles_to_download = set(new_product_bundles) - \
+                            set(current_images)
+      for bundle in bundles_to_download:
+        logging.debug('Downloading image: %s', bundle)
+        download_product_bundle(bundle, download_config)
+
+      return 0
+
+    # If SDK versions do not match, remove all existing product bundles
+    # and download the ones required.
+    for pb in current_images:
+      remove_product_bundle(pb)
+
+    logging.debug('Make clean images root')
+    common.make_clean_directory(common.IMAGES_ROOT)
+
+    for pb in new_product_bundles:
+      logging.debug('Downloading bundle: %s', pb)
+      download_product_bundle(pb, download_config)
+
+    current_pb = get_product_bundles()
+
+    assert set(current_pb) == set(new_product_bundles), (
+        'Failed to download expected set of product-bundles. '
+        f'Expected {new_product_bundles}, got {current_pb}')
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/fuchsia/update_product_bundles_test.py b/fuchsia/update_product_bundles_test.py
new file mode 100755
index 000000000000..0ffc20cce916
--- /dev/null
+++ b/fuchsia/update_product_bundles_test.py
@@ -0,0 +1,288 @@
+#!/usr/bin/env vpython3
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
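+# Editor's note: in a checkout this script is expected to run as a gclient
+# hook (per the module docstring above). A hypothetical hook entry, for
+# illustration only:
+#
+#     {
+#       'name': 'Download Fuchsia product bundles',
+#       'pattern': '.',
+#       'condition': 'checkout_fuchsia',
+#       'action': ['python3', 'build/fuchsia/update_product_bundles.py',
+#                  'terminal.qemu-x64'],
+#     }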
+ +import io +import json +import os +import sys +import unittest +from unittest import mock + +from parameterized import parameterized + +import update_product_bundles + +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), + 'test'))) + +import common + + +class TestUpdateProductBundles(unittest.TestCase): + def setUp(self): + ffx_mock = mock.Mock() + ffx_mock.returncode = 0 + self._ffx_patcher = mock.patch('common.run_ffx_command', + return_value=ffx_mock) + self._ffx_mock = self._ffx_patcher.start() + self.addCleanup(self._ffx_mock.stop) + + def testConvertToProductBundleDefaultsUnknownImage(self): + self.assertEqual( + update_product_bundles.convert_to_product_bundle(['unknown-image']), + ['unknown-image']) + + def testConvertToProductBundleWarnsDeprecated(self): + with self.assertLogs(level='WARNING') as logs: + deprecated_images = [ + 'qemu.arm64', 'qemu.x64', 'core.x64-dfv2-release', + 'workstation_eng.chromebook-x64-release' + ] + self.assertEqual( + update_product_bundles.convert_to_product_bundle(deprecated_images), [ + 'terminal.qemu-arm64', 'terminal.qemu-x64', 'core.x64-dfv2', + 'workstation_eng.chromebook-x64' + ]) + for i, deprecated_image in enumerate(deprecated_images): + self.assertIn(f'Image name {deprecated_image} has been deprecated', + logs.output[i]) + + @mock.patch('builtins.open') + @mock.patch('os.path.exists') + def testGetHashFromSDK(self, mock_exists, mock_open): + mock_open.return_value = io.StringIO(json.dumps({'id': 'foo-bar'})) + mock_exists.return_value = True + + self.assertEqual(update_product_bundles.get_hash_from_sdk(), 'foo-bar') + + manifest_file = os.path.join(common.SDK_ROOT, 'meta', 'manifest.json') + mock_exists.assert_called_once_with(manifest_file) + mock_open.assert_called_once_with(manifest_file, 'r') + + @mock.patch('builtins.open') + @mock.patch('os.path.exists') + def testGetHashFromSDKRaisesErrorIfNoManifestExists(self, mock_exists, + mock_open): + mock_exists.return_value = False + + self.assertRaises(RuntimeError, update_product_bundles.get_hash_from_sdk) + + @mock.patch('common.run_ffx_command') + def testRemoveRepositoriesRunsRemoveOnGivenRepos(self, ffx_mock): + update_product_bundles.remove_repositories(['foo', 'bar', 'fizz', 'buzz']) + + ffx_mock.assert_has_calls([ + mock.call(('repository', 'remove', 'foo'), check=True), + mock.call(('repository', 'remove', 'bar'), check=True), + mock.call(('repository', 'remove', 'fizz'), check=True), + mock.call(('repository', 'remove', 'buzz'), check=True), + ]) + + @mock.patch('os.path.exists') + @mock.patch('os.path.abspath') + def testGetRepositoriesPrunesReposThatDoNotExist(self, mock_abspath, + mock_exists): + with mock.patch('common.SDK_ROOT', 'some/path'): + self._ffx_mock.return_value.stdout = json.dumps([{ + "name": "terminal.qemu-x64", + "spec": { + "type": "pm", + "path": "some/path/that/exists" + } + }, { + "name": "workstation-eng.chromebook-x64", + "spec": { + "type": "pm", + "path": "some/path/that/does/not/exist" + } + }]) + mock_exists.side_effect = [True, False] + mock_abspath.side_effect = lambda x: x + + self.assertEqual(update_product_bundles.get_repositories(), [{ + "name": "terminal.qemu-x64", + "spec": { + "type": "pm", + "path": "some/path/that/exists" + } + }]) + + self._ffx_mock.assert_has_calls([ + mock.call(('--machine', 'json', 'repository', 'list'), + capture_output=True, + check=True), + mock.call(('repository', 'remove', 'workstation-eng.chromebook-x64'), + check=True) + ]) + + def testRemoveProductBundle(self): + 
update_product_bundles.remove_product_bundle('some-bundle-foo-bar') + + self._ffx_mock.assert_called_once_with( + ('product-bundle', 'remove', '-f', 'some-bundle-foo-bar')) + + def _InitFFXRunWithProductBundleList(self, sdk_version='10.20221114.2.1'): + self._ffx_mock.return_value.stdout = f""" + gs://fuchsia/{sdk_version}/bundles.json#workstation_eng.qemu-x64 + gs://fuchsia/{sdk_version}/bundles.json#workstation_eng.chromebook-x64-dfv2 +* gs://fuchsia/{sdk_version}/bundles.json#workstation_eng.chromebook-x64 +* gs://fuchsia/{sdk_version}/bundles.json#terminal.qemu-x64 + gs://fuchsia/{sdk_version}/bundles.json#terminal.qemu-arm64 +* gs://fuchsia/{sdk_version}/bundles.json#core.x64-dfv2 + +*No need to fetch with `ffx product-bundle get ...`. + """ + + def testGetProductBundleUrlsMarksDesiredAsDownloaded(self): + self._InitFFXRunWithProductBundleList() + urls = update_product_bundles.get_product_bundle_urls() + expected_urls = [{ + 'url': + 'gs://fuchsia/10.20221114.2.1/bundles.json#workstation_eng.qemu-x64', + 'downloaded': False, + }, { + 'url': ('gs://fuchsia/10.20221114.2.1/bundles.json#workstation_eng.' + 'chromebook-x64-dfv2'), + 'downloaded': + False, + }, { + 'url': ('gs://fuchsia/10.20221114.2.1/bundles.json#workstation_eng.' + 'chromebook-x64'), + 'downloaded': + True, + }, { + 'url': 'gs://fuchsia/10.20221114.2.1/bundles.json#terminal.qemu-x64', + 'downloaded': True, + }, { + 'url': 'gs://fuchsia/10.20221114.2.1/bundles.json#terminal.qemu-arm64', + 'downloaded': False, + }, { + 'url': 'gs://fuchsia/10.20221114.2.1/bundles.json#core.x64-dfv2', + 'downloaded': True, + }] + + for i, url in enumerate(urls): + self.assertEqual(url, expected_urls[i]) + + @mock.patch('update_product_bundles.get_repositories') + def testGetProductBundlesExtractsProductBundlesFromURLs(self, mock_get_repos): + self._InitFFXRunWithProductBundleList() + mock_get_repos.return_value = [{ + 'name': 'workstation-eng.chromebook-x64' + }, { + 'name': 'terminal.qemu-x64' + }, { + 'name': 'core.x64-dfv2' + }] + + self.assertEqual( + set(update_product_bundles.get_product_bundles()), + set([ + 'workstation_eng.chromebook-x64', + 'terminal.qemu-x64', + 'core.x64-dfv2', + ])) + + @mock.patch('update_product_bundles.get_repositories') + def testGetProductBundlesExtractsProductBundlesFromURLsFiltersMissingRepos( + self, mock_get_repos): + self._InitFFXRunWithProductBundleList() + + # This will be missing two repos from the bundle list: + # core and terminal.qemu-x64 + # Additionally, workstation-eng != workstation_eng, but they will be treated + # as the same product-bundle + mock_get_repos.return_value = [{ + 'name': 'workstation-eng.chromebook-x64' + }, { + 'name': 'terminal.qemu-arm64' + }] + + self.assertEqual(update_product_bundles.get_product_bundles(), + ['workstation_eng.chromebook-x64']) + self._ffx_mock.assert_has_calls([ + mock.call(('product-bundle', 'remove', '-f', 'terminal.qemu-x64')), + mock.call(('product-bundle', 'remove', '-f', 'core.x64-dfv2')), + ], + any_order=True) + + @mock.patch('common.run_ffx_command') + @mock.patch('update_product_bundles.update_repositories_list') + def testDownloadProductBundleUpdatesRepoListBeforeCall( + self, mock_update_repo, mock_ffx): + mock_sequence = mock.Mock() + mock_sequence.attach_mock(mock_update_repo, 'update_repo_list') + mock_sequence.attach_mock(mock_ffx, 'run_ffx_command') + + update_product_bundles.download_product_bundle('some-bundle', None) + + mock_sequence.assert_has_calls([ + mock.call.update_repo_list(), + mock.call.run_ffx_command( + 
('product-bundle', 'get', 'some-bundle', '--force-repo'), + configs=None) + ]) + + @mock.patch('common.run_ffx_command') + @mock.patch('update_product_bundles.get_product_bundle_urls') + def testFilterProductBundleURLsRemovesBundlesWithoutGivenString( + self, mock_get_urls, mock_ffx): + mock_get_urls.return_value = [ + { + 'url': 'some-url-has-buzz', + 'downloaded': True, + }, + { + 'url': 'some-url-to-remove-has-foo', + 'downloaded': True, + }, + { + 'url': 'some-url-to-not-remove-has-foo', + 'downloaded': False, + }, + ] + update_product_bundles.keep_product_bundles_by_sdk_version('buzz') + mock_ffx.assert_called_once_with( + ('product-bundle', 'remove', '-f', 'some-url-to-remove-has-foo')) + + @mock.patch('update_product_bundles.get_repositories') + def testGetCurrentSignatureReturnsNoneIfNoProductBundles( + self, mock_get_repos): + self._InitFFXRunWithProductBundleList() + + # Forces no product-bundles + mock_get_repos.return_value = [] + + # Mutes logs + with self.assertLogs(): + self.assertIsNone(update_product_bundles.get_current_signature()) + + @mock.patch('update_product_bundles.get_repositories') + def testGetCurrentSignatureParsesVersionCorrectly(self, mock_get_repos): + self._InitFFXRunWithProductBundleList() + mock_get_repos.return_value = [{ + 'name': 'workstation-eng.chromebook-x64' + }, { + 'name': 'terminal.qemu-x64' + }] + + self.assertEqual('10.20221114.2.1', + update_product_bundles.get_current_signature()) + + @mock.patch('update_product_bundles.get_repositories') + def testGetCurrentSignatureParsesCustomArtifactsCorrectlys( + self, mock_get_repos): + self._InitFFXRunWithProductBundleList(sdk_version='51390009') + mock_get_repos.return_value = [{ + 'name': 'workstation-eng.chromebook-x64' + }, { + 'name': 'terminal.qemu-x64' + }] + + self.assertEqual('51390009', update_product_bundles.get_current_signature()) + + +if __name__ == '__main__': + unittest.main() diff --git a/fuchsia/update_sdk.py b/fuchsia/update_sdk.py new file mode 100755 index 000000000000..2b30a9c3c0a9 --- /dev/null +++ b/fuchsia/update_sdk.py @@ -0,0 +1,109 @@ +#!/usr/bin/env python3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Check out the Fuchsia SDK from a given GCS path. Should be used in a +'hooks_os' entry so that it only runs when .gclient's custom_vars includes +'fuchsia'.""" + +import argparse +import logging +import os +import platform +import subprocess +import sys +from typing import Optional + +from gcs_download import DownloadAndUnpackFromCloudStorage + +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), + 'test'))) + +from common import SDK_ROOT, get_host_arch, get_host_os, make_clean_directory + + +def _GetHostArch(): + host_arch = platform.machine() + # platform.machine() returns AMD64 on 64-bit Windows. + if host_arch in ['x86_64', 'AMD64']: + return 'amd64' + elif host_arch == 'aarch64': + return 'arm64' + raise Exception('Unsupported host architecture: %s' % host_arch) + + +def GetSDKOverrideGCSPath(path: Optional[str] = None) -> Optional[str]: + """Fetches the sdk override path from a file. + + Args: + path: the full file path to read the data from. + defaults to sdk_override.txt in the directory of this file. + + Returns: + The contents of the file, stripped of white space. 
+ Example: gs://fuchsia-artifacts/development/some-id/sdk + """ + if not path: + path = os.path.join(os.path.dirname(__file__), 'sdk_override.txt') + + if not os.path.isfile(path): + return None + + with open(path, 'r') as f: + return f.read().strip() + + +def _GetTarballPath(gcs_tarball_prefix: str) -> str: + """Get the full path to the sdk tarball on GCS""" + platform = get_host_os() + arch = _GetHostArch() + return f'{gcs_tarball_prefix}/{platform}-{arch}/gn.tar.gz' + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--cipd-prefix', help='CIPD base directory for the SDK.') + parser.add_argument('--version', help='Specifies the SDK version.') + parser.add_argument('--verbose', + '-v', + action='store_true', + help='Enable debug-level logging.') + args = parser.parse_args() + + logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO) + + # Exit if there's no SDK support for this platform. + try: + host_plat = get_host_os() + except: + logging.warning('Fuchsia SDK is not supported on this platform.') + return 0 + + gcs_tarball_prefix = GetSDKOverrideGCSPath() + + # Download from CIPD if there is no override file. + if not gcs_tarball_prefix: + if not args.cipd_prefix: + parser.exit(1, '--cipd-prefix must be specified.') + if not args.version: + parser.exit(2, '--version must be specified.') + logging.info('Downloading GN SDK from CIPD...') + ensure_file = '%s%s-%s %s' % (args.cipd_prefix, host_plat, _GetHostArch(), + args.version) + subprocess.run(('cipd', 'ensure', '-ensure-file', '-', '-root', SDK_ROOT, + '-log-level', 'warning'), + check=True, + text=True, + input=ensure_file) + return 0 + + # Always re-download the SDK. + logging.info('Downloading GN SDK from GCS...') + make_clean_directory(SDK_ROOT) + DownloadAndUnpackFromCloudStorage(_GetTarballPath(gcs_tarball_prefix), + SDK_ROOT) + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/fuchsia/update_sdk_test.py b/fuchsia/update_sdk_test.py new file mode 100755 index 000000000000..5def6796d449 --- /dev/null +++ b/fuchsia/update_sdk_test.py @@ -0,0 +1,69 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
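+# Editor's sketch of the override flow these tests poke at, reusing the
+# example path from the GetSDKOverrideGCSPath docstring:
+#
+#     $ cat build/fuchsia/sdk_override.txt
+#     gs://fuchsia-artifacts/development/some-id/sdk
+#     $ build/fuchsia/update_sdk.py
+#     # -> skips CIPD and unpacks gs://.../linux-amd64/gn.tar.gz on a
+#     #    linux x64 host (see _GetTarballPath).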
+ +import unittest +from unittest import mock + +from parameterized import parameterized + +from update_sdk import _GetHostArch +from update_sdk import _GetTarballPath +from update_sdk import GetSDKOverrideGCSPath + + +@mock.patch('platform.machine') +class TestGetHostArch(unittest.TestCase): + @parameterized.expand([('x86_64', 'amd64'), ('AMD64', 'amd64'), + ('aarch64', 'arm64')]) + def testSupportedArchs(self, mock_machine, arch, expected): + mock_machine.return_value = arch + self.assertEqual(_GetHostArch(), expected) + + def testUnsupportedArch(self, mock_machine): + mock_machine.return_value = 'bad_arch' + with self.assertRaises(Exception): + _GetHostArch() + + +@mock.patch('builtins.open') +@mock.patch('os.path.isfile') +class TestGetSDKOverrideGCSPath(unittest.TestCase): + def testFileNotFound(self, mock_isfile, mock_open): + mock_isfile.return_value = False + + actual = GetSDKOverrideGCSPath('this-file-does-not-exist.txt') + self.assertIsNone(actual) + + def testDefaultPath(self, mock_isfile, mock_open): + mock_isfile.return_value = False + + with mock.patch('os.path.dirname', return_value='./'): + GetSDKOverrideGCSPath() + + mock_isfile.assert_called_with('./sdk_override.txt') + + def testRead(self, mock_isfile, mock_open): + fake_path = '\n\ngs://fuchsia-artifacts/development/abc123/sdk\n\n' + + mock_isfile.return_value = True + mock_open.side_effect = mock.mock_open(read_data=fake_path) + + actual = GetSDKOverrideGCSPath() + self.assertEqual(actual, 'gs://fuchsia-artifacts/development/abc123/sdk') + + +@mock.patch('update_sdk._GetHostArch') +@mock.patch('update_sdk.get_host_os') +class TestGetTarballPath(unittest.TestCase): + def testGetTarballPath(self, mock_get_host_os, mock_host_arch): + mock_get_host_os.return_value = 'linux' + mock_host_arch.return_value = 'amd64' + + actual = _GetTarballPath('gs://bucket/sdk') + self.assertEqual(actual, 'gs://bucket/sdk/linux-amd64/gn.tar.gz') + + +if __name__ == '__main__': + unittest.main() diff --git a/gdb-add-index b/gdb-add-index new file mode 100755 index 000000000000..e756ceacc62a --- /dev/null +++ b/gdb-add-index @@ -0,0 +1,184 @@ +#!/bin/bash +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Saves the gdb index for a given binary and its shared library dependencies. +# +# This will run gdb index in parallel on a number of binaries using SIGUSR1 +# as the communication mechanism to simulate a semaphore. Because of the +# nature of this technique, using "set -e" is very difficult. The SIGUSR1 +# terminates a "wait" with an error which we need to interpret. +# +# When modifying this code, most of the real logic is in the index_one_file +# function. The rest is cleanup + sempahore plumbing. + +function usage_exit { + echo "Usage: $0 [-f] [-r] [-n] ..." + echo " -f forces replacement of an existing index." + echo " -r removes the index section." + echo " -n don't extract the dependencies of each binary with lld." + echo " e.g., $0 -n out/Debug/lib.unstripped/lib*" + echo + echo " Set TOOLCHAIN_PREFIX to use a non-default set of binutils." + exit 1 +} + +# Cleanup temp directory and ensure all child jobs are dead-dead. +function on_exit { + trap "" EXIT USR1 # Avoid reentrancy. + + local jobs=$(jobs -p) + if [ -n "$jobs" ]; then + echo -n "Killing outstanding index jobs..." + kill -KILL $(jobs -p) + wait + echo "done" + fi + + if [ -d "$directory" ]; then + echo -n "Removing temp directory $directory..." 
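+    # (Editor's note: $directory is only ever assigned from the mktemp -d
+    # call below, so this recursive delete stays inside the temp tree.)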
+ rm -rf "$directory" + echo done + fi +} + +# Add index to one binary. +function index_one_file { + local file=$1 + local basename=$(basename "$file") + local should_index_this_file="${should_index}" + + local readelf_out=$(${TOOLCHAIN_PREFIX}readelf -S "$file") + if [[ $readelf_out =~ "gdb_index" ]]; then + if $remove_index; then + ${TOOLCHAIN_PREFIX}objcopy --remove-section .gdb_index "$file" + echo "Removed index from $basename." + else + echo "Skipped $basename -- already contains index." + should_index_this_file=false + fi + fi + + if $should_index_this_file; then + local start=$(date +"%s%N") + echo "Adding index to $basename..." + + ${TOOLCHAIN_PREFIX}gdb -batch "$file" -ex "save gdb-index $directory" \ + -ex "quit" + local index_file="$directory/$basename.gdb-index" + if [ -f "$index_file" ]; then + ${TOOLCHAIN_PREFIX}objcopy --add-section .gdb_index="$index_file" \ + --set-section-flags .gdb_index=readonly "$file" "$file" + local finish=$(date +"%s%N") + local elapsed=$(((finish - start) / 1000000)) + echo " ...$basename indexed. [${elapsed}ms]" + else + echo " ...$basename unindexable." + fi + fi +} + +# Functions that when combined, concurrently index all files in FILES_TO_INDEX +# array. The global FILES_TO_INDEX is declared in the main body of the script. +function async_index { + # Start a background subshell to run the index command. + { + index_one_file $1 + kill -SIGUSR1 $$ # $$ resolves to the parent script. + exit 129 # See comment above wait loop at bottom. + } & +} + +cur_file_num=0 +function index_next { + if ((cur_file_num >= ${#files_to_index[@]})); then + return + fi + + async_index "${files_to_index[cur_file_num]}" + ((cur_file_num += 1)) || true +} + +######## +### Main body of the script. + +remove_index=false +should_index=true +should_index_deps=true +files_to_index=() +while (($# > 0)); do + case "$1" in + -h) + usage_exit + ;; + -f) + remove_index=true + ;; + -r) + remove_index=true + should_index=false + ;; + -n) + should_index_deps=false + ;; + -*) + echo "Invalid option: $1" >&2 + usage_exit + ;; + *) + if [[ ! -f "$1" ]]; then + echo "Path $1 does not exist." + exit 1 + fi + files_to_index+=("$1") + ;; + esac + shift +done + +if ((${#files_to_index[@]} == 0)); then + usage_exit +fi + +dependencies=() +if $should_index_deps; then + for file in "${files_to_index[@]}"; do + # Append the shared library dependencies of this file that + # have the same dirname. The dirname is a signal that these + # shared libraries were part of the same build as the binary. + dependencies+=( \ + $(ldd "$file" 2>/dev/null \ + | grep $(dirname "$file") \ + | sed "s/.*[ \t]\(.*\) (.*/\1/") \ + ) + done +fi +files_to_index+=("${dependencies[@]}") + +# Ensure we cleanup on on exit. +trap on_exit EXIT INT + +# We're good to go! Create temp directory for index files. +directory=$(mktemp -d) +echo "Made temp directory $directory." + +# Start concurrent indexing. +trap index_next USR1 + +# 4 is an arbitrary default. When changing, remember we are likely IO bound +# so basing this off the number of cores is not sensible. +index_tasks=${INDEX_TASKS:-4} +for ((i = 0; i < index_tasks; i++)); do + index_next +done + +# Do a wait loop. Bash waits that terminate due a trap have an exit +# code > 128. We also ensure that our subshell's "normal" exit occurs with +# an exit code > 128. This allows us to do consider a > 128 exit code as +# an indication that the loop should continue. Unfortunately, it also means +# we cannot use set -e since technically the "wait" is failing. +wait +while (($? 
> 128)); do + wait +done diff --git a/get_landmines.py b/get_landmines.py new file mode 100755 index 000000000000..6155d71deca1 --- /dev/null +++ b/get_landmines.py @@ -0,0 +1,91 @@ +#!/usr/bin/env python3 +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" +This file emits the list of reasons why a particular build needs to be clobbered +(or a list of 'landmines'). +""" + + +import sys + +import landmine_utils + + +host_os = landmine_utils.host_os + + +def print_landmines(): + """ + ALL LANDMINES ARE EMITTED FROM HERE. + """ + # DO NOT add landmines as part of a regular CL. Landmines are a last-effort + # bandaid fix if a CL that got landed has a build dependency bug and all bots + # need to be cleaned up. If you're writing a new CL that causes build + # dependency problems, fix the dependency problems instead of adding a + # landmine. + # + # Before adding or changing a landmine consider the consequences of doing so. + # Doing so will wipe out every output directory on every Chrome developer's + # machine. This can be particularly problematic on Windows where the directory + # deletion may well fail (locked files, command prompt in the directory, + # etc.), and generated .sln and .vcxproj files will be deleted. + # + # This output directory deletion will be repeated when going back and forth + # across the change that added the landmine, adding to the cost. There are + # usually less troublesome alternatives. + + if host_os() == 'win': + print('Compile on cc_unittests fails due to symbols removed in r185063.') + if host_os() == 'linux': + print('Builders switching from make to ninja will clobber on this.') + if host_os() == 'mac': + print('Switching from bundle to unbundled dylib (issue 14743002).') + if host_os() in ('win', 'mac'): + print('Improper dependency for create_nmf.py broke in r240802, ' + 'fixed in r240860.') + if host_os() == 'win': + print('Switch to VS2015 Update 3, 14393 SDK') + print('Need to clobber everything due to an IDL change in r154579 (blink)') + print('Need to clobber everything due to gen file moves in r175513 (Blink)') + print('Clobber to get rid of obselete test plugin after r248358') + print('Clobber to rebuild GN files for V8') + print('Clobber to get rid of stale generated mojom.h files') + print('Need to clobber everything due to build_nexe change in nacl r13424') + print( + '[chromium-dev] PSA: clobber build needed for IDR_INSPECTOR_* compil...') + print('blink_resources.grd changed: crbug.com/400860') + print('ninja dependency cycle: crbug.com/408192') + print('Clobber to fix missing NaCl gyp dependencies (crbug.com/427427).') + print('Another clobber for missing NaCl gyp deps (crbug.com/427427).') + print( + 'Clobber to fix GN not picking up increased ID range (crbug.com/444902)') + print('Remove NaCl toolchains from the output dir (crbug.com/456902)') + if host_os() == 'win': + print('Clobber to delete stale generated files (crbug.com/510086)') + if host_os() == 'mac': + print('Clobber to get rid of evil libsqlite3.dylib (crbug.com/526208)') + if host_os() == 'mac': + print('Clobber to remove libsystem.dylib. 
+  if host_os() == 'mac':
+    print('Clobber to get past mojo gen build error (crbug.com/679607)')
+  if host_os() == 'win':
+    print('Clobber Windows to fix strange PCH-not-rebuilt errors.')
+  print('Clobber all to fix GN breakage (crbug.com/736215)')
+  print('The Great Blink mv for source files (crbug.com/768828)')
+  if host_os() == 'linux':
+    print('Clobber to work around buggy .ninja_deps cycle (crbug.com/934404)')
+  print('Clobber to flush stale generated files. See crbug.com/1406628')
+  print('Clobber to flush old .ninja_log files for updating ninja. '
+        'See crbug.com/1406628#c14')
+
+
+def main():
+  print_landmines()
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/get_symlink_targets.py b/get_symlink_targets.py
new file mode 100755
index 000000000000..850bbae70fae
--- /dev/null
+++ b/get_symlink_targets.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python3
+# Copyright 2019 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Prints the target paths of the given symlinks.
+
+Prints out each target in the order that the links were passed in.
+"""
+
+import os
+import sys
+
+
+def main():
+  for link_name in sys.argv[1:]:
+    if not os.path.islink(link_name):
+      sys.stderr.write("%s is not a link\n" % link_name)
+      return 1
+    target = os.readlink(link_name)
+    if not os.path.isabs(target):
+      target = os.path.join(os.path.dirname(link_name), target)
+    print(os.path.realpath(target))
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/gn_helpers.py b/gn_helpers.py
new file mode 100644
index 000000000000..34a282eec97b
--- /dev/null
+++ b/gn_helpers.py
@@ -0,0 +1,542 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions useful when writing scripts that integrate with GN.
+
+The main functions are ToGNString() and FromGNString(), to convert between
+serialized GN variables and Python variables.
+
+To use in an arbitrary Python file in the build:
+
+  import os
+  import sys
+
+  sys.path.append(os.path.join(os.path.dirname(__file__),
+                               os.pardir, os.pardir, 'build'))
+  import gn_helpers
+
+Where the sequence of parameters to join is the relative path from your source
+file to the build directory.
+"""
+
+import json
+import os
+import re
+import sys
+
+
+_CHROMIUM_ROOT = os.path.join(os.path.dirname(__file__), os.pardir)
+
+BUILD_VARS_FILENAME = 'build_vars.json'
+IMPORT_RE = re.compile(r'^import\("//(\S+)"\)')
+
+
+class GNError(Exception):
+  pass
+
+
+# Computes the ASCII code of an element of an encoded Python 2 str /
+# Python 3 bytes.
+_Ord = ord if sys.version_info.major < 3 else lambda c: c
+
+
+def _TranslateToGnChars(s):
+  for decoded_ch in s.encode('utf-8'):  # str in Python 2, bytes in Python 3.
+    code = _Ord(decoded_ch)  # int
+    if code in (34, 36, 92):  # For '"', '$', or '\\'.
+      yield '\\' + chr(code)
+    elif 32 <= code < 127:
+      yield chr(code)
+    else:
+      yield '$0x%02X' % code
+
+
+def ToGNString(value, pretty=False):
+  """Returns a stringified GN equivalent of a Python value.
+
+  Args:
+    value: The Python value to convert.
+    pretty: Whether to pretty print. If true, then non-empty lists are rendered
+        recursively with one item per line, with indents. Otherwise lists are
+        rendered without newlines.
+  Returns:
+    The stringified GN equivalent to |value|.
+
+  Raises:
+    GNError: |value| cannot be printed to GN.
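+
+  Example (illustrative; these pairs mirror cases exercised in
+  gn_helpers_unittest.py):
+    ToGNString([3, 1, 4, 1]) -> '[ 3, 1, 4, 1 ]'
+    ToGNString({'single': 'item'}) -> 'single = "item"\n'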
+  """
+
+  if sys.version_info.major < 3:
+    basestring_compat = basestring
+  else:
+    basestring_compat = str
+
+  # Emits all output tokens without intervening whitespaces.
+  def GenerateTokens(v, level):
+    if isinstance(v, basestring_compat):
+      yield '"' + ''.join(_TranslateToGnChars(v)) + '"'
+
+    elif isinstance(v, bool):
+      yield 'true' if v else 'false'
+
+    elif isinstance(v, int):
+      yield str(v)
+
+    elif isinstance(v, list):
+      yield '['
+      for i, item in enumerate(v):
+        if i > 0:
+          yield ','
+        for tok in GenerateTokens(item, level + 1):
+          yield tok
+      yield ']'
+
+    elif isinstance(v, dict):
+      if level > 0:
+        yield '{'
+      for key in sorted(v):
+        if not isinstance(key, basestring_compat):
+          raise GNError('Dictionary key is not a string.')
+        if not key or key[0].isdigit() or not key.replace('_', '').isalnum():
+          raise GNError('Dictionary key is not a valid GN identifier.')
+        yield key  # No quotations.
+        yield '='
+        for tok in GenerateTokens(v[key], level + 1):
+          yield tok
+      if level > 0:
+        yield '}'
+
+    else:  # Not supporting float: Add only when needed.
+      raise GNError('Unsupported type when printing to GN.')
+
+  can_start = lambda tok: tok and tok not in ',}]='
+  can_end = lambda tok: tok and tok not in ',{[='
+
+  # Adds whitespaces, trying to keep everything (except dicts) in 1 line.
+  def PlainGlue(gen):
+    prev_tok = None
+    for i, tok in enumerate(gen):
+      if i > 0:
+        if can_end(prev_tok) and can_start(tok):
+          yield '\n'  # New dict item.
+        elif prev_tok == '[' and tok == ']':
+          yield ' '  # Special case for [].
+        elif tok != ',':
+          yield ' '
+      yield tok
+      prev_tok = tok
+
+  # Adds whitespaces so non-empty lists can span multiple lines, with indent.
+  def PrettyGlue(gen):
+    prev_tok = None
+    level = 0
+    for i, tok in enumerate(gen):
+      if i > 0:
+        if can_end(prev_tok) and can_start(tok):
+          yield '\n' + '  ' * level  # New dict item.
+        elif tok == '=' or prev_tok in '=':
+          yield ' '  # Separator before and after '=', on same line.
+      if tok in ']}':
+        level -= 1
+        # Exclude '[]' and '{}' cases.
+        if int(prev_tok == '[') + int(tok == ']') == 1 or \
+           int(prev_tok == '{') + int(tok == '}') == 1:
+          yield '\n' + '  ' * level
+      yield tok
+      if tok in '[{':
+        level += 1
+      if tok == ',':
+        yield '\n' + '  ' * level
+      prev_tok = tok
+
+  token_gen = GenerateTokens(value, 0)
+  ret = ''.join((PrettyGlue if pretty else PlainGlue)(token_gen))
+  # Add terminating '\n' for dict |value| or multi-line output.
+  if isinstance(value, dict) or '\n' in ret:
+    return ret + '\n'
+  return ret
+
+
+def FromGNString(input_string):
+  """Converts the input string from a GN serialized value to Python values.
+
+  For details on supported types see GNValueParser.Parse() below.
+
+  If your GN script did:
+    something = [ "file1", "file2" ]
+    args = [ "--values=$something" ]
+  The command line would look something like:
+    --values="[ \"file1\", \"file2\" ]"
+  Which when interpreted as a command line gives the value:
+    [ "file1", "file2" ]
+
+  You can parse this into a Python list using GN rules with:
+    input_values = FromGNString(options.values)
+  Although the Python 'ast' module will parse many forms of such input, it
+  will not handle GN escaping properly, nor GN booleans. You should use this
+  function instead.
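+
+  Example (illustrative; taken from gn_helpers_unittest.py):
+    FromGNString('[1, -20, true, false,["as\\"", []]]')
+        -> [1, -20, True, False, ['as"', []]]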
+
+  A NOTE ON STRING HANDLING:
+
+  If you just pass a string on the command line to your Python script, or use
+  string interpolation on a string variable, the strings will not be quoted:
+    str = "asdf"
+    args = [ str, "--value=$str" ]
+  Will yield the command line:
+    asdf --value=asdf
+  The unquoted asdf string will not be valid input to this function, which
+  accepts only quoted strings like GN scripts. In such cases, you can just use
+  the Python string literal directly.
+
+  The main use case for this is other types, in particular lists. When
+  using string interpolation on a list (as in the top example) the embedded
+  strings will be quoted and escaped according to GN rules so the list can be
+  re-parsed to get the same result.
+  """
+  parser = GNValueParser(input_string)
+  return parser.Parse()
+
+
+def FromGNArgs(input_string):
+  """Converts a string with a bunch of gn arg assignments into a Python dict.
+
+  Given a whitespace-separated list of
+
+    <ident> = (integer | string | boolean | <list of the former>)
+
+  gn assignments, this returns a Python dict, i.e.:
+
+    FromGNArgs('foo=true\nbar=1\n') -> { 'foo': True, 'bar': 1 }.
+
+  Only simple types and lists are supported; variables, structs, calls,
+  and other, more complicated things are not.
+
+  This routine is meant to handle only the simple sorts of values that
+  arise in parsing --args.
+  """
+  parser = GNValueParser(input_string)
+  return parser.ParseArgs()
+
+
+def UnescapeGNString(value):
+  """Given a string with GN escaping, returns the unescaped string.
+
+  Be careful not to feed it input from a Python parsing function like
+  'ast', because that will do Python unescaping, which will be incorrect when
+  fed into the GN unescaper.
+
+  Args:
+    value: Input string to unescape.
+  """
+  result = ''
+  i = 0
+  while i < len(value):
+    if value[i] == '\\':
+      if i < len(value) - 1:
+        next_char = value[i + 1]
+        if next_char in ('$', '"', '\\'):
+          # These are the escaped characters GN supports.
+          result += next_char
+          i += 1
+        else:
+          # Any other backslash is a literal.
+          result += '\\'
+    else:
+      result += value[i]
+    i += 1
+  return result
+
+
+def _IsDigitOrMinus(char):
+  return char in '-0123456789'
+
+
+class GNValueParser(object):
+  """Duplicates GN parsing of values and converts to Python types.
+
+  Normally you would use the wrapper function FromGNString() above.
+
+  If you expect input as a specific type, you can also call one of the Parse*
+  functions directly. All functions throw GNError on invalid input.
+  """
+
+  def __init__(self, string, checkout_root=_CHROMIUM_ROOT):
+    self.input = string
+    self.cur = 0
+    self.checkout_root = checkout_root
+
+  def IsDone(self):
+    return self.cur == len(self.input)
+
+  def ReplaceImports(self):
+    """Replaces import(...) lines with the contents of the imports.
+
+    Recurses on itself until there are no imports remaining, in the case of
+    nested imports.
+    """
+    lines = self.input.splitlines()
+    if not any(line.startswith('import(') for line in lines):
+      return
+    for line in lines:
+      if not line.startswith('import('):
+        continue
+      regex_match = IMPORT_RE.match(line)
+      if not regex_match:
+        raise GNError('Not a valid import string: %s' % line)
+      import_path = os.path.join(self.checkout_root, regex_match.group(1))
+      with open(import_path) as f:
+        imported_args = f.read()
+      self.input = self.input.replace(line, imported_args)
+    # Call ourselves again if we've just replaced an import() with additional
+    # imports.
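+    # Note: this assumes imported files do not themselves form an import
+    # cycle; a cycle would make this recursion never terminate.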
+    self.ReplaceImports()
+
+  def _ConsumeWhitespace(self):
+    while not self.IsDone() and self.input[self.cur] in ' \t\n':
+      self.cur += 1
+
+  def ConsumeCommentAndWhitespace(self):
+    self._ConsumeWhitespace()
+
+    # Consume each comment, line by line.
+    while not self.IsDone() and self.input[self.cur] == '#':
+      # Consume the rest of the comment, up until the end of the line.
+      while not self.IsDone() and self.input[self.cur] != '\n':
+        self.cur += 1
+      # Move the cursor to the next line (if there is one).
+      if not self.IsDone():
+        self.cur += 1
+
+      self._ConsumeWhitespace()
+
+  def Parse(self):
+    """Converts a string representing a printed GN value to the Python type.
+
+    See additional usage notes on FromGNString() above.
+
+    * GN booleans ('true', 'false') will be converted to Python booleans.
+
+    * GN numbers ('123') will be converted to Python numbers.
+
+    * GN strings (double-quoted as in '"asdf"') will be converted to Python
+      strings with GN escaping rules. GN string interpolation (embedded
+      variables preceded by $) is not supported and will be returned as
+      literals.
+
+    * GN lists ('[1, "asdf", 3]') will be converted to Python lists.
+
+    * GN scopes ('{ ... }') are not supported.
+
+    Raises:
+      GNError: Parse fails.
+    """
+    result = self._ParseAllowTrailing()
+    self.ConsumeCommentAndWhitespace()
+    if not self.IsDone():
+      raise GNError("Trailing input after parsing:\n  " +
+                    self.input[self.cur:])
+    return result
+
+  def ParseArgs(self):
+    """Converts a whitespace-separated list of ident=literals to a dict.
+
+    See additional usage notes on FromGNArgs(), above.
+
+    Raises:
+      GNError: Parse fails.
+    """
+    d = {}
+
+    self.ReplaceImports()
+    self.ConsumeCommentAndWhitespace()
+
+    while not self.IsDone():
+      ident = self._ParseIdent()
+      self.ConsumeCommentAndWhitespace()
+      if self.input[self.cur] != '=':
+        raise GNError("Unexpected token: " + self.input[self.cur:])
+      self.cur += 1
+      self.ConsumeCommentAndWhitespace()
+      val = self._ParseAllowTrailing()
+      self.ConsumeCommentAndWhitespace()
+      d[ident] = val
+
+    return d
+
+  def _ParseAllowTrailing(self):
+    """Internal version of Parse() that doesn't check for trailing stuff."""
+    self.ConsumeCommentAndWhitespace()
+    if self.IsDone():
+      raise GNError("Expected input to parse.")
+
+    next_char = self.input[self.cur]
+    if next_char == '[':
+      return self.ParseList()
+    elif next_char == '{':
+      return self.ParseScope()
+    elif _IsDigitOrMinus(next_char):
+      return self.ParseNumber()
+    elif next_char == '"':
+      return self.ParseString()
+    elif self._ConstantFollows('true'):
+      return True
+    elif self._ConstantFollows('false'):
+      return False
+    else:
+      raise GNError("Unexpected token: " + self.input[self.cur:])
+
+  def _ParseIdent(self):
+    ident = ''
+
+    next_char = self.input[self.cur]
+    if not next_char.isalpha() and not next_char == '_':
+      raise GNError("Expected an identifier: " + self.input[self.cur:])
+
+    ident += next_char
+    self.cur += 1
+
+    next_char = self.input[self.cur]
+    while next_char.isalpha() or next_char.isdigit() or next_char == '_':
+      ident += next_char
+      self.cur += 1
+      next_char = self.input[self.cur]
+
+    return ident
+
+  def ParseNumber(self):
+    self.ConsumeCommentAndWhitespace()
+    if self.IsDone():
+      raise GNError('Expected number but got nothing.')
+
+    begin = self.cur
+
+    # The first character can include a negative sign.
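+    # For example, '-20' parses to the Python int -20, while a lone '-' with
+    # no digits is rejected below.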
+ if not self.IsDone() and _IsDigitOrMinus(self.input[self.cur]): + self.cur += 1 + while not self.IsDone() and self.input[self.cur].isdigit(): + self.cur += 1 + + number_string = self.input[begin:self.cur] + if not len(number_string) or number_string == '-': + raise GNError('Not a valid number.') + return int(number_string) + + def ParseString(self): + self.ConsumeCommentAndWhitespace() + if self.IsDone(): + raise GNError('Expected string but got nothing.') + + if self.input[self.cur] != '"': + raise GNError('Expected string beginning in a " but got:\n ' + + self.input[self.cur:]) + self.cur += 1 # Skip over quote. + + begin = self.cur + while not self.IsDone() and self.input[self.cur] != '"': + if self.input[self.cur] == '\\': + self.cur += 1 # Skip over the backslash. + if self.IsDone(): + raise GNError('String ends in a backslash in:\n ' + self.input) + self.cur += 1 + + if self.IsDone(): + raise GNError('Unterminated string:\n ' + self.input[begin:]) + + end = self.cur + self.cur += 1 # Consume trailing ". + + return UnescapeGNString(self.input[begin:end]) + + def ParseList(self): + self.ConsumeCommentAndWhitespace() + if self.IsDone(): + raise GNError('Expected list but got nothing.') + + # Skip over opening '['. + if self.input[self.cur] != '[': + raise GNError('Expected [ for list but got:\n ' + self.input[self.cur:]) + self.cur += 1 + self.ConsumeCommentAndWhitespace() + if self.IsDone(): + raise GNError('Unterminated list:\n ' + self.input) + + list_result = [] + previous_had_trailing_comma = True + while not self.IsDone(): + if self.input[self.cur] == ']': + self.cur += 1 # Skip over ']'. + return list_result + + if not previous_had_trailing_comma: + raise GNError('List items not separated by comma.') + + list_result += [ self._ParseAllowTrailing() ] + self.ConsumeCommentAndWhitespace() + if self.IsDone(): + break + + # Consume comma if there is one. + previous_had_trailing_comma = self.input[self.cur] == ',' + if previous_had_trailing_comma: + # Consume comma. + self.cur += 1 + self.ConsumeCommentAndWhitespace() + + raise GNError('Unterminated list:\n ' + self.input) + + def ParseScope(self): + self.ConsumeCommentAndWhitespace() + if self.IsDone(): + raise GNError('Expected scope but got nothing.') + + # Skip over opening '{'. + if self.input[self.cur] != '{': + raise GNError('Expected { for scope but got:\n ' + self.input[self.cur:]) + self.cur += 1 + self.ConsumeCommentAndWhitespace() + if self.IsDone(): + raise GNError('Unterminated scope:\n ' + self.input) + + scope_result = {} + while not self.IsDone(): + if self.input[self.cur] == '}': + self.cur += 1 + return scope_result + + ident = self._ParseIdent() + self.ConsumeCommentAndWhitespace() + if self.input[self.cur] != '=': + raise GNError("Unexpected token: " + self.input[self.cur:]) + self.cur += 1 + self.ConsumeCommentAndWhitespace() + val = self._ParseAllowTrailing() + self.ConsumeCommentAndWhitespace() + scope_result[ident] = val + + raise GNError('Unterminated scope:\n ' + self.input) + + def _ConstantFollows(self, constant): + """Checks and maybe consumes a string constant at current input location. + + Param: + constant: The string constant to check. + + Returns: + True if |constant| follows immediately at the current location in the + input. In this case, the string is consumed as a side effect. Otherwise, + returns False and the current position is unchanged. + """ + end = self.cur + len(constant) + if end > len(self.input): + return False # Not enough room. 
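+    # Otherwise, compare the next len(constant) characters and consume them
+    # on a match.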
+    if self.input[self.cur:end] == constant:
+      self.cur = end
+      return True
+    return False
+
+
+def ReadBuildVars(output_directory):
+  """Parses $output_directory/build_vars.json into a dict."""
+  with open(os.path.join(output_directory, BUILD_VARS_FILENAME)) as f:
+    return json.load(f)
diff --git a/gn_helpers_unittest.py b/gn_helpers_unittest.py
new file mode 100755
index 000000000000..bb0f31fdf281
--- /dev/null
+++ b/gn_helpers_unittest.py
@@ -0,0 +1,316 @@
+#!/usr/bin/env python3
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import mock
+import sys
+import textwrap
+import unittest
+
+import gn_helpers
+
+
+class UnitTest(unittest.TestCase):
+  def test_ToGNString(self):
+    test_cases = [
+        (42, '42', '42'), ('foo', '"foo"', '"foo"'), (True, 'true', 'true'),
+        (False, 'false', 'false'), ('', '""', '""'),
+        ('\\$"$\\', '"\\\\\\$\\"\\$\\\\"', '"\\\\\\$\\"\\$\\\\"'),
+        (' \t\r\n', '" $0x09$0x0D$0x0A"', '" $0x09$0x0D$0x0A"'),
+        (u'\u2713', '"$0xE2$0x9C$0x93"', '"$0xE2$0x9C$0x93"'),
+        ([], '[ ]', '[]'), ([1], '[ 1 ]', '[\n  1\n]\n'),
+        ([3, 1, 4, 1], '[ 3, 1, 4, 1 ]', '[\n  3,\n  1,\n  4,\n  1\n]\n'),
+        (['a', True, 2], '[ "a", true, 2 ]', '[\n  "a",\n  true,\n  2\n]\n'),
+        ({
+            'single': 'item'
+        }, 'single = "item"\n', 'single = "item"\n'),
+        ({
+            'kEy': 137,
+            '_42A_Zaz_': [False, True]
+        }, '_42A_Zaz_ = [ false, true ]\nkEy = 137\n',
+         '_42A_Zaz_ = [\n  false,\n  true\n]\nkEy = 137\n'),
+        ([1, 'two',
+          ['"thr,.$\\', True, False, [],
+           u'(\u2713)']], '[ 1, "two", [ "\\"thr,.\\$\\\\", true, false, ' +
+         '[ ], "($0xE2$0x9C$0x93)" ] ]', '''[
+  1,
+  "two",
+  [
+    "\\"thr,.\\$\\\\",
+    true,
+    false,
+    [],
+    "($0xE2$0x9C$0x93)"
+  ]
+]
+'''),
+        ({
+            's': 'foo',
+            'n': 42,
+            'b': True,
+            'a': [3, 'x']
+        }, 'a = [ 3, "x" ]\nb = true\nn = 42\ns = "foo"\n',
+         'a = [\n  3,\n  "x"\n]\nb = true\nn = 42\ns = "foo"\n'),
+        (
+            [[[], [[]]], []],
+            '[ [ [ ], [ [ ] ] ], [ ] ]',
+            '[\n  [\n    [],\n    [\n      []\n    ]\n  ],\n  []\n]\n',
+        ),
+        (
+            [{
+                'a': 1,
+                'c': {
+                    'z': 8
+                },
+                'b': []
+            }],
+            '[ { a = 1\nb = [ ]\nc = { z = 8 } } ]\n',
+            '[\n  {\n    a = 1\n    b = []\n    c = {\n' +
+            '      z = 8\n    }\n  }\n]\n',
+        )
+    ]
+    for obj, exp_ugly, exp_pretty in test_cases:
+      out_ugly = gn_helpers.ToGNString(obj)
+      self.assertEqual(exp_ugly, out_ugly)
+      out_pretty = gn_helpers.ToGNString(obj, pretty=True)
+      self.assertEqual(exp_pretty, out_pretty)
+
+  def test_UnescapeGNString(self):
+    # Backslash followed by a \, $, or " means the following character without
+    # its special meaning. Backslash followed by everything else is a literal.
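+    # For example, the two-character input \% stays \%, while \$ unescapes
+    # to just $.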
+    self.assertEqual(
+        gn_helpers.UnescapeGNString('\\as\\$\\\\asd\\"'),
+        '\\as$\\asd"')
+
+  def test_FromGNString(self):
+    self.assertEqual(
+        gn_helpers.FromGNString('[1, -20, true, false,["as\\"", []]]'),
+        [ 1, -20, True, False, [ 'as"', [] ] ])
+
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('123 456')
+      parser.Parse()
+
+  def test_ParseBool(self):
+    parser = gn_helpers.GNValueParser('true')
+    self.assertEqual(parser.Parse(), True)
+
+    parser = gn_helpers.GNValueParser('false')
+    self.assertEqual(parser.Parse(), False)
+
+  def test_ParseNumber(self):
+    parser = gn_helpers.GNValueParser('123')
+    self.assertEqual(parser.ParseNumber(), 123)
+
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('')
+      parser.ParseNumber()
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('a123')
+      parser.ParseNumber()
+
+  def test_ParseString(self):
+    parser = gn_helpers.GNValueParser('"asdf"')
+    self.assertEqual(parser.ParseString(), 'asdf')
+
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('')  # Empty.
+      parser.ParseString()
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('asdf')  # Unquoted.
+      parser.ParseString()
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('"trailing')  # Unterminated.
+      parser.ParseString()
+
+  def test_ParseList(self):
+    parser = gn_helpers.GNValueParser('[1,]')  # Optional end comma OK.
+    self.assertEqual(parser.ParseList(), [ 1 ])
+
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('')  # Empty.
+      parser.ParseList()
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('asdf')  # No [].
+      parser.ParseList()
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('[1, 2')  # Unterminated.
+      parser.ParseList()
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('[1 2]')  # No separating comma.
+      parser.ParseList()
+
+  def test_ParseScope(self):
+    parser = gn_helpers.GNValueParser('{a = 1}')
+    self.assertEqual(parser.ParseScope(), {'a': 1})
+
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('')  # Empty.
+      parser.ParseScope()
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('asdf')  # No {}.
+      parser.ParseScope()
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('{a = 1')  # Unterminated.
+      parser.ParseScope()
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('{"a" = 1}')  # Not identifier.
+      parser.ParseScope()
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser('{a = }')  # No value.
+      parser.ParseScope()
+
+  def test_FromGNArgs(self):
+    # Booleans and numbers should work; whitespace is allowed.
+    self.assertEqual(gn_helpers.FromGNArgs('foo = true\nbar = 1\n'),
+                     {'foo': True, 'bar': 1})
+
+    # Whitespace is not required; strings should also work.
+    self.assertEqual(gn_helpers.FromGNArgs('foo="bar baz"'),
+                     {'foo': 'bar baz'})
+
+    # Comments should work (and be ignored).
+    gn_args_lines = [
+        '# Top-level comment.',
+        'foo = true',
+        'bar = 1  # In-line comment followed by whitespace.',
+        ' ',
+        'baz = false',
+    ]
+    self.assertEqual(gn_helpers.FromGNArgs('\n'.join(gn_args_lines)), {
+        'foo': True,
+        'bar': 1,
+        'baz': False
+    })
+
+    # Lists should work.
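+    # (Nested lists, and strings inside lists, are covered by
+    # test_FromGNString above.)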
+    self.assertEqual(gn_helpers.FromGNArgs('foo=[1, 2, 3]'),
+                     {'foo': [1, 2, 3]})
+
+    # Empty strings should return an empty dict.
+    self.assertEqual(gn_helpers.FromGNArgs(''), {})
+    self.assertEqual(gn_helpers.FromGNArgs('  \n  '), {})
+
+    # Comments should work everywhere (and be ignored).
+    gn_args_lines = [
+        '# Top-level comment.',
+        '',
+        '# Variable comment.',
+        'foo = true',
+        'bar = [',
+        '    # Value comment in list.',
+        '    1,',
+        '    2,',
+        ']',
+        '',
+        'baz  # Comment anywhere, really',
+        '  =  # also here',
+        '    4',
+    ]
+    self.assertEqual(gn_helpers.FromGNArgs('\n'.join(gn_args_lines)), {
+        'foo': True,
+        'bar': [1, 2],
+        'baz': 4
+    })
+
+    # Scopes should be parsed, even empty ones.
+    gn_args_lines = [
+        'foo = {',
+        '  a = 1',
+        '  b = [',
+        '    { },',
+        '    {',
+        '      c = 1',
+        '    },',
+        '  ]',
+        '}',
+    ]
+    self.assertEqual(gn_helpers.FromGNArgs('\n'.join(gn_args_lines)),
+                     {'foo': {
+                         'a': 1,
+                         'b': [
+                             {},
+                             {
+                                 'c': 1,
+                             },
+                         ]
+                     }})
+
+    # Non-identifiers should raise an exception.
+    with self.assertRaises(gn_helpers.GNError):
+      gn_helpers.FromGNArgs('123 = true')
+
+    # References to other variables should raise an exception.
+    with self.assertRaises(gn_helpers.GNError):
+      gn_helpers.FromGNArgs('foo = bar')
+
+    # References to functions should raise an exception.
+    with self.assertRaises(gn_helpers.GNError):
+      gn_helpers.FromGNArgs('foo = exec_script("//build/baz.py")')
+
+    # Underscores in identifiers should work.
+    self.assertEqual(gn_helpers.FromGNArgs('_foo = true'),
+                     {'_foo': True})
+    self.assertEqual(gn_helpers.FromGNArgs('foo_bar = true'),
+                     {'foo_bar': True})
+    self.assertEqual(gn_helpers.FromGNArgs('foo_=true'),
+                     {'foo_': True})
+
+  def test_ReplaceImports(self):
+    # Should be a no-op on args inputs without any imports.
+    parser = gn_helpers.GNValueParser(
+        textwrap.dedent("""
+        some_arg1 = "val1"
+        some_arg2 = "val2"
+        """))
+    parser.ReplaceImports()
+    self.assertEqual(
+        parser.input,
+        textwrap.dedent("""
+        some_arg1 = "val1"
+        some_arg2 = "val2"
+        """))
+
+    # A single "import(...)" line should be replaced with the contents of the
+    # file being imported.
+    parser = gn_helpers.GNValueParser(
+        textwrap.dedent("""
+        some_arg1 = "val1"
+        import("//some/args/file.gni")
+        some_arg2 = "val2"
+        """))
+    fake_import = 'some_imported_arg = "imported_val"'
+    builtin_var = '__builtin__' if sys.version_info.major < 3 else 'builtins'
+    open_fun = '{}.open'.format(builtin_var)
+    with mock.patch(open_fun, mock.mock_open(read_data=fake_import)):
+      parser.ReplaceImports()
+    self.assertEqual(
+        parser.input,
+        textwrap.dedent("""
+        some_arg1 = "val1"
+        some_imported_arg = "imported_val"
+        some_arg2 = "val2"
+        """))
+
+    # A missing closing parenthesis should raise an exception.
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser(
+          textwrap.dedent('import("//some/args/file.gni"'))
+      parser.ReplaceImports()
+
+    # Missing double quotes should raise an exception.
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser(
+          textwrap.dedent('import(//some/args/file.gni)'))
+      parser.ReplaceImports()
+
+    # A path that's not source absolute should raise an exception.
+    with self.assertRaises(gn_helpers.GNError):
+      parser = gn_helpers.GNValueParser(
+          textwrap.dedent('import("some/relative/args/file.gni")'))
+      parser.ReplaceImports()
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/gn_logs.gni b/gn_logs.gni
new file mode 100644
index 000000000000..495f38658321
--- /dev/null
+++ b/gn_logs.gni
@@ -0,0 +1,8 @@
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/concurrent_links.gni")
+
+# Log lines for gn_logs.txt that originate from within //build.
+build_gn_logs = [ "#### get_concurrent_links.py ####" ] + concurrent_links_logs
diff --git a/gn_run_binary.py b/gn_run_binary.py
new file mode 100644
index 000000000000..414f6952fe3c
--- /dev/null
+++ b/gn_run_binary.py
@@ -0,0 +1,35 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper script for GN to run an arbitrary binary. See compiled_action.gni.
+
+Run with:
+  python gn_run_binary.py <binary_name> [args ...]
+"""
+
+
+import os
+import subprocess
+import sys
+
+# This script is designed to run binaries produced by the current build. If the
+# given path is relative, prefix it with "./" to avoid picking up system
+# versions that might also be on the path.
+path = sys.argv[1]
+if not os.path.isabs(path):
+  path = './' + path
+
+# The rest of the arguments are passed directly to the executable.
+args = [path] + sys.argv[2:]
+
+ret = subprocess.call(args)
+if ret != 0:
+  if ret <= -100:
+    # Windows error codes such as 0xC0000005 and 0xC0000409 are much easier to
+    # recognize and differentiate in hex. In order to print them as unsigned
+    # hex we need to add 4 Gig to them.
+    print('%s failed with exit code 0x%08X' % (sys.argv[1], ret + (1 << 32)))
+  else:
+    print('%s failed with exit code %d' % (sys.argv[1], ret))
+sys.exit(ret)
diff --git a/install-build-deps.sh b/install-build-deps.sh
new file mode 100755
index 000000000000..de4081dc1678
--- /dev/null
+++ b/install-build-deps.sh
@@ -0,0 +1,812 @@
+#!/bin/bash -e
+
+# Copyright 2012 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install everything needed to build chromium (well, ideally, anyway)
+# including items requiring sudo privileges.
+# See https://chromium.googlesource.com/chromium/src/+/main/docs/linux/build_instructions.md
+# and https://chromium.googlesource.com/chromium/src/+/HEAD/docs/android_build_instructions.md
+
+usage() {
+  echo "Usage: $0 [--options]"
+  echo "Options:"
+  echo "--[no-]syms: enable or disable installation of debugging symbols"
+  echo "--lib32: enable installation of 32-bit libraries, e.g. for V8 snapshot"
+  echo "--[no-]android: enable or disable installation of android dependencies"
+  echo "--[no-]arm: enable or disable installation of arm cross toolchain"
+  echo "--[no-]chromeos-fonts: enable or disable installation of Chrome OS"\
+       "fonts"
+  echo "--[no-]nacl: enable or disable installation of prerequisites for"\
+       "building standalone NaCl and all its toolchains"
+  echo "--[no-]backwards-compatible: enable or disable installation of packages
+       that are no longer currently needed and have been removed from this
+       script. Useful for bisection."
+  echo "--no-prompt: silently select standard options/defaults"
+  echo "--quick-check: quickly try to determine if dependencies are installed"
+  echo "               (this avoids interactive prompts and sudo commands,"
+  echo "               so might not be 100% accurate)"
+  echo "--unsupported: attempt installation even on unsupported systems"
+  echo "Script will prompt interactively if options not given."
+  exit 1
+}
+
+# Build list of apt packages in dpkg --get-selections format.
+build_apt_package_list() {
+  echo "Building apt package list." >&2
+  apt-cache dumpavail | \
+    python3 -c 'import re,sys; \
+o = sys.stdin.read(); \
+p = {"i386": ":i386"}; \
+f = re.M | re.S; \
+r = re.compile(r"^Package: (.+?)$.+?^Architecture: (.+?)$", f); \
+m = ["%s%s" % (x, p.get(y, "")) for x, y in re.findall(r, o)]; \
+print("\n".join(m))'
+}
+
+# Checks whether a particular package is available in the repos.
+# Uses pre-formatted ${apt_package_list}.
+# USAGE: $ package_exists <package name>
+package_exists() {
+  if [ -z "${apt_package_list}" ]; then
+    echo "Call build_apt_package_list() prior to calling package_exists()" >&2
+    apt_package_list=$(build_apt_package_list)
+  fi
+  # `grep` takes a regex string, so the +'s in package names, e.g. "libstdc++",
+  # need to be escaped.
+  local escaped="$(echo $1 | sed 's/[\~\+\.\:-]/\\&/g')"
+  [ ! -z "$(grep "^${escaped}$" <<< "${apt_package_list}")" ]
}
+
+do_inst_arm=0
+do_inst_nacl=0
+do_inst_android=0
+
+while [ "$1" != "" ]
+do
+  case "$1" in
+  --syms) do_inst_syms=1;;
+  --no-syms) do_inst_syms=0;;
+  --lib32) do_inst_lib32=1;;
+  --android) do_inst_android=1;;
+  --no-android) do_inst_android=0;;
+  --arm) do_inst_arm=1;;
+  --no-arm) do_inst_arm=0;;
+  --chromeos-fonts) do_inst_chromeos_fonts=1;;
+  --no-chromeos-fonts) do_inst_chromeos_fonts=0;;
+  --nacl) do_inst_nacl=1;;
+  --no-nacl) do_inst_nacl=0;;
+  --backwards-compatible) do_inst_backwards_compatible=1;;
+  --no-backwards-compatible) do_inst_backwards_compatible=0;;
+  --add-cross-tool-repo) add_cross_tool_repo=1;;
+  --no-prompt) do_default=1
+    do_quietly="-qq --assume-yes"
+    ;;
+  --quick-check) do_quick_check=1;;
+  --unsupported) do_unsupported=1;;
+  *) usage;;
+  esac
+  shift
+done
+
+if [ "$do_inst_arm" = "1" ]; then
+  do_inst_lib32=1
+fi
+
+if [ "$do_inst_android" = "1" ]; then
+  do_inst_lib32=1
+fi
+
+# Check for the lsb_release command in $PATH.
+if ! which lsb_release > /dev/null; then
+  echo "ERROR: lsb_release not found in \$PATH" >&2
+  echo "try: sudo apt-get install lsb-release" >&2
+  exit 1;
+fi
+
+distro_codename=$(lsb_release --codename --short)
+distro_id=$(lsb_release --id --short)
+supported_codenames="(bionic|focal|jammy)"
+supported_ids="(Debian)"
+if [ 0 -eq "${do_unsupported-0}" ] && [ 0 -eq "${do_quick_check-0}" ] ; then
+  if [[ ! $distro_codename =~ $supported_codenames &&
+        ! $distro_id =~ $supported_ids ]]; then
+    echo -e "WARNING: Only the following distributions are supported; other
+    distributions may still attempt to install dependencies by passing the
+    \`--unsupported\` parameter\n" \
+      "\tUbuntu 18.04 LTS (bionic with EoL April 2028)\n" \
+      "\tUbuntu 20.04 LTS (focal with EoL April 2030)\n" \
+      "\tUbuntu 22.04 LTS (jammy with EoL April 2032)\n" \
+      "\tDebian 10 (buster) or later" >&2
+    exit 1
+  fi
+
+  # Check the system architecture.
+  if ! uname -m | egrep -q "i686|x86_64"; then
+    echo "Only x86 architectures are currently supported" >&2
+    exit
+  fi
+fi
+
+if [ "x$(id -u)" != x0 ] && [ 0 -eq "${do_quick_check-0}" ]; then
+  echo "Running as non-root user."
+ echo "You might have to enter your password one or more times for 'sudo'." + echo +fi + +if [ 0 -eq "${do_quick_check-0}" ] ; then + if [ "$do_inst_lib32" = "1" ] || [ "$do_inst_nacl" = "1" ]; then + sudo dpkg --add-architecture i386 + fi + sudo apt-get update +fi + +# Populate ${apt_package_list} for package_exists() parsing. +apt_package_list=$(build_apt_package_list) + +# Packages needed for chromeos only +chromeos_dev_list="libbluetooth-dev libxkbcommon-dev mesa-common-dev zstd" + +if package_exists realpath; then + chromeos_dev_list="${chromeos_dev_list} realpath" +fi + +# Packages needed for development +dev_list="\ + binutils + bison + bzip2 + cdbs + curl + dbus-x11 + dpkg-dev + elfutils + devscripts + fakeroot + flex + git-core + gperf + libasound2-dev + libatspi2.0-dev + libbrlapi-dev + libbz2-dev + libcairo2-dev + libcap-dev + libc6-dev + libcups2-dev + libcurl4-gnutls-dev + libdrm-dev + libelf-dev + libevdev-dev + libffi-dev + libgbm-dev + libglib2.0-dev + libglu1-mesa-dev + libgtk-3-dev + libkrb5-dev + libnspr4-dev + libnss3-dev + libpam0g-dev + libpci-dev + libpulse-dev + libsctp-dev + libspeechd-dev + libsqlite3-dev + libssl-dev + libsystemd-dev + libudev-dev + libva-dev + libwww-perl + libxshmfence-dev + libxslt1-dev + libxss-dev + libxt-dev + libxtst-dev + lighttpd + locales + openbox + p7zip + patch + perl + pkg-config + rpm + ruby + subversion + uuid-dev + wdiff + x11-utils + xcompmgr + xz-utils + zip + $chromeos_dev_list +" + +# 64-bit systems need a minimum set of 32-bit compat packages for the pre-built +# NaCl binaries. +if file -L /sbin/init | grep -q 'ELF 64-bit'; then + dev_list="${dev_list} libc6-i386 lib32stdc++6" + + # lib32gcc-s1 used to be called lib32gcc1 in older distros. + if package_exists lib32gcc-s1; then + dev_list="${dev_list} lib32gcc-s1" + elif package_exists lib32gcc1; then + dev_list="${dev_list} lib32gcc1" + fi +fi + +# Run-time libraries required by chromeos only +chromeos_lib_list="libpulse0 libbz2-1.0" + +# List of required run-time libraries +common_lib_list="\ + lib32z1 + libasound2 + libatk1.0-0 + libatspi2.0-0 + libc6 + libcairo2 + libcap2 + libcgi-session-perl + libcups2 + libdrm2 + libegl1 + libevdev2 + libexpat1 + libfontconfig1 + libfreetype6 + libgbm1 + libglib2.0-0 + libgl1 + libgtk-3-0 + libncurses5 + libpam0g + libpango-1.0-0 + libpangocairo-1.0-0 + libpci3 + libpcre3 + libpixman-1-0 + libspeechd2 + libstdc++6 + libsqlite3-0 + libuuid1 + libwayland-egl1 + libwayland-egl1-mesa + libx11-6 + libx11-xcb1 + libxau6 + libxcb1 + libxcomposite1 + libxcursor1 + libxdamage1 + libxdmcp6 + libxext6 + libxfixes3 + libxi6 + libxinerama1 + libxrandr2 + libxrender1 + libxtst6 + x11-utils + xvfb + zlib1g +" + +if package_exists libffi8; then + common_lib_list="${common_lib_list} libffi8" +elif package_exists libffi7; then + common_lib_list="${common_lib_list} libffi7" +elif package_exists libffi6; then + common_lib_list="${common_lib_list} libffi6" +fi + +# Full list of required run-time libraries +lib_list="\ + $common_lib_list + $chromeos_lib_list +" + +# 32-bit libraries needed e.g. 
to compile V8 snapshot for Android or armhf +lib32_list="linux-libc-dev:i386 libpci3:i386" + +# 32-bit libraries needed for a 32-bit build +# includes some 32-bit libraries required by the Android SDK +# See https://developer.android.com/sdk/installing/index.html?pkg=tools +lib32_list="$lib32_list + libasound2:i386 + libatk-bridge2.0-0:i386 + libatk1.0-0:i386 + libatspi2.0-0:i386 + libdbus-1-3:i386 + libegl1:i386 + libgl1:i386 + libglib2.0-0:i386 + libncurses5:i386 + libnss3:i386 + libpango-1.0-0:i386 + libpangocairo-1.0-0:i386 + libstdc++6:i386 + libwayland-egl1:i386 + libx11-xcb1:i386 + libxcomposite1:i386 + libxdamage1:i386 + libxkbcommon0:i386 + libxrandr2:i386 + libxtst6:i386 + zlib1g:i386 +" + +# Packages that have been removed from this script. Regardless of configuration +# or options passed to this script, whenever a package is removed, it should be +# added here. +backwards_compatible_list="\ + 7za + fonts-indic + fonts-ipafont + fonts-stix + fonts-thai-tlwg + fonts-tlwg-garuda + g++ + g++-4.8-multilib-arm-linux-gnueabihf + gcc-4.8-multilib-arm-linux-gnueabihf + g++-9-multilib-arm-linux-gnueabihf + gcc-9-multilib-arm-linux-gnueabihf + gcc-arm-linux-gnueabihf + g++-10-multilib-arm-linux-gnueabihf + gcc-10-multilib-arm-linux-gnueabihf + g++-10-arm-linux-gnueabihf + gcc-10-arm-linux-gnueabihf + git-svn + language-pack-da + language-pack-fr + language-pack-he + language-pack-zh-hant + libappindicator-dev + libappindicator1 + libappindicator3-1 + libappindicator3-dev + libdconf-dev + libdconf1 + libdconf1:i386 + libexif-dev + libexif12 + libexif12:i386 + libgbm-dev + libgbm-dev-lts-trusty + libgbm-dev-lts-xenial + libgconf-2-4:i386 + libgconf2-dev + libgl1-mesa-dev + libgl1-mesa-dev-lts-trusty + libgl1-mesa-dev-lts-xenial + libgl1-mesa-glx:i386 + libgl1-mesa-glx-lts-trusty:i386 + libgl1-mesa-glx-lts-xenial:i386 + libgles2-mesa-dev + libgles2-mesa-dev-lts-trusty + libgles2-mesa-dev-lts-xenial + libgtk-3-0:i386 + libgtk2.0-0 + libgtk2.0-0:i386 + libgtk2.0-dev + mesa-common-dev + mesa-common-dev-lts-trusty + mesa-common-dev-lts-xenial + msttcorefonts + python-dev + python-setuptools + ttf-dejavu-core + ttf-indic-fonts + ttf-kochi-gothic + ttf-kochi-mincho + ttf-mscorefonts-installer + xfonts-mathml +" + +if package_exists python-is-python2; then + backwards_compatible_list="${backwards_compatible_list} python-is-python2 python2-dev" +else + backwards_compatible_list="${backwards_compatible_list} python" +fi + +if package_exists python-crypto; then + backwards_compatible_list="${backwards_compatible_list} python-crypto" +fi + +if package_exists python-numpy; then + backwards_compatible_list="${backwards_compatible_list} python-numpy" +fi + +if package_exists python-openssl; then + backwards_compatible_list="${backwards_compatible_list} python-openssl" +fi + +if package_exists python-psutil; then + backwards_compatible_list="${backwards_compatible_list} python-psutil" +fi + +if package_exists python-yaml; then + backwards_compatible_list="${backwards_compatible_list} python-yaml" +fi +if package_exists apache2.2-bin; then + backwards_compatible_list="${backwards_compatible_list} apache2.2-bin" +else + backwards_compatible_list="${backwards_compatible_list} apache2-bin" +fi +if package_exists php8.1-cgi; then + backwards_compatible_list="${backwards_compatible_list} php8.1-cgi libapache2-mod-php8.1" +elif package_exists php8.0-cgi; then + backwards_compatible_list="${backwards_compatible_list} php8.0-cgi libapache2-mod-php8.0" +elif package_exists php7.4-cgi; then + 
  backwards_compatible_list="${backwards_compatible_list} php7.4-cgi libapache2-mod-php7.4"
+elif package_exists php7.3-cgi; then
+  backwards_compatible_list="${backwards_compatible_list} php7.3-cgi libapache2-mod-php7.3"
+elif package_exists php7.2-cgi; then
+  backwards_compatible_list="${backwards_compatible_list} php7.2-cgi libapache2-mod-php7.2"
+elif package_exists php7.1-cgi; then
+  backwards_compatible_list="${backwards_compatible_list} php7.1-cgi libapache2-mod-php7.1"
+elif package_exists php7.0-cgi; then
+  backwards_compatible_list="${backwards_compatible_list} php7.0-cgi libapache2-mod-php7.0"
+else
+  backwards_compatible_list="${backwards_compatible_list} php5-cgi libapache2-mod-php5"
+fi
+
+# arm cross toolchain packages needed to build chrome on armhf
+arm_list="libc6-dev-armhf-cross
+          linux-libc-dev-armhf-cross
+          g++-arm-linux-gnueabihf"
+
+# Workaround for a dependency issue on Ubuntu: http://crbug.com/435056
+case $distro_codename in
+  bionic)
+    arm_list+=" g++-5-multilib-arm-linux-gnueabihf
+              gcc-5-multilib-arm-linux-gnueabihf
+              gcc-arm-linux-gnueabihf"
+    ;;
+  focal)
+    arm_list+=" g++-10-multilib-arm-linux-gnueabihf
+              gcc-10-multilib-arm-linux-gnueabihf
+              gcc-arm-linux-gnueabihf"
+    ;;
+  jammy)
+    arm_list+=" gcc-arm-linux-gnueabihf
+              g++-11-arm-linux-gnueabihf
+              gcc-11-arm-linux-gnueabihf"
+    ;;
+esac
+
+# Packages to build NaCl, its toolchains, and its ports.
+naclports_list="ant autoconf bison cmake gawk intltool xutils-dev xsltproc"
+nacl_list="\
+  g++-mingw-w64-i686
+  lib32z1-dev
+  libasound2:i386
+  libcap2:i386
+  libelf-dev:i386
+  libfontconfig1:i386
+  libglib2.0-0:i386
+  libgpm2:i386
+  libncurses5:i386
+  lib32ncurses5-dev
+  libnss3:i386
+  libpango-1.0-0:i386
+  libtinfo-dev
+  libtinfo-dev:i386
+  libtool
+  libuuid1:i386
+  libxcomposite1:i386
+  libxcursor1:i386
+  libxdamage1:i386
+  libxi6:i386
+  libxrandr2:i386
+  libxss1:i386
+  libxtst6:i386
+  texinfo
+  xvfb
+  ${naclports_list}
+"
+
+# Some package names have changed over time.
+if package_exists libssl-dev; then
+  nacl_list="${nacl_list} libssl-dev:i386"
+elif package_exists libssl1.1; then
+  nacl_list="${nacl_list} libssl1.1:i386"
+elif package_exists libssl1.0.2; then
+  nacl_list="${nacl_list} libssl1.0.2:i386"
+else
+  nacl_list="${nacl_list} libssl1.0.0:i386"
+fi
+if package_exists libtinfo5; then
+  nacl_list="${nacl_list} libtinfo5"
+fi
+if package_exists libpng16-16; then
+  lib_list="${lib_list} libpng16-16"
+else
+  lib_list="${lib_list} libpng12-0"
+fi
+if package_exists libnspr4; then
+  lib_list="${lib_list} libnspr4 libnss3"
+else
+  lib_list="${lib_list} libnspr4-0d libnss3-1d"
+fi
+if package_exists libjpeg-dev; then
+  dev_list="${dev_list} libjpeg-dev"
+else
+  dev_list="${dev_list} libjpeg62-dev"
+fi
+if package_exists libudev1; then
+  dev_list="${dev_list} libudev1"
+  nacl_list="${nacl_list} libudev1:i386"
+else
+  dev_list="${dev_list} libudev0"
+  nacl_list="${nacl_list} libudev0:i386"
+fi
+if package_exists libbrlapi0.8; then
+  dev_list="${dev_list} libbrlapi0.8"
+elif package_exists libbrlapi0.7; then
+  dev_list="${dev_list} libbrlapi0.7"
+elif package_exists libbrlapi0.6; then
+  dev_list="${dev_list} libbrlapi0.6"
+else
+  dev_list="${dev_list} libbrlapi0.5"
+fi
+if package_exists libav-tools; then
+  dev_list="${dev_list} libav-tools"
+fi
+
+# Some packages are only needed if the distribution actually supports
+# installing them.
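+# (package_exists consults the apt package list built near the top of this
+# script, so these checks reflect what the local apt repos actually provide.)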
+if package_exists appmenu-gtk; then + lib_list="$lib_list appmenu-gtk" +fi +if package_exists libgnome-keyring0; then + lib_list="${lib_list} libgnome-keyring0" +fi +if package_exists libgnome-keyring-dev; then + lib_list="${lib_list} libgnome-keyring-dev" +fi +if package_exists libvulkan-dev; then + dev_list="${dev_list} libvulkan-dev" +fi +if package_exists libvulkan1; then + lib_list="${lib_list} libvulkan1" +fi +if package_exists libinput10; then + lib_list="${lib_list} libinput10" +fi +if package_exists libinput-dev; then + dev_list="${dev_list} libinput-dev" +fi +if package_exists snapcraft; then + dev_list="${dev_list} snapcraft" +fi + +# Cross-toolchain strip is needed for building the sysroots. +if package_exists binutils-arm-linux-gnueabihf; then + dev_list="${dev_list} binutils-arm-linux-gnueabihf" +fi +if package_exists binutils-aarch64-linux-gnu; then + dev_list="${dev_list} binutils-aarch64-linux-gnu" +fi +if package_exists binutils-mipsel-linux-gnu; then + dev_list="${dev_list} binutils-mipsel-linux-gnu" +fi +if package_exists binutils-mips64el-linux-gnuabi64; then + dev_list="${dev_list} binutils-mips64el-linux-gnuabi64" +fi + +# When cross building for arm/Android on 64-bit systems the host binaries +# that are part of v8 need to be compiled with -m32 which means +# that basic multilib support is needed. +if file -L /sbin/init | grep -q 'ELF 64-bit'; then + # gcc-multilib conflicts with the arm cross compiler but + # g++-X.Y-multilib gives us the 32-bit support that we need. Find out the + # appropriate value of X and Y by seeing what version the current + # distribution's g++-multilib package depends on. + multilib_package=$(apt-cache depends g++-multilib --important | \ + grep -E --color=never --only-matching '\bg\+\+-[0-9.]+-multilib\b') + lib32_list="$lib32_list $multilib_package" +fi + +if [ "$do_inst_syms" = "1" ]; then + echo "Including debugging symbols." + + # Debian is in the process of transitioning to automatic debug packages, which + # have the -dbgsym suffix (https://wiki.debian.org/AutomaticDebugPackages). + # Untransitioned packages have the -dbg suffix. And on some systems, neither + # will be available, so exclude the ones that are missing. 
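+  # Helper: echoes the preferred debug package name for its argument, or
+  # nothing if neither the -dbgsym nor the -dbg variant exists.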
+  dbg_package_name() {
+    if package_exists "$1-dbgsym"; then
+      echo "$1-dbgsym"
+    elif package_exists "$1-dbg"; then
+      echo "$1-dbg"
+    fi
+  }
+
+  # Intentionally unquoted so the loop visits each package name in the list.
+  for package in ${common_lib_list}; do
+    dbg_list="$dbg_list $(dbg_package_name ${package})"
+  done
+
+  # Debugging symbols packages not following the common naming scheme.
+  if [ "$(dbg_package_name libstdc++6)" == "" ]; then
+    if package_exists libstdc++6-8-dbg; then
+      dbg_list="${dbg_list} libstdc++6-8-dbg"
+    elif package_exists libstdc++6-7-dbg; then
+      dbg_list="${dbg_list} libstdc++6-7-dbg"
+    elif package_exists libstdc++6-6-dbg; then
+      dbg_list="${dbg_list} libstdc++6-6-dbg"
+    elif package_exists libstdc++6-5-dbg; then
+      dbg_list="${dbg_list} libstdc++6-5-dbg"
+    elif package_exists libstdc++6-4.9-dbg; then
+      dbg_list="${dbg_list} libstdc++6-4.9-dbg"
+    elif package_exists libstdc++6-4.8-dbg; then
+      dbg_list="${dbg_list} libstdc++6-4.8-dbg"
+    elif package_exists libstdc++6-4.7-dbg; then
+      dbg_list="${dbg_list} libstdc++6-4.7-dbg"
+    elif package_exists libstdc++6-4.6-dbg; then
+      dbg_list="${dbg_list} libstdc++6-4.6-dbg"
+    fi
+  fi
+  if [ "$(dbg_package_name libatk1.0-0)" == "" ]; then
+    dbg_list="$dbg_list $(dbg_package_name libatk1.0)"
+  fi
+  if [ "$(dbg_package_name libpango-1.0-0)" == "" ]; then
+    dbg_list="$dbg_list $(dbg_package_name libpango1.0-dev)"
+  fi
+else
+  echo "Skipping debugging symbols."
+  dbg_list=
+fi
+
+if [ "$do_inst_lib32" = "1" ]; then
+  echo "Including 32-bit libraries."
+else
+  echo "Skipping 32-bit libraries."
+  lib32_list=
+fi
+
+if [ "$do_inst_android" = "1" ]; then
+  echo "Including Android dependencies."
+else
+  echo "Skipping Android dependencies."
+fi
+
+if [ "$do_inst_arm" = "1" ]; then
+  echo "Including ARM cross toolchain."
+else
+  echo "Skipping ARM cross toolchain."
+  arm_list=
+fi
+
+if [ "$do_inst_nacl" = "1" ]; then
+  echo "Including NaCl, NaCl toolchain, NaCl ports dependencies."
+else
+  echo "Skipping NaCl, NaCl toolchain, NaCl ports dependencies."
+  nacl_list=
+fi
+
+filtered_backwards_compatible_list=
+if [ "$do_inst_backwards_compatible" = "1" ]; then
+  echo "Including backwards compatible packages."
+  for package in ${backwards_compatible_list}; do
+    if package_exists ${package}; then
+      filtered_backwards_compatible_list+=" ${package}"
+    fi
+  done
+fi
+
+# The `sort -r -s -t: -k2` sorts all the :i386 packages to the front, to avoid
+# confusing dpkg-query (crbug.com/446172).
+packages="$(
+  echo "${dev_list} ${lib_list} ${dbg_list} ${lib32_list} ${arm_list}" \
+       "${nacl_list}" ${filtered_backwards_compatible_list} | tr " " "\n" | \
+  sort -u | sort -r -s -t: -k2 | tr "\n" " "
+)"
+
+if [ 1 -eq "${do_quick_check-0}" ] ; then
+  if ! missing_packages="$(dpkg-query -W -f ' ' ${packages} 2>&1)"; then
+    # Distinguish between packages that actually aren't available to the
+    # system (i.e. not in any repo) and packages that just aren't known to
+    # dpkg (i.e. managed by apt).
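+    # Probe each missing package with apt-cache to decide which bucket it
+    # belongs in.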
+ missing_packages="$(echo "${missing_packages}" | awk '{print $NF}')" + not_installed="" + unknown="" + for p in ${missing_packages}; do + if apt-cache show ${p} > /dev/null 2>&1; then + not_installed="${p}\n${not_installed}" + else + unknown="${p}\n${unknown}" + fi + done + if [ -n "${not_installed}" ]; then + echo "WARNING: The following packages are not installed:" + echo -e "${not_installed}" | sed -e "s/^/ /" + fi + if [ -n "${unknown}" ]; then + echo "WARNING: The following packages are unknown to your system" + echo "(maybe missing a repo or need to 'sudo apt-get update'):" + echo -e "${unknown}" | sed -e "s/^/ /" + fi + exit 1 + fi + exit 0 +fi + +echo "Finding missing packages..." +# Intentionally leaving $packages unquoted so it's more readable. +echo "Packages required: " $packages +echo +query_cmd="apt-get --just-print install $(echo $packages)" +if cmd_output="$(LANGUAGE=en LANG=C $query_cmd)"; then + new_list=$(echo "$cmd_output" | + sed -e '1,/The following NEW packages will be installed:/d;s/^ //;t;d' | + sed 's/ *$//') + upgrade_list=$(echo "$cmd_output" | + sed -e '1,/The following packages will be upgraded:/d;s/^ //;t;d' | + sed 's/ *$//') + if [ -z "$new_list" ] && [ -z "$upgrade_list" ]; then + echo "No missing packages, and the packages are up to date." + else + echo "Installing and upgrading packages: $new_list $upgrade_list." + sudo apt-get install ${do_quietly-} ${new_list} ${upgrade_list} + fi + echo +else + # An apt-get exit status of 100 indicates that a real error has occurred. + + # I am intentionally leaving out the '"'s around query_cmd, + # as this makes it easier to cut and paste the output + echo "The following command failed: " ${query_cmd} + echo + echo "It produced the following output:" + echo "$cmd_output" + echo + echo "You will have to install the above packages yourself." + echo + exit 100 +fi + +# Install the Chrome OS default fonts. This must go after running +# apt-get, since install-chromeos-fonts depends on curl. +if [ "$do_inst_chromeos_fonts" != "0" ]; then + echo + echo "Installing Chrome OS fonts." + dir=`echo $0 | sed -r -e 's/\/[^/]+$//'` + if ! sudo $dir/linux/install-chromeos-fonts.py; then + echo "ERROR: The installation of the Chrome OS default fonts failed." + if [ `stat -f -c %T $dir` == "nfs" ]; then + echo "The reason is that your repo is installed on a remote file system." + else + echo "This is expected if your repo is installed on a remote file system." + fi + echo "It is recommended to install your repo on a local file system." + echo "You can skip the installation of the Chrome OS default fonts with" + echo "the command line option: --no-chromeos-fonts." + exit 1 + fi +else + echo "Skipping installation of Chrome OS fonts." +fi + +echo "Installing locales." +CHROMIUM_LOCALES="da_DK.UTF-8 fr_FR.UTF-8 he_IL.UTF-8 zh_TW.UTF-8" +LOCALE_GEN=/etc/locale.gen +if [ -e ${LOCALE_GEN} ]; then + OLD_LOCALE_GEN="$(cat /etc/locale.gen)" + for CHROMIUM_LOCALE in ${CHROMIUM_LOCALES}; do + sudo sed -i "s/^# ${CHROMIUM_LOCALE}/${CHROMIUM_LOCALE}/" ${LOCALE_GEN} + done + # Regenerating locales can take a while, so only do it if we need to. + if (echo "${OLD_LOCALE_GEN}" | cmp -s ${LOCALE_GEN}); then + echo "Locales already up-to-date." 
+ else + sudo locale-gen + fi +else + for CHROMIUM_LOCALE in ${CHROMIUM_LOCALES}; do + sudo locale-gen ${CHROMIUM_LOCALE} + done +fi diff --git a/install-chroot.sh b/install-chroot.sh new file mode 100755 index 000000000000..a750e1897ce7 --- /dev/null +++ b/install-chroot.sh @@ -0,0 +1,888 @@ +#!/bin/bash -e + +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This script installs Debian-derived distributions in a chroot environment. +# It can for example be used to have an accurate 32bit build and test +# environment when otherwise working on a 64bit machine. +# N. B. it is unlikely that this script will ever work on anything other than a +# Debian-derived system. + +# Older Debian based systems had both "admin" and "adm" groups, with "admin" +# apparently being used in more places. Newer distributions have standardized +# on just the "adm" group. Check /etc/group for the preferred name of the +# administrator group. +admin=$(grep '^admin:' /etc/group >&/dev/null && echo admin || echo adm) + +usage() { + echo "usage: ${0##*/} [-m mirror] [-g group,...] [-s] [-c]" + echo "-b dir additional directories that should be bind mounted," + echo ' or "NONE".' + echo " Default: if local filesystems present, ask user for help" + echo "-g group,... groups that can use the chroot unauthenticated" + echo " Default: '${admin}' and current user's group ('$(id -gn)')" + echo "-l List all installed chroot environments" + echo "-m mirror an alternate repository mirror for package downloads" + echo "-s configure default deb-srcs" + echo "-c always copy 64bit helper binaries to 32bit chroot" + echo "-h this help message" +} + +process_opts() { + local OPTNAME OPTIND OPTERR OPTARG + while getopts ":b:g:lm:sch" OPTNAME; do + case "$OPTNAME" in + b) + if [ "${OPTARG}" = "NONE" -a -z "${bind_mounts}" ]; then + bind_mounts="${OPTARG}" + else + if [ "${bind_mounts}" = "NONE" -o "${OPTARG}" = "${OPTARG#/}" -o \ + ! -d "${OPTARG}" ]; then + echo "Invalid -b option(s)" + usage + exit 1 + fi + bind_mounts="${bind_mounts} +${OPTARG} ${OPTARG} none rw,bind 0 0" + fi + ;; + g) + [ -n "${OPTARG}" ] && + chroot_groups="${chroot_groups}${chroot_groups:+,}${OPTARG}" + ;; + l) + list_all_chroots + exit + ;; + m) + if [ -n "${mirror}" ]; then + echo "You can only specify exactly one mirror location" + usage + exit 1 + fi + mirror="$OPTARG" + ;; + s) + add_srcs="y" + ;; + c) + copy_64="y" + ;; + h) + usage + exit 0 + ;; + \:) + echo "'-$OPTARG' needs an argument." 
+ usage + exit 1 + ;; + *) + echo "invalid command-line option: $OPTARG" + usage + exit 1 + ;; + esac + done + + if [ $# -ge ${OPTIND} ]; then + eval echo "Unexpected command line argument: \${${OPTIND}}" + usage + exit 1 + fi +} + +list_all_chroots() { + for i in /var/lib/chroot/*; do + i="${i##*/}" + [ "${i}" = "*" ] && continue + [ -x "/usr/local/bin/${i%bit}" ] || continue + grep -qs "^\[${i%bit}\]\$" /etc/schroot/schroot.conf || continue + [ -r "/etc/schroot/script-${i}" -a \ + -r "/etc/schroot/mount-${i}" ] || continue + echo "${i%bit}" + done +} + +getkey() { + ( + trap 'stty echo -iuclc icanon 2>/dev/null' EXIT INT TERM QUIT HUP + stty -echo iuclc -icanon 2>/dev/null + dd count=1 bs=1 2>/dev/null + ) +} + +chr() { + printf "\\$(printf '%03o' "$1")" +} + +ord() { + printf '%d' $(printf '%c' "$1" | od -tu1 -An) +} + +is_network_drive() { + stat -c %T -f "$1/" 2>/dev/null | + egrep -qs '^nfs|cifs|smbfs' +} + +# Check that we are running as a regular user +[ "$(id -nu)" = root ] && { + echo "Run this script as a regular user and provide your \"sudo\"" \ + "password if requested" >&2 + exit 1 +} + +process_opts "$@" + +echo "This script will help you through the process of installing a" +echo "Debian or Ubuntu distribution in a chroot environment. You will" +echo "have to provide your \"sudo\" password when requested." +echo + +# Error handler +trap 'exit 1' INT TERM QUIT HUP +trap 'sudo apt-get clean; tput bel; echo; echo Failed' EXIT + +# Install any missing applications that this script relies on. If these packages +# are already installed, don't force another "apt-get install". That would +# prevent them from being auto-removed, if they ever become eligible for that. +# And as this script only needs the packages once, there is no good reason to +# introduce a hard dependency on things such as dchroot and debootstrap. +dep= +for i in dchroot debootstrap libwww-perl; do + [ -d /usr/share/doc/"$i" ] || dep="$dep $i" +done +[ -n "$dep" ] && sudo apt-get -y install $dep +sudo apt-get -y install schroot + +# Create directory for chroot +sudo mkdir -p /var/lib/chroot + +# Find chroot environments that can be installed with debootstrap +targets="$(cd /usr/share/debootstrap/scripts + ls | grep '^[a-z]*$')" + +# Ask user to pick one of the available targets +echo "The following targets are available to be installed in a chroot:" +j=1; for i in $targets; do + printf '%4d: %s\n' "$j" "$i" + j=$(($j+1)) +done +while :; do + printf "Which target would you like to install: " + read n + [ "$n" -gt 0 -a "$n" -lt "$j" ] >&/dev/null && break +done +j=1; for i in $targets; do + [ "$j" -eq "$n" ] && { distname="$i"; break; } + j=$(($j+1)) +done +echo + +# On x86-64, ask whether the user wants to install x86-32 or x86-64 +archflag= +arch= +if [ "$(uname -m)" = x86_64 ]; then + while :; do + echo "You are running a 64bit kernel. This allows you to install either a" + printf "32bit or a 64bit chroot environment. %s" \ + "Which one do you want (32, 64) " + read arch + [ "${arch}" == 32 -o "${arch}" == 64 ] && break + done + [ "${arch}" == 32 ] && archflag="--arch i386" || archflag="--arch amd64" + arch="${arch}bit" + echo +fi +target="${distname}${arch}" + +# Don't accidentally overwrite an existing installation +[ -d /var/lib/chroot/"${target}" ] && { + while :; do + echo "This chroot already exists on your machine." + if schroot -l --all-sessions 2>&1 | + sed 's/^session://' | + grep -qs "^${target%bit}-"; then + echo "And it appears to be in active use. 
Terminate all programs that"
+      echo "are currently using the chroot environment and then re-run this"
+      echo "script."
+      echo "If you still get an error message, you might have stale mounts"
+      echo "that you forgot to delete. You can always clean up mounts by"
+      echo "executing \"${target%bit} -c\"."
+      exit 1
+    fi
+    echo "I can abort installation, I can overwrite the existing chroot,"
+    echo "or I can delete the old one and then exit. What would you like to"
+    printf "do (a/o/d)? "
+    read choice
+    case "${choice}" in
+      a|A) exit 1;;
+      o|O) sudo rm -rf "/var/lib/chroot/${target}"; break;;
+      d|D) sudo rm -rf "/var/lib/chroot/${target}"     \
+                       "/usr/local/bin/${target%bit}"  \
+                       "/etc/schroot/mount-${target}"  \
+                       "/etc/schroot/script-${target}" \
+                       "/etc/schroot/${target}"
+           sudo sed -ni '/^[[]'"${target%bit}"']$/,${
+                         :1;n;/^[[]/b2;b1;:2;p;n;b2};p' \
+                        "/etc/schroot/schroot.conf"
+           trap '' INT TERM QUIT HUP
+           trap '' EXIT
+           echo "Deleted!"
+           exit 0;;
+    esac
+  done
+  echo
+}
+sudo mkdir -p /var/lib/chroot/"${target}"
+
+# Offer to include additional standard repositories for Ubuntu-based chroots.
+alt_repos=
+grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" && {
+  while :; do
+    echo "Would you like to add ${distname}-updates and ${distname}-security "
+    printf "to the chroot's sources.list (y/n)? "
+    read alt_repos
+    case "${alt_repos}" in
+      y|Y)
+        alt_repos="y"
+        break
+        ;;
+      n|N)
+        break
+        ;;
+    esac
+  done
+  echo
+}
+
+# Check for non-standard file system mount points and ask the user whether
+# they should be imported into the chroot environment.
+# We limit to the first 26 mount points that match some basic heuristics,
+# because a) that allows us to enumerate choices with a single character,
+# and b) if we find more than 26 mount points, then these are probably
+# false-positives and something is very unusual about the system's
+# configuration. No need to spam the user with even more information that
+# is likely completely irrelevant.
+if [ -z "${bind_mounts}" ]; then
+  mounts="$(awk '$2 != "/" && $2 !~ "^/boot" && $2 !~ "^/home" &&
+                 $2 !~ "^/media" && $2 !~ "^/run" &&
+                 ($3 ~ "ext[2-4]" || $3 == "reiserfs" || $3 == "btrfs" ||
+                  $3 == "xfs" || $3 == "jfs" || $3 == "u?msdos" ||
+                  $3 == "v?fat" || $3 == "hfs" || $3 == "ntfs" ||
+                  $3 ~ "nfs[4-9]?" || $3 == "smbfs" || $3 == "cifs") {
+                   print $2
+                 }' /proc/mounts |
+            head -n26)"
+  if [ -n "${mounts}" ]; then
+    echo "You appear to have non-standard mount points that you"
+    echo "might want to import into the chroot environment:"
+    echo
+    sel=
+    while :; do
+      # Print a menu, listing all non-default mounts of local or network
+      # file systems.
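+      # (Illustrative sketch, not executed: the loop below labels entry 1 as
+      # "A", entry 2 as "B", and so on, using the chr/ord helpers defined
+      # earlier, e.g.
+      #   chr 65     # prints "A"
+      #   ord "A"    # prints 65
+      # so entry $j is labelled "$(chr $((64 + $j)))".)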
+      j=1; for m in ${mounts}; do
+        c="$(printf $(printf '\\%03o' $((64+$j))))"
+        echo "$sel" | grep -qs $c &&
+          state="mounted in chroot" || state="$(tput el)"
+        printf "  $c) %-40s${state}\n" "$m"
+        j=$(($j+1))
+      done
+      # Allow user to interactively (de-)select any of the entries
+      echo
+      printf "Select mount points that you want to be included or press %s" \
+             "SPACE to continue"
+      c="$(getkey | tr a-z A-Z)"
+      [ "$c" == " " ] && { echo; echo; break; }
+      if [ -z "$c" ] ||
+         [ "$c" '<' 'A' -o $(ord "$c") -gt $((64 + $(ord "$j"))) ]; then
+          # Invalid input, ring the console bell
+          tput bel
+      else
+        # Toggle the selection for the given entry
+        if echo "$sel" | grep -qs $c; then
+          sel="$(printf "$sel" | sed "s/$c//")"
+        else
+          sel="$sel$c"
+        fi
+      fi
+      # Reposition cursor to the top of the list of entries
+      tput cuu $(($j + 1))
+      echo
+    done
+  fi
+  j=1; for m in ${mounts}; do
+    c="$(chr $(($j + 64)))"
+    if echo "$sel" | grep -qs $c; then
+      bind_mounts="${bind_mounts}$m $m none rw,bind 0 0
+"
+    fi
+    j=$(($j+1))
+  done
+fi
+
+# Remove stale entry from /etc/schroot/schroot.conf. Entries start
+# with the target name in square brackets, followed by an arbitrary
+# number of lines. The entry stops when either the end of file has
+# been reached, or when the beginning of a new target is encountered.
+# This means, we cannot easily match for a range of lines in
+# "sed". Instead, we actually have to iterate over each line and check
+# whether it is the beginning of a new entry.
+sudo sed -ni '/^[[]'"${target%bit}"']$/,${:1;n;/^[[]/b2;b1;:2;p;n;b2};p' \
+         /etc/schroot/schroot.conf
+
+# Download base system. This takes some time.
+if [ -z "${mirror}" ]; then
+  grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
+    mirror="http://archive.ubuntu.com/ubuntu" ||
+    mirror="http://ftp.us.debian.org/debian"
+fi
+
+sudo ${http_proxy:+http_proxy="${http_proxy}"} debootstrap ${archflag} \
+    "${distname}" "/var/lib/chroot/${target}" "$mirror"
+
+# Add new entry to /etc/schroot/schroot.conf
+grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
+  brand="Ubuntu" || brand="Debian"
+if [ -z "${chroot_groups}" ]; then
+  chroot_groups="${admin},$(id -gn)"
+fi
+
+if [ -d '/etc/schroot/default' ]; then
+  new_version=1
+  fstab="/etc/schroot/${target}/fstab"
+else
+  new_version=0
+  fstab="/etc/schroot/mount-${target}"
+fi
+
+if [ "$new_version" = "1" ]; then
+  sudo cp -ar /etc/schroot/default /etc/schroot/${target}
+
+  sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
+[${target%bit}]
+description=${brand} ${distname} ${arch}
+type=directory
+directory=/var/lib/chroot/${target}
+users=root
+groups=${chroot_groups}
+root-groups=${chroot_groups}
+personality=linux$([ "${arch}" != 64bit ] && echo 32)
+profile=${target}
+
+EOF
+  [ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] &&
+    printf "${bind_mounts}" |
+      sudo sh -c "cat >>${fstab}"
+else
+  # Older versions of schroot wanted a "priority=" line, whereas recent
+  # versions deprecate "priority=" and warn if they see it. We don't have
+  # a good feature test, but scanning for the string "priority=" in the
+  # existing "schroot.conf" file is a good indication of what to do.
+  priority=$(grep -qs 'priority=' /etc/schroot/schroot.conf &&
+             echo 'priority=3' || :)
+  sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
+[${target%bit}]
+description=${brand} ${distname} ${arch}
+type=directory
+directory=/var/lib/chroot/${target}
+users=root
+groups=${chroot_groups}
+root-groups=${chroot_groups}
+personality=linux$([ "${arch}" != 64bit ] && echo 32)
+script-config=script-${target}
+${priority}
+
+EOF
+  sed '/^FSTAB=/s,"[^"]*","'"${fstab}"'",' \
+       /etc/schroot/script-defaults |
+    sudo sh -c 'cat >/etc/schroot/script-'"${target}"
+  sed '\,^/home[/[:space:]],s/\([,[:space:]]\)bind[[:space:]]/\1rbind /' \
+       /etc/schroot/mount-defaults |
+    sudo sh -c "cat > ${fstab}"
+fi
+
+# Add the extra mount points that the user told us about
+[ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] &&
+  printf "${bind_mounts}" |
+    sudo sh -c 'cat >>'"${fstab}"
+
+# If this system has a "/media" mountpoint, import it into the chroot
+# environment. Most modern distributions use this mount point to
+# automatically mount devices such as CDROMs, USB sticks, etc...
+if [ -d /media ] &&
+   ! grep -qs '^/media' "${fstab}"; then
+  echo '/media /media none rw,rbind 0 0' |
+    sudo sh -c 'cat >>'"${fstab}"
+fi
+
+# Share /dev/shm, /run and /run/shm.
+grep -qs '^/dev/shm' "${fstab}" ||
+  echo '/dev/shm /dev/shm none rw,bind 0 0' |
+    sudo sh -c 'cat >>'"${fstab}"
+if [ ! -d "/var/lib/chroot/${target}/run" ] &&
+   ! grep -qs '^/run' "${fstab}"; then
+  echo '/run /run none rw,bind 0 0' |
+    sudo sh -c 'cat >>'"${fstab}"
+fi
+if ! grep -qs '^/run/shm' "${fstab}"; then
+  { [ -d /run ] && echo '/run/shm /run/shm none rw,bind 0 0' ||
+    echo '/dev/shm /run/shm none rw,bind 0 0'; } |
+    sudo sh -c 'cat >>'"${fstab}"
+fi
+
+# Set up a special directory that changes contents depending on the target
+# that is executing.
+d="$(readlink -f "${HOME}/chroot" 2>/dev/null || echo "${HOME}/chroot")"
+s="${d}/.${target}"
+echo "${s} ${d} none rw,bind 0 0" |
+  sudo sh -c 'cat >>'"${fstab}"
+mkdir -p "${s}"
+
+# Install a helper script to launch commands in the chroot
+sudo sh -c 'cat >/usr/local/bin/'"${target%bit}" <<'EOF'
+#!/bin/bash
+
+chroot="${0##*/}"
+
+wrap() {
+  # Word-wrap the text passed-in on stdin. Optionally, on continuation lines
+  # insert the same number of spaces as the number of characters in the
+  # parameter(s) passed to this function.
+  # If the "fold" program cannot be found, or if the actual width of the
+  # terminal cannot be determined, this function doesn't attempt to do any
+  # wrapping.
+  local f="$(type -P fold)"
+  [ -z "${f}" ] && { cat; return; }
+  local c="$(stty -a </dev/null 2>/dev/null |
+             sed 's/.*columns[[:space:]]*\([0-9]*\).*/\1/;t;d')"
+  [ -z "${c}" ] && { cat; return; }
+  local i="$(echo "$*"|sed 's/./ /g')"
+  local j="$(printf %s "${i}"|wc -c)"
+  if [ "${c}" -gt "${j}" ]; then
+    dd bs=1 count="${j}" 2>/dev/null
+    "${f}" -sw "$((${c}-${j}))" | sed '2,$s/^/'"${i}"'/'
+  else
+    "${f}" -sw "${c}"
+  fi
+}
+
+help() {
+  echo "Usage ${0##*/} [-h|--help] [-c|--clean] [-C|--clean-all] [-l|--list] [--] args" | wrap "Usage ${0##*/} "
+  echo "  help:      print this message" | wrap "  "
+  echo "  list:      list all known chroot environments" | wrap "  "
+  echo "  clean:     remove all old chroot sessions for \"${chroot}\"" | wrap "  "
+  echo "  clean-all: remove all old chroot sessions for all environments" | wrap "  "
+  exit 0
+}
+
+clean() {
+  local s t rc
+  rc=0
+  for s in $(schroot -l --all-sessions); do
+    t="${s#session:}"
+    if [ -n "$1" ]; then
+      [ "${t#${chroot}-}" == "${t}" ] && continue
+    fi
+    if ls -l /proc/*/{cwd,fd} 2>/dev/null |
+       fgrep -qs "/var/lib/schroot/mount/${t}"; then
+      echo "Session \"${t}\" still has active users, not cleaning up" | wrap
+      rc=1
+      continue
+    fi
+    sudo schroot -c "${s}" -e || rc=1
+  done
+  exit ${rc}
+}
+
+list() {
+  for e in $(schroot -l); do
+    e="${e#chroot:}"
+    [ -x "/usr/local/bin/${e}" ] || continue
+    if schroot -l --all-sessions 2>/dev/null |
+       sed 's/^session://' |
+       grep -qs "^${e}-"; then
+      echo "${e} is currently active"
+    else
+      echo "${e}"
+    fi
+  done
+  exit 0
+}
+
+while [ "$#" -ne 0 ]; do
+  case "$1" in
+    --)             shift; break;;
+    -h|--help)      shift; help;;
+    -l|--list)      shift; list;;
+    -c|--clean)     shift; clean "${chroot}";;
+    -C|--clean-all) shift; clean;;
+    *)              break;;
+  esac
+done
+
+# Start a new chroot session and keep track of the session id. We inject this
+# id into all processes that run inside the chroot. Unless they go out of their
+# way to clear their environment, we can then later identify our child and
+# grand-child processes by scanning their environment.
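+# (A minimal sketch of the begin/run/end session cycle used below, assuming a
+# chroot named "mychroot" registered in schroot.conf:
+#   session="$(schroot -c mychroot -b)"   # begin a session; prints its id
+#   schroot -c "${session}" -r -- id      # run a command in that session
+#   schroot -c "${session}" -e            # end the session
+# The "-p" flag additionally preserves the caller's environment.)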
+session="$(schroot -c "${chroot}" -b)"
+export CHROOT_SESSION_ID="${session}"
+
+# Set GOMA_TMP_DIR for better handling of goma inside chroot.
+export GOMA_TMP_DIR="/tmp/goma_tmp_$CHROOT_SESSION_ID"
+mkdir -p "$GOMA_TMP_DIR"
+
+if [ $# -eq 0 ]; then
+  # Run an interactive shell session
+  schroot -c "${session}" -r -p
+else
+  # Run a command inside of the chroot environment
+  p="$1"; shift
+  schroot -c "${session}" -r -p "$p" -- "$@"
+fi
+rc=$?
+
+# Compute the inode of the root directory inside of the chroot environment.
+i=$(schroot -c "${session}" -r -p ls -- -id /proc/self/root/. |
+     awk '{ print $1 }') 2>/dev/null
+other_pids=
+while [ -n "$i" ]; do
+  # Identify processes by the inode number of their root directory. Then
+  # remove all processes that we know belong to other sessions. We use
+  # "sort | uniq -u" to do what amounts to a "set subtraction operation".
+  pids=$({ ls -id1 /proc/*/root/. 2>/dev/null |
+         sed -e 's,^[^0-9]*'$i'.*/\([1-9][0-9]*\)/.*$,\1,
+                 t
+                 d';
+         echo "${other_pids}";
+         echo "${other_pids}"; } | sort | uniq -u) >/dev/null 2>&1
+  # Kill all processes that are still left running in the session. This is
+  # typically an assortment of daemon processes that were started
+  # automatically. They result in us being unable to tear down the session
+  # cleanly.
+  [ -z "${pids}" ] && break
+  for j in $pids; do
+    # Unfortunately, the way that schroot sets up sessions has the
+    # side-effect of being unable to tell one session apart from another.
+    # This can result in us attempting to kill processes in other sessions.
+    # We make a best-effort to avoid doing so.
+    k="$( (xargs -0 -n1 </proc/$j/environ; echo) 2>/dev/null |
+         sed 's/^CHROOT_SESSION_ID=/x/;t1;d;:1;q')"
+    if [ -n "${k}" -a "${k#x}" != "${session}" ]; then
+      other_pids="${other_pids}
+${j}"
+      continue
+    fi
+    kill -9 "$j"
+  done
+done
+# End the chroot session. This should clean up all temporary files. But if we
+# earlier failed to terminate all (daemon) processes inside of the session,
+# deleting the session could fail. When that happens, the user has to manually
+# clean up the stale files by invoking us with "--clean" after having killed
+# all running processes.
+schroot -c "${session}" -e
+# Since no goma processes are running, we can remove goma directory.
+rm -rf "$GOMA_TMP_DIR"
+exit $rc
+EOF
+sudo chown root:root /usr/local/bin/"${target%bit}"
+sudo chmod 755 /usr/local/bin/"${target%bit}"
+
+# Add the standard Ubuntu update repositories if requested.
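+# (For illustration, assuming ${distname} is "trusty": the sed script below
+# turns a line such as
+#   deb http://archive.ubuntu.com/ubuntu trusty main
+# into three lines, the original plus its trusty-security and trusty-updates
+# variants.)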
+[ "${alt_repos}" = "y" -a \ + -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] && +sudo sed -i '/^deb .* [^ -]\+ main$/p + s/^\(deb .* [^ -]\+\) main/\1-security main/ + p + t1 + d + :1;s/-security main/-updates main/ + t + d' "/var/lib/chroot/${target}/etc/apt/sources.list" + +# Add a few more repositories to the chroot +[ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] && +sudo sed -i 's/ main$/ main restricted universe multiverse/' \ + "/var/lib/chroot/${target}/etc/apt/sources.list" + +# Add the Ubuntu "partner" repository, if available +if [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] && + HEAD "http://archive.canonical.com/ubuntu/dists/${distname}/partner" \ + >&/dev/null; then + sudo sh -c ' + echo "deb http://archive.canonical.com/ubuntu" \ + "'"${distname}"' partner" \ + >>"/var/lib/chroot/'"${target}"'/etc/apt/sources.list"' +fi + +# Add source repositories, if the user requested we do so +[ "${add_srcs}" = "y" -a \ + -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] && +sudo sed -i '/^deb[^-]/p + s/^deb\([^-]\)/deb-src\1/' \ + "/var/lib/chroot/${target}/etc/apt/sources.list" + +# Set apt proxy if host has set http_proxy +if [ -n "${http_proxy}" ]; then + sudo sh -c ' + echo "Acquire::http::proxy \"'"${http_proxy}"'\";" \ + >>"/var/lib/chroot/'"${target}"'/etc/apt/apt.conf"' +fi + +# Update packages +sudo "/usr/local/bin/${target%bit}" /bin/sh -c ' + apt-get update; apt-get -y dist-upgrade' || : + +# Install a couple of missing packages +for i in debian-keyring ubuntu-keyring locales sudo; do + [ -d "/var/lib/chroot/${target}/usr/share/doc/$i" ] || + sudo "/usr/local/bin/${target%bit}" apt-get -y install "$i" || : +done + +# Configure locales +sudo "/usr/local/bin/${target%bit}" /bin/sh -c ' + l='"${LANG:-en_US}"'; l="${l%%.*}" + [ -r /etc/locale.gen ] && + sed -i "s/^# \($l\)/\1/" /etc/locale.gen + locale-gen $LANG en_US en_US.UTF-8' || : + +# Enable multi-arch support, if available +sudo "/usr/local/bin/${target%bit}" dpkg --assert-multi-arch >&/dev/null && + [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] && { + sudo sed -i 's/ / [arch=amd64,i386] /' \ + "/var/lib/chroot/${target}/etc/apt/sources.list" + [ -d /var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/ ] && + sudo "/usr/local/bin/${target%bit}" dpkg --add-architecture \ + $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) >&/dev/null || + echo foreign-architecture \ + $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) | + sudo sh -c \ + "cat >'/var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/multiarch'" +} + +# Configure "sudo" package +sudo "/usr/local/bin/${target%bit}" /bin/sh -c ' + egrep -qs '"'^$(id -nu) '"' /etc/sudoers || + echo '"'$(id -nu) ALL=(ALL) ALL'"' >>/etc/sudoers' + +# Install a few more commonly used packages +sudo "/usr/local/bin/${target%bit}" apt-get -y install \ + autoconf automake1.9 dpkg-dev g++-multilib gcc-multilib gdb less libtool \ + lsof strace + +# If running a 32bit environment on a 64bit machine, install a few binaries +# as 64bit. This is only done automatically if the chroot distro is the same as +# the host, otherwise there might be incompatibilities in build settings or +# runtime dependencies. The user can force it with the '-c' flag. 
+host_distro=$(grep -s DISTRIB_CODENAME /etc/lsb-release | \ + cut -d "=" -f 2) +if [ "${copy_64}" = "y" -o \ + "${host_distro}" = "${distname}" -a "${arch}" = 32bit ] && \ + file /bin/bash 2>/dev/null | grep -q x86-64; then + readlinepkg=$(sudo "/usr/local/bin/${target%bit}" sh -c \ + 'apt-cache search "lib64readline.\$" | sort | tail -n 1 | cut -d " " -f 1') + sudo "/usr/local/bin/${target%bit}" apt-get -y install \ + lib64expat1 lib64ncurses5 ${readlinepkg} lib64z1 lib64stdc++6 + dep= + for i in binutils gdb; do + [ -d /usr/share/doc/"$i" ] || dep="$dep $i" + done + [ -n "$dep" ] && sudo apt-get -y install $dep + sudo mkdir -p "/var/lib/chroot/${target}/usr/local/lib/amd64" + for i in libbfd libpython; do + lib="$({ ldd /usr/bin/ld; ldd /usr/bin/gdb; } | + grep -s "$i" | awk '{ print $3 }')" + if [ -n "$lib" -a -r "$lib" ]; then + sudo cp "$lib" "/var/lib/chroot/${target}/usr/local/lib/amd64" + fi + done + for lib in libssl libcrypt; do + for path in /usr/lib /usr/lib/x86_64-linux-gnu; do + sudo cp $path/$lib* \ + "/var/lib/chroot/${target}/usr/local/lib/amd64/" >&/dev/null || : + done + done + for i in gdb ld; do + sudo cp /usr/bin/$i "/var/lib/chroot/${target}/usr/local/lib/amd64/" + sudo sh -c "cat >'/var/lib/chroot/${target}/usr/local/bin/$i'" <&/dev/null; then + tmp_script="/tmp/${script##*/}" + cp "${script}" "${tmp_script}" + fi + # Some distributions automatically start an instance of the system- + # wide dbus daemon, cron daemon or of the logging daemon, when + # installing the Chrome build depencies. This prevents the chroot + # session from being closed. So, we always try to shut down any running + # instance of dbus and rsyslog. + sudo /usr/local/bin/"${target%bit}" sh -c "${script}; + rc=$?; + /etc/init.d/cron stop >/dev/null 2>&1 || :; + /etc/init.d/rsyslog stop >/dev/null 2>&1 || :; + /etc/init.d/dbus stop >/dev/null 2>&1 || :; + exit $rc" + rc=$? + [ -n "${tmp_script}" ] && rm -f "${tmp_script}" + [ $rc -ne 0 ] && exit $rc + break + ;; + n|N) + break + ;; + esac + done + echo +fi + +# Check whether ~/chroot is on a (slow) network file system and offer to +# relocate it. Also offer relocation, if the user appears to have multiple +# spindles (as indicated by "${bind_mount}" being non-empty). +# We only offer this option, if it doesn't look as if a chroot environment +# is currently active. Otherwise, relocation is unlikely to work and it +# can be difficult for the user to recover from the failed attempt to relocate +# the ~/chroot directory. +# We don't aim to solve this problem for every configuration, +# but try to help with the common cases. For more advanced configuration +# options, the user can always manually adjust things. +mkdir -p "${HOME}/chroot/" +if [ ! -h "${HOME}/chroot" ] && + ! egrep -qs '^[^[:space:]]*/chroot' /etc/fstab && + { [ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] || + is_network_drive "${HOME}/chroot"; } && + ! egrep -qs '/var/lib/[^/]*chroot/.*/chroot' /proc/mounts; then + echo "${HOME}/chroot is currently located on the same device as your" + echo "home directory." + echo "This might not be what you want. Do you want me to move it somewhere" + echo "else?" + # If the computer has multiple spindles, many users configure all or part of + # the secondary hard disk to be writable by the primary user of this machine. + # Make some reasonable effort to detect this type of configuration and + # then offer a good location for where to put the ~/chroot directory. 
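+  # (Sketch of the search below: for each candidate bind mount root, e.g. a
+  # hypothetical /mnt/data, probe /mnt/data itself, then /mnt/data/*, then
+  # /mnt/data/*/* for a writable, non-network directory; the first match
+  # becomes the suggested new home for ~/chroot.)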
+ suggest= + for i in $(echo "${bind_mounts}"|cut -d ' ' -f 1); do + if [ -d "$i" -a -w "$i" -a \( ! -a "$i/chroot" -o -w "$i/chroot/." \) ] && + ! is_network_drive "$i"; then + suggest="$i" + else + for j in "$i/"*; do + if [ -d "$j" -a -w "$j" -a \ + \( ! -a "$j/chroot" -o -w "$j/chroot/." \) ] && + ! is_network_drive "$j"; then + suggest="$j" + else + for k in "$j/"*; do + if [ -d "$k" -a -w "$k" -a \ + \( ! -a "$k/chroot" -o -w "$k/chroot/." \) ] && + ! is_network_drive "$k"; then + suggest="$k" + break + fi + done + fi + [ -n "${suggest}" ] && break + done + fi + [ -n "${suggest}" ] && break + done + def_suggest="${HOME}" + if [ -n "${suggest}" ]; then + # For home directories that reside on network drives, make our suggestion + # the default option. For home directories that reside on a local drive, + # require that the user manually enters the new location. + if is_network_drive "${HOME}"; then + def_suggest="${suggest}" + else + echo "A good location would probably be in \"${suggest}\"" + fi + fi + while :; do + printf "Physical location [${def_suggest}]: " + read dir + [ -z "${dir}" ] && dir="${def_suggest}" + [ "${dir%%/}" == "${HOME%%/}" ] && break + if ! [ -d "${dir}" -a -w "${dir}" ] || + [ -a "${dir}/chroot" -a ! -w "${dir}/chroot/." ]; then + echo "Cannot write to ${dir}/chroot. Please try again" + else + mv "${HOME}/chroot" "${dir}/chroot" + ln -s "${dir}/chroot" "${HOME}/chroot" + for i in $(list_all_chroots); do + sudo "$i" mkdir -p "${dir}/chroot" + done + sudo sed -i "s,${HOME}/chroot,${dir}/chroot,g" /etc/schroot/mount-* + break + fi + done +fi + +# Clean up package files +sudo schroot -c "${target%bit}" -p -- apt-get clean +sudo apt-get clean + +trap '' INT TERM QUIT HUP +trap '' EXIT + +# Let the user know what we did +cat <= 1140, + "intent_definition requires Xcode 11.4 or higher") + + _compile_intent_target = "${target_name}_compile_intent" + _compile_intent_output = + "$target_gen_dir/" + get_path_info(invoker.intent_file, "file") + + action(_compile_intent_target) { + script = "//build/apple/xcrun.py" + inputs = [ invoker.intent_file ] + outputs = [ _compile_intent_output ] + args = [ + "intentbuilderc", + "compile", + "-input", + rebase_path(invoker.intent_file, root_build_dir), + "-output", + rebase_path(target_gen_dir, root_build_dir), + ] + + if (defined(invoker.developer_dir)) { + args += [ + "--developer-dir", + rebase_path(invoker.developer_dir, root_build_dir), + ] + } + } + + _compile_intent_bundle = "${target_name}_compile_intent_bundle" + bundle_data(_compile_intent_bundle) { + sources = [ _compile_intent_output ] + outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] + public_deps = [ ":$_compile_intent_target" ] + } + + _generate_source_target = "${target_name}_generate_source" + _generate_source_output = [] + foreach(_intent, invoker.intent_names) { + _generate_source_output += [ + "$target_gen_dir/$_intent.h", + "$target_gen_dir/$_intent.m", + ] + } + + action(_generate_source_target) { + script = "//build/apple/xcrun.py" + inputs = [ invoker.intent_file ] + outputs = _generate_source_output + args = [ + "intentbuilderc", + "generate", + "-input", + rebase_path(invoker.intent_file, root_build_dir), + "-output", + rebase_path(target_gen_dir, root_build_dir), + "-language", + "Objective-C", + ] + + if (defined(invoker.developer_dir)) { + args += [ + "--developer-dir", + rebase_path(invoker.developer_dir, root_build_dir), + ] + } + } + + source_set(target_name) { + forward_variables_from(invoker, + "*", + [ + "developer_dir", + 
"intent_file", + "intent_names", + "sources", + ]) + + if (!defined(deps)) { + deps = [] + } + if (!defined(frameworks)) { + frameworks = [] + } + + sources = _generate_source_output + configs += [ "//build/config/compiler:enable_arc" ] + deps += [ + ":$_compile_intent_bundle", + ":$_generate_source_target", + ] + frameworks += [ "Intents.framework" ] + } +} diff --git a/ios/presubmit_support.py b/ios/presubmit_support.py new file mode 100644 index 000000000000..773d1586e769 --- /dev/null +++ b/ios/presubmit_support.py @@ -0,0 +1,39 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Presubmit helpers for ios + +See https://www.chromium.org/developers/how-tos/depottools/presubmit-scripts +for more details about the presubmit API built into depot_tools. +""" + +from . import update_bundle_filelist + + +def CheckBundleData(input_api, output_api, base, globroot='//'): + root = input_api.change.RepositoryRoot() + filelist = input_api.os_path.join(input_api.PresubmitLocalPath(), + base + '.filelist') + globlist = input_api.os_path.join(input_api.PresubmitLocalPath(), + base + '.globlist') + if globroot.startswith('//'): + globroot = input_api.os_path.join(input_api.change.RepositoryRoot(), + globroot[2:]) + else: + globroot = input_api.os_path.join(input_api.PresubmitLocalPath(), globroot) + if update_bundle_filelist.process_filelist(filelist, + globlist, + globroot, + check=True, + verbose=input_api.verbose) == 0: + return [] + else: + script = input_api.os_path.join(input_api.change.RepositoryRoot(), 'build', + 'ios', 'update_bundle_filelist.py') + + return [ + output_api.PresubmitError( + 'Filelist needs to be re-generated. Please run \'python3 %s %s %s ' + '%s\' and include the changes in this CL' % + (script, filelist, globlist, globroot)) + ] diff --git a/ios/presubmit_support_test.py b/ios/presubmit_support_test.py new file mode 100755 index 000000000000..6bbc6024efee --- /dev/null +++ b/ios/presubmit_support_test.py @@ -0,0 +1,165 @@ +#!/usr/bin/env python3 +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import datetime +import os.path +import sys +import tempfile +import unittest + +sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..')) + +from PRESUBMIT_test_mocks import MockInputApi, MockOutputApi +from build.ios import presubmit_support + +_TEMP_FILELIST_CONTENTS = """# Copyright %d The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. +""" % (datetime.datetime.now().year) + +_TEMP_GLOBLIST_CONTENTS = """** +-*.globlist +-*.filelist +""" + + +class BundleDataPresubmit(unittest.TestCase): + def setUp(self): + self.mock_input_api = MockInputApi() + self.mock_input_api.change.RepositoryRoot = lambda: os.path.join( + os.path.dirname(__file__), '..', '..') + self.mock_input_api.PresubmitLocalPath = lambda: os.path.dirname(__file__) + self.mock_output_api = MockOutputApi() + + def testBasic(self): + """ + Checks that a glob can be expanded to build a file list and if it + matches the existing file list, we should see no error. 
+ """ + results = presubmit_support.CheckBundleData(self.mock_input_api, + self.mock_output_api, + 'test_data/basic', '.') + self.assertEqual([], results) + + def testExclusion(self): + """ + Check that globs can be used to exclude files from file lists. + """ + results = presubmit_support.CheckBundleData(self.mock_input_api, + self.mock_output_api, + 'test_data/exclusions', '.') + self.assertEqual([], results) + + def testDifferentLocalPath(self): + """ + Checks the case where the presubmit directory is not the same as the + globroot, but it is still local (i.e., not relative to the repository + root) + """ + results = presubmit_support.CheckBundleData( + self.mock_input_api, self.mock_output_api, + 'test_data/different_local_path', 'test_data') + self.assertEqual([], results) + + def testRepositoryRelative(self): + """ + Checks the case where globs are relative to the repository root. + """ + results = presubmit_support.CheckBundleData( + self.mock_input_api, self.mock_output_api, + 'test_data/repository_relative') + self.assertEqual([], results) + + def testMissingFilesInFilelist(self): + """ + Checks that we do indeed return an error if the filelist is missing a + file. In this case, all of the test .filelist and .globlist files are + excluded. + """ + results = presubmit_support.CheckBundleData(self.mock_input_api, + self.mock_output_api, + 'test_data/missing', '.') + self.assertEqual(1, len(results)) + + def testExtraFilesInFilelist(self): + """ + Checks the case where extra files have been included in the file list. + """ + results = presubmit_support.CheckBundleData(self.mock_input_api, + self.mock_output_api, + 'test_data/extra', '.') + self.assertEqual(1, len(results)) + + def testOrderInsensitive(self): + """ + Checks that we do not trigger an error for cases where the file list is + correct, but in a different order than the globlist expansion. + """ + results = presubmit_support.CheckBundleData(self.mock_input_api, + self.mock_output_api, + 'test_data/reorder', '.') + self.assertEqual([], results) + + def testUnexpectedHeader(self): + """ + Checks an unexpected header in a file list causes an error. + """ + results = presubmit_support.CheckBundleData(self.mock_input_api, + self.mock_output_api, + 'test_data/comment', '.') + self.assertEqual(1, len(results)) + + def testUntrackedFiles(self): + """ + Checks that the untracked files are correctly ignored. + """ + with tempfile.TemporaryDirectory() as temp_dir: + with open(os.path.join(temp_dir, 'untracked.filelist'), 'w') as f: + f.write(_TEMP_FILELIST_CONTENTS) + with open(os.path.join(temp_dir, 'untracked.globlist'), 'w') as f: + f.write(_TEMP_GLOBLIST_CONTENTS) + with open(os.path.join(temp_dir, 'untracked.txt'), 'w') as f: + f.write('Hello, World!') + path = os.path.join(temp_dir, 'untracked') + self.mock_input_api.change.RepositoryRoot = lambda: temp_dir + self.mock_input_api.PresubmitLocalPath = lambda: temp_dir + results = presubmit_support.CheckBundleData(self.mock_input_api, + self.mock_output_api, + 'untracked') + self.assertEqual([], results) + + def testExcludeDuplicates(self): + """ + Checks that duplicate filenames are not added to a filelist. + """ + results = presubmit_support.CheckBundleData(self.mock_input_api, + self.mock_output_api, + 'test_data/duplicates', '.') + self.assertEqual([], results) + + def testCheckOutsideGloblistDir(self): + """ + Checks that including files outside the globlist directory is an error. 
+ """ + results = presubmit_support.CheckBundleData( + self.mock_input_api, self.mock_output_api, + 'test_data/outside_globlist_dir', '.') + self.assertEqual(1, len(results)) + + def testCheckIgnoreOutsideGloblistDir(self): + """ + Checks that files outside the globlist directory can be ignored. + """ + results = presubmit_support.CheckBundleData( + self.mock_input_api, self.mock_output_api, + 'test_data/ignore_outside_globlist_dir', '.') + self.assertEqual([], results) + + +if __name__ == '__main__': + unittest.main() diff --git a/ios/test_data/bar.html b/ios/test_data/bar.html new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/ios/test_data/basic.filelist b/ios/test_data/basic.filelist new file mode 100644 index 000000000000..496dcbda1078 --- /dev/null +++ b/ios/test_data/basic.filelist @@ -0,0 +1,7 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. +test_data/subdirectory/baz.txt diff --git a/ios/test_data/basic.globlist b/ios/test_data/basic.globlist new file mode 100644 index 000000000000..b4d7d66aa12b --- /dev/null +++ b/ios/test_data/basic.globlist @@ -0,0 +1,5 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +test_data/subdirectory/* diff --git a/ios/test_data/comment.filelist b/ios/test_data/comment.filelist new file mode 100644 index 000000000000..0f6c30fcd822 --- /dev/null +++ b/ios/test_data/comment.filelist @@ -0,0 +1,2 @@ +# This comment is an unexpected header. +test_data/subdirectory/baz.txt diff --git a/ios/test_data/comment.globlist b/ios/test_data/comment.globlist new file mode 100644 index 000000000000..93c82c61225a --- /dev/null +++ b/ios/test_data/comment.globlist @@ -0,0 +1,7 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Some comment followed by an empty line. + +test_data/subdirectory/* diff --git a/ios/test_data/different_local_path.filelist b/ios/test_data/different_local_path.filelist new file mode 100644 index 000000000000..a45d180b2696 --- /dev/null +++ b/ios/test_data/different_local_path.filelist @@ -0,0 +1,9 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. +bar.html +foo.css +subdirectory/baz.txt diff --git a/ios/test_data/different_local_path.globlist b/ios/test_data/different_local_path.globlist new file mode 100644 index 000000000000..a17a781dec39 --- /dev/null +++ b/ios/test_data/different_local_path.globlist @@ -0,0 +1,6 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +** +-**list diff --git a/ios/test_data/duplicates.filelist b/ios/test_data/duplicates.filelist new file mode 100644 index 000000000000..496dcbda1078 --- /dev/null +++ b/ios/test_data/duplicates.filelist @@ -0,0 +1,7 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. +test_data/subdirectory/baz.txt diff --git a/ios/test_data/duplicates.globlist b/ios/test_data/duplicates.globlist new file mode 100644 index 000000000000..79bf591dad9c --- /dev/null +++ b/ios/test_data/duplicates.globlist @@ -0,0 +1,7 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +test_data/subdirectory/* +# This duplicate glob should have no effect on the resulting filelist. +test_data/subdirectory/* diff --git a/ios/test_data/exclusions.filelist b/ios/test_data/exclusions.filelist new file mode 100644 index 000000000000..d9e69f187770 --- /dev/null +++ b/ios/test_data/exclusions.filelist @@ -0,0 +1,9 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. +test_data/bar.html +test_data/foo.css +test_data/subdirectory/baz.txt diff --git a/ios/test_data/exclusions.globlist b/ios/test_data/exclusions.globlist new file mode 100644 index 000000000000..92c241a70013 --- /dev/null +++ b/ios/test_data/exclusions.globlist @@ -0,0 +1,6 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +test_data/** +-test_data/**list diff --git a/ios/test_data/extra.filelist b/ios/test_data/extra.filelist new file mode 100644 index 000000000000..3597a457dda2 --- /dev/null +++ b/ios/test_data/extra.filelist @@ -0,0 +1,8 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. +test_data/bar.html +test_data/foo.css diff --git a/ios/test_data/extra.globlist b/ios/test_data/extra.globlist new file mode 100644 index 000000000000..9fe758f1f178 --- /dev/null +++ b/ios/test_data/extra.globlist @@ -0,0 +1,5 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +test_data/*.css diff --git a/ios/test_data/foo.css b/ios/test_data/foo.css new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/ios/test_data/ignore_outside_globlist_dir.filelist b/ios/test_data/ignore_outside_globlist_dir.filelist new file mode 100644 index 000000000000..a306b7ea044e --- /dev/null +++ b/ios/test_data/ignore_outside_globlist_dir.filelist @@ -0,0 +1,8 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. +presubmit_support_test.py +test_data/subdirectory/baz.txt diff --git a/ios/test_data/ignore_outside_globlist_dir.globlist b/ios/test_data/ignore_outside_globlist_dir.globlist new file mode 100644 index 000000000000..471a0c46f066 --- /dev/null +++ b/ios/test_data/ignore_outside_globlist_dir.globlist @@ -0,0 +1,8 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +test_data/subdirectory/* +# push(ignore-relative) +presubmit_support_test.py +# pop(ignore-relative) diff --git a/ios/test_data/missing.filelist b/ios/test_data/missing.filelist new file mode 100644 index 000000000000..d9e69f187770 --- /dev/null +++ b/ios/test_data/missing.filelist @@ -0,0 +1,9 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. +test_data/bar.html +test_data/foo.css +test_data/subdirectory/baz.txt diff --git a/ios/test_data/missing.globlist b/ios/test_data/missing.globlist new file mode 100644 index 000000000000..267b25246fcc --- /dev/null +++ b/ios/test_data/missing.globlist @@ -0,0 +1,8 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This should cover every file in test_data/ and its subdirectories (including +# test files). + +test_data/** diff --git a/ios/test_data/outside_globlist_dir.filelist b/ios/test_data/outside_globlist_dir.filelist new file mode 100644 index 000000000000..a81d5ad7386b --- /dev/null +++ b/ios/test_data/outside_globlist_dir.filelist @@ -0,0 +1,8 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. +test_data/subdirectory/baz.txt +presubmit_support_test.py diff --git a/ios/test_data/outside_globlist_dir.globlist b/ios/test_data/outside_globlist_dir.globlist new file mode 100644 index 000000000000..31bb073bc3fe --- /dev/null +++ b/ios/test_data/outside_globlist_dir.globlist @@ -0,0 +1,6 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +test_data/subdirectory/* +presubmit_support_test.py diff --git a/ios/test_data/reorder.filelist b/ios/test_data/reorder.filelist new file mode 100644 index 000000000000..58921bc4facd --- /dev/null +++ b/ios/test_data/reorder.filelist @@ -0,0 +1,9 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# NOTE: this file is generated by build/ios/update_bundle_filelist.py +# If it requires updating, you should get a presubmit error with +# instructions on how to regenerate. Otherwise, do not edit. 
+test_data/subdirectory/baz.txt
+test_data/foo.css
+test_data/bar.html
diff --git a/ios/test_data/reorder.globlist b/ios/test_data/reorder.globlist
new file mode 100644
index 000000000000..92c241a70013
--- /dev/null
+++ b/ios/test_data/reorder.globlist
@@ -0,0 +1,6 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+test_data/**
+-test_data/**list
diff --git a/ios/test_data/repository_relative.filelist b/ios/test_data/repository_relative.filelist
new file mode 100644
index 000000000000..796087b1da87
--- /dev/null
+++ b/ios/test_data/repository_relative.filelist
@@ -0,0 +1,9 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# NOTE: this file is generated by build/ios/update_bundle_filelist.py
+# If it requires updating, you should get a presubmit error with
+# instructions on how to regenerate. Otherwise, do not edit.
+//build/ios/test_data/bar.html
+//build/ios/test_data/foo.css
+//build/ios/test_data/subdirectory/baz.txt
diff --git a/ios/test_data/repository_relative.globlist b/ios/test_data/repository_relative.globlist
new file mode 100644
index 000000000000..b7c42100ac2c
--- /dev/null
+++ b/ios/test_data/repository_relative.globlist
@@ -0,0 +1,6 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+//build/ios/test_data/**
+-//build/ios/test_data/**list
diff --git a/ios/test_data/subdirectory/baz.txt b/ios/test_data/subdirectory/baz.txt
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/ios/update_bundle_filelist.py b/ios/update_bundle_filelist.py
new file mode 100755
index 000000000000..2e21205c308d
--- /dev/null
+++ b/ios/update_bundle_filelist.py
@@ -0,0 +1,318 @@
+#!/usr/bin/env python3
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""
+Updates .filelist files using data from corresponding .globlist files (or
+checks whether they are up to date).
+
+bundle_data targets require an explicit source list, but maintaining these
+large lists can be cumbersome. This script aims to simplify the process of
+updating these lists by either expanding globs to update file lists or
+checking that an existing file list matches such an expansion (i.e., checking
+during presubmit).
+
+The .globlist file contains a list of globs that will be expanded to either
+compare or replace a corresponding .filelist. It is possible to exclude items
+from the file list with globs as well. These lines are prefixed with '-' and
+are processed in order, so be sure that exclusions follow the inclusions they
+are meant to filter in the list of globs. Comments and empty lines are
+permitted in .globlist files; comments are prefixed with '#'.
+
+By convention, the base name of the .globlist and .filelist files matches the
+label of their corresponding bundle_data from the .gn file. In order to ensure
+that these filelists don't get stale, there should also be a PRESUBMIT.py
+which uses this script to check that the list is up to date.
+
+By default, the script will update the file list to match the expanded globs.
+"""
+
+import argparse
+import datetime
+import difflib
+import glob
+import os.path
+import re
+import subprocess
+import sys
+
+# Characters used to set colors in the terminal. Taken, along with the
+# printing routine below, from update_deps.py.
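+# (For reference: '\033[91m' is the ANSI escape for bright red and '\033[0m'
+# resets all attributes, so on an ANSI terminal
+#   print(TERMINAL_ERROR_COLOR + 'ERROR' + TERMINAL_RESET_COLOR)
+# renders "ERROR" in red.)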
+TERMINAL_ERROR_COLOR = '\033[91m'
+TERMINAL_RESET_COLOR = '\033[0m'
+
+_HEADER = """# Copyright %d The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# NOTE: this file is generated by build/ios/update_bundle_filelist.py
+# If it requires updating, you should get a presubmit error with
+# instructions on how to regenerate. Otherwise, do not edit.
+""" % (datetime.datetime.now().year)
+
+_HEADER_PATTERN = re.compile(r"""# Copyright [0-9]+ The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# NOTE: this file is generated by build/ios/update_bundle_filelist.py
+# If it requires updating, you should get a presubmit error with
+# instructions on how to regenerate. Otherwise, do not edit.
+""")
+
+_HEADER_HEIGHT = 6
+
+_START_IGNORE_EXPANSIONS_OUTSIDE_GLOBLIST_DIR = '# push(ignore-relative)'
+_STOP_IGNORE_EXPANSIONS_OUTSIDE_GLOBLIST_DIR = '# pop(ignore-relative)'
+
+
+def parse_filelist(filelist_name):
+  try:
+    with open(filelist_name) as filelist:
+      unfiltered = [l for l in filelist]
+      header = ''.join(unfiltered[:_HEADER_HEIGHT])
+      files = sorted(l.strip() for l in unfiltered[_HEADER_HEIGHT:])
+      return (files, header)
+  except Exception as e:
+    print_error(f'Could not read file list: {filelist_name}', f'{type(e)}: {e}')
+    # Return an empty (files, header) pair so callers can still unpack the
+    # result after an error.
+    return ([], '')
+
+
+def get_git_command_name():
+  if sys.platform.startswith('win'):
+    return 'git.bat'
+  return 'git'
+
+
+def get_tracked_files(directory, globroot, repository_root_relative, verbose):
+  try:
+    git_cmd = get_git_command_name()
+    with subprocess.Popen([git_cmd, 'ls-files', '--error-unmatch', directory],
+                          stdout=subprocess.PIPE,
+                          stderr=subprocess.PIPE,
+                          cwd=globroot) as p:
+      output = p.communicate()
+      if p.returncode != 0:
+        if verbose:
+          print_error(
+              f'Could not gather a list of tracked files in {directory}',
+              f'{output[1]}')
+        return set()
+
+      files = [f.decode('utf-8') for f in output[0].splitlines()]
+
+      # Need paths to be relative to directory in order to match expansions.
+      # This should happen naturally due to cwd above, but we need to take
+      # special care if relative to the repository root.
+      if repository_root_relative:
+        files = ['//' + f for f in files]
+
+      # Handle Windows backslashes
+      files = [f.replace('\\', '/') for f in files]
+
+      return set(files)
+
+  except Exception as e:
+    if verbose:
+      print_error(f'Could not gather a list of tracked files in {directory}',
+                  f'{type(e)}: {e}')
+    return set()
+
+
+def combine_potentially_repository_root_relative_paths(a, b):
+  if b.startswith('//'):
+    # If b is relative to the repository root, os.path will consider it
+    # absolute and os.path.join will fail. In this case, we can simply
+    # concatenate the paths.
+    return (a + b, True)
+  else:
+    return (os.path.join(a, b), False)
+
+
+def parse_and_expand_globlist(globlist_name, glob_root):
+  # The following expects glob_root not to end in a trailing slash.
+  if glob_root.endswith('/'):
+    glob_root = glob_root[:-1]
+
+  check_expansions_outside_globlist_dir = True
+  globlist_dir = os.path.dirname(globlist_name)
+
+  with open(globlist_name) as globlist:
+    # Paths in |files| and |to_check| must use unix separators. Using a set
+    # ensures no unwanted duplicates. The files in |to_check| must be in the
+    # globroot or a subdirectory.
+    files = set()
+    to_check = set()
+    for g in globlist:
+      g = g.strip()
+
+      # Ignore blank lines
+      if not g:
+        continue
+
+      # Toggle error checking.
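+      # (Example globlist fragment using these markers, mirroring
+      # ios/test_data/ignore_outside_globlist_dir.globlist:
+      #   # push(ignore-relative)
+      #   presubmit_support_test.py
+      #   # pop(ignore-relative)
+      # globs between push and pop may expand outside the globlist's
+      # directory without raising an error.)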
+ if g == _START_IGNORE_EXPANSIONS_OUTSIDE_GLOBLIST_DIR: + check_expansions_outside_globlist_dir = False + elif g == _STOP_IGNORE_EXPANSIONS_OUTSIDE_GLOBLIST_DIR: + check_expansions_outside_globlist_dir = True + + # Ignore comments. + if not g or g.startswith('#'): + continue + + # Exclusions are prefixed with '-'. + is_exclusion = g.startswith('-') + if is_exclusion: + g = g[1:] + + (combined, + root_relative) = combine_potentially_repository_root_relative_paths( + glob_root, g) + + prefix_size = len(glob_root) + if not root_relative: + # We need to account for the separator. + prefix_size += 1 + + expansion = glob.glob(combined, recursive=True) + + # Filter out directories. + expansion = [f for f in expansion if os.path.isfile(f)] + + if check_expansions_outside_globlist_dir: + for f in expansion: + relative = os.path.relpath(f, globlist_dir) + if relative.startswith('..'): + raise Exception(f'Globlist expansion outside globlist dir: {f}') + + # Make relative to |glob_root|. + expansion = [f[prefix_size:] for f in expansion] + + # Handle Windows backslashes + expansion = [f.replace('\\', '/') for f in expansion] + + # Since paths in |expansion| only use unix separators, it is safe to + # compare for both the purpose of exclusion and addition. + if is_exclusion: + files = files.difference(expansion) + else: + files = files.union(expansion) + + # Return a sorted list. + return sorted(files) + + +def compare_lists(a, b): + differ = difflib.Differ() + full_diff = differ.compare(a, b) + lines = [d for d in full_diff if not d.startswith(' ')] + additions = [l[2:] for l in lines if l.startswith('+ ')] + removals = [l[2:] for l in lines if l.startswith('- ')] + return (additions, removals) + + +def write_filelist(filelist_name, files, header): + try: + with open(filelist_name, 'w', encoding='utf-8', newline='') as filelist: + if not _HEADER_PATTERN.search(header): + header = _HEADER + filelist.write(header) + for file in files: + filelist.write(f'{file}\n') + except Exception as e: + print_error(f'Could not write file list: {filelist_name}', + f'{type(e)}: {e}') + return [] + + +def process_filelist(filelist, globlist, globroot, check=False, verbose=False): + files_from_globlist = [] + try: + files_from_globlist = parse_and_expand_globlist(globlist, globroot) + except Exception as e: + if verbose: + print_error(f'Could not read glob list: {globlist}', f'{type(e)}: {e}') + return 1 + + (files, header) = parse_filelist(filelist) + + (additions, removals) = compare_lists(files, files_from_globlist) + to_ignore = set() + + # Ignore additions of untracked files. 
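+  # (For instance, if the glob expansion picks up a scratch file such as a
+  # hypothetical test_data/notes.txt that git does not track, it is dropped
+  # here via `git ls-files` rather than reported as a missing filelist entry;
+  # see testUntrackedFiles in presubmit_support_test.py.)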
+  if additions:
+    directories = set([os.path.dirname(f) for f in additions])
+    tracked_files = set()
+    for d in directories:
+      (combined,
+       root_relative) = combine_potentially_repository_root_relative_paths(
+           globroot, d)
+      relative = os.path.relpath(combined, globroot)
+      tracked_files = tracked_files.union(
+          get_tracked_files(relative, globroot, root_relative, verbose))
+    to_ignore = set(additions).difference(tracked_files)
+    additions = [f for f in additions if f in tracked_files]
+
+  files_from_globlist = [f for f in files_from_globlist if f not in to_ignore]
+
+  if check:
+    if not _HEADER_PATTERN.search(header):
+      if verbose:
+        print_error(f'Unexpected header for {filelist}', f'{header}')
+      return 1
+    if not additions and not removals:
+      return 0
+    if verbose:
+      pretty_additions = ['+ ' + f for f in additions]
+      pretty_removals = ['- ' + f for f in removals]
+      pretty_diff = '\n'.join(pretty_additions + pretty_removals)
+      print_error('File list does not match glob expansion', f'{pretty_diff}')
+    return 1
+  else:
+    write_filelist(filelist, files_from_globlist, header)
+    return 0
+
+
+def main(args):
+  parser = argparse.ArgumentParser(
+      description=__doc__, formatter_class=argparse.RawTextHelpFormatter)
+  parser.add_argument('filelist', help='Contains one file per line')
+  parser.add_argument('globlist',
+                      help='Contains globs that, when expanded, '
+                      'should match the filelist. Use '
+                      '--help for details on syntax')
+  parser.add_argument('globroot',
+                      help='Directory from which globs are relative')
+  parser.add_argument('-c',
+                      '--check',
+                      action='store_true',
+                      help='Prevents modifying the file list')
+  parser.add_argument('-v',
+                      '--verbose',
+                      action='store_true',
+                      help='Use this to print details on differences')
+  args = parser.parse_args(args)
+  return process_filelist(args.filelist,
+                          args.globlist,
+                          args.globroot,
+                          check=args.check,
+                          verbose=args.verbose)
+
+
+def print_error(error_message, error_info):
+  """ Print the `error_message` with additional `error_info` """
+  color_start, color_end = adapted_color_for_output(TERMINAL_ERROR_COLOR,
+                                                    TERMINAL_RESET_COLOR)
+
+  error_message = color_start + 'ERROR: ' + error_message + color_end
+  if len(error_info) > 0:
+    error_message = error_message + '\n' + error_info
+  print(error_message, file=sys.stderr)
+
+
+def adapted_color_for_output(color_start, color_end):
+  """ Returns the (`color_start`, `color_end`) pair if the output is a
+  terminal, or empty strings otherwise """
+  if not sys.stdout.isatty():
+    return '', ''
+  return color_start, color_end
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/lacros/BUILD.gn b/lacros/BUILD.gn
new file mode 100644
index 000000000000..0a93891469a0
--- /dev/null
+++ b/lacros/BUILD.gn
@@ -0,0 +1,28 @@
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/python.gni")
+import("//build/util/process_version.gni")
+
+python_library("lacros_resource_sizes_py") {
+  pydeps_file = "lacros_resource_sizes.pydeps"
+  data = [ "//buildtools/third_party/eu-strip/bin/eu-strip" ]
+  data_deps = [
+    "//build/util:test_results",
+    "//third_party/catapult/tracing:convert_chart_json",
+  ]
+}
+
+# Lacros is built with the "{arch}-generic" configuration. However, in Chrome
+# OS, that is just one board variation, so the libraries on the *-generic
+# boards may not be compatible with the ones on the actual DUTs.
+# One of the common patterns we hit recently is symbols exposed by libgcc.
+# These symbols started to be exposed because of the libunwind transition,
+# and, depending on the per-board implementation, they may or may not be
+# re-exposed by other libraries. To mitigate the situation, pass
+# -shared-libgcc so that the system libgcc is always looked up.
+config("optional_shared_libgcc") {
+  ldflags = [ "-shared-libgcc" ]
+}
diff --git a/lacros/OWNERS b/lacros/OWNERS
new file mode 100644
index 000000000000..e9865487453b
--- /dev/null
+++ b/lacros/OWNERS
@@ -0,0 +1,2 @@
+svenzheng@chromium.org
+erikchen@chromium.org
diff --git a/lacros/PRESUBMIT.py b/lacros/PRESUBMIT.py
new file mode 100644
index 000000000000..642ee7e8fbd4
--- /dev/null
+++ b/lacros/PRESUBMIT.py
@@ -0,0 +1,28 @@
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Presubmit script for changes affecting //build/lacros"""
+
+USE_PYTHON3 = True
+
+
+def _CommonChecks(input_api, output_api):
+  # Don't run lacros tests on Windows.
+  if input_api.is_windows:
+    return []
+  tests = input_api.canned_checks.GetUnitTestsInDirectory(
+      input_api,
+      output_api,
+      '.', [r'^.+_test\.py$'],
+      run_on_python2=False,
+      run_on_python3=True,
+      skip_shebang_check=True)
+  return input_api.RunTests(tests)
+
+
+def CheckChangeOnUpload(input_api, output_api):
+  return _CommonChecks(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+  return _CommonChecks(input_api, output_api)
diff --git a/lacros/README.md b/lacros/README.md
new file mode 100644
index 000000000000..be0a243f0d94
--- /dev/null
+++ b/lacros/README.md
@@ -0,0 +1,11 @@
+This folder contains code for running lacros in tests.
+
+This includes:
+* test_runner.py
+Runs linux-lacros related tests.
+
+* mojo_connection_lacros_launcher
+Script for launching lacros for debugging.
+
+* lacros_resource_sizes.py
+Script used by builders to monitor the lacros binary size.
diff --git a/lacros/lacros_resource_sizes.gni b/lacros/lacros_resource_sizes.gni
new file mode 100644
index 000000000000..7d1e91965a9b
--- /dev/null
+++ b/lacros/lacros_resource_sizes.gni
@@ -0,0 +1,21 @@
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/util/generate_wrapper.gni")
+
+# Generates a script in the bin directory that runs
+# //build/lacros/lacros_resource_sizes.py for the provided configuration.
+template("lacros_resource_sizes_test") {
+  generate_wrapper(target_name) {
+    forward_variables_from(invoker, [ "data_deps" ])
+    executable = "//build/lacros/lacros_resource_sizes.py"
+    wrapper_script = "$root_out_dir/bin/run_${target_name}"
+
+    deps = [ "//build/lacros:lacros_resource_sizes_py" ]
+    executable_args = [
+      "--chromium-output-directory",
+      "@WrappedPath(.)",
+    ]
+  }
+}
diff --git a/lacros/lacros_resource_sizes.py b/lacros/lacros_resource_sizes.py
new file mode 100755
index 000000000000..6004ae7f8832
--- /dev/null
+++ b/lacros/lacros_resource_sizes.py
@@ -0,0 +1,398 @@
+#!/usr/bin/env python3
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Reports binary size metrics for LaCrOS build artifacts.
+
+More information at //docs/speed/binary_size/metrics.md.
+""" + +import argparse +import collections +import contextlib +import json +import logging +import os +import subprocess +import sys +import tempfile +SRC_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')) +sys.path.insert(0, os.path.join(SRC_DIR, 'build', 'util')) +from lib.results import result_sink +from lib.results import result_types + + +@contextlib.contextmanager +def _SysPath(path): + """Library import context that temporarily appends |path| to |sys.path|.""" + if path and path not in sys.path: + sys.path.insert(0, path) + else: + path = None # Indicates that |sys.path| is not modified. + try: + yield + finally: + if path: + sys.path.pop(0) + + +DIR_SOURCE_ROOT = os.environ.get( + 'CHECKOUT_SOURCE_ROOT', + os.path.abspath( + os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))) + +BUILD_COMMON_PATH = os.path.join(DIR_SOURCE_ROOT, 'build', 'util', 'lib', + 'common') + +TRACING_PATH = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'catapult', + 'tracing') + +EU_STRIP_PATH = os.path.join(DIR_SOURCE_ROOT, 'buildtools', 'third_party', + 'eu-strip', 'bin', 'eu-strip') + +with _SysPath(BUILD_COMMON_PATH): + import perf_tests_results_helper # pylint: disable=import-error + +with _SysPath(TRACING_PATH): + from tracing.value import convert_chart_json # pylint: disable=import-error + +_BASE_CHART = { + 'format_version': '0.1', + 'benchmark_name': 'resource_sizes', + 'trace_rerun_options': [], + 'charts': {} +} + +_KEY_RAW = 'raw' +_KEY_GZIPPED = 'gzipped' +_KEY_STRIPPED = 'stripped' +_KEY_STRIPPED_GZIPPED = 'stripped_then_gzipped' + + +class _Group: + """A group of build artifacts whose file sizes are summed and tracked. + + Build artifacts for size tracking fall under these categories: + * File: A single file. + * Group: A collection of files. + * Dir: All files under a directory. + + Attributes: + paths: A list of files or directories to be tracked together. + title: The display name of the group. + track_stripped: Whether to also track summed stripped ELF sizes. + track_compressed: Whether to also track summed compressed sizes. + """ + + def __init__(self, paths, title, track_stripped=False, + track_compressed=False): + self.paths = paths + self.title = title + self.track_stripped = track_stripped + self.track_compressed = track_compressed + + def __eq__(self, other): + """Overrides the default implementation""" + if isinstance(other, _Group): + return (self.paths == other.paths) & (self.title == other.title) & ( + self.track_stripped == other.track_stripped) & ( + self.track_compressed == other.track_compressed) + return False + +# Common artifacts in official builder lacros-arm32 and lacros64 in +# src-internal. 
The artifcts can be found in +# chromium/src-internal/testing/buildbot/archive/lacros64.json and +# chromium/src-internal/testing/buildbot/archive/lacros-arm32.json +# chromium/src-internal/testing/buildbot/archive/lacros-arm64.json +_TRACKED_GROUPS = [ + _Group(paths=['chrome'], + title='File: chrome', + track_stripped=True, + track_compressed=True), + _Group(paths=['chrome_crashpad_handler'], + title='File: chrome_crashpad_handler'), + _Group(paths=['icudtl.dat'], title='File: icudtl.dat'), + _Group(paths=['icudtl.dat.hash'], title='File: icudtl.dat.hash'), + _Group(paths=['libEGL.so'], title='File: libEGL.so'), + _Group(paths=['libGLESv2.so'], title='File: libGLESv2.so'), + _Group(paths=['nacl_helper'], title='File: nacl_helper'), + _Group(paths=['resources.pak'], title='File: resources.pak'), + _Group(paths=[ + 'chrome_100_percent.pak', 'chrome_200_percent.pak', + 'headless_lib_data.pak', 'headless_lib_strings.pak' + ], + title='Group: Other PAKs'), + _Group(paths=['snapshot_blob.bin'], title='Group: Misc'), + _Group(paths=['locales/'], title='Dir: locales'), + _Group(paths=['WidevineCdm/'], title='Dir: WidevineCdm'), +] + + +def _visit_paths(base_dir, paths): + """Itemizes files specified by a list of paths. + + Args: + base_dir: Base directory for all elements in |paths|. + paths: A list of filenames or directory names to specify files whose sizes + to be counted. Directories are recursed. There's no de-duping effort. + Non-existing files or directories are ignored (with warning message). + """ + for path in paths: + full_path = os.path.join(base_dir, path) + if os.path.exists(full_path): + if os.path.isdir(full_path): + for dirpath, _, filenames in os.walk(full_path): + for filename in filenames: + yield os.path.join(dirpath, filename) + else: # Assume is file. + yield full_path + else: + logging.critical('Not found: %s', path) + + +def _is_probably_elf(filename): + """Heuristically decides whether |filename| is ELF via magic signature.""" + with open(filename, 'rb') as fh: + return fh.read(4) == '\x7FELF' + + +def _is_unstrippable_elf(filename): + """Identifies known-unstrippable ELF files to denoise the system.""" + return filename.endswith('.nexe') or filename.endswith('libwidevinecdm.so') + + +def _get_filesize(filename): + """Returns the size of a file, or 0 if file is not found.""" + try: + return os.path.getsize(filename) + except OSError: + logging.critical('Failed to get size: %s', filename) + return 0 + + +def _get_gzipped_filesize(filename): + """Returns the gzipped size of a file, or 0 if file is not found.""" + BUFFER_SIZE = 65536 + if not os.path.isfile(filename): + return 0 + try: + # Call gzip externally instead of using gzip package since it's > 2x faster. + cmd = ['gzip', '-c', filename] + p = subprocess.Popen(cmd, stdout=subprocess.PIPE) + # Manually counting bytes instead of using len(p.communicate()[0]) to avoid + # buffering the entire compressed data (can be ~100 MB). + ret = 0 + while True: + chunk = len(p.stdout.read(BUFFER_SIZE)) + if chunk == 0: + break + ret += chunk + return ret + except OSError: + logging.critical('Failed to get gzipped size: %s', filename) + return 0 + + +def _get_catagorized_filesizes(filename): + """Measures |filename| sizes under various transforms. + + Returns: A Counter (keyed by _Key_* constants) that stores measured sizes. + """ + sizes = collections.Counter() + sizes[_KEY_RAW] = _get_filesize(filename) + sizes[_KEY_GZIPPED] = _get_gzipped_filesize(filename) + + # Pre-assign values for non-ELF, or in case of failure for ELF. 
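+ # For example, a non-ELF artifact such as icudtl.dat keeps
+ # stripped == raw and stripped_then_gzipped == gzipped, since only ELF
+ # files are stripped below.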
+ sizes[_KEY_STRIPPED] = sizes[_KEY_RAW] + sizes[_KEY_STRIPPED_GZIPPED] = sizes[_KEY_GZIPPED] + + if _is_probably_elf(filename) and not _is_unstrippable_elf(filename): + try: + fd, temp_file = tempfile.mkstemp() + os.close(fd) + cmd = [EU_STRIP_PATH, filename, '-o', temp_file] + subprocess.check_output(cmd) + sizes[_KEY_STRIPPED] = _get_filesize(temp_file) + sizes[_KEY_STRIPPED_GZIPPED] = _get_gzipped_filesize(temp_file) + if sizes[_KEY_STRIPPED] > sizes[_KEY_RAW]: + # This weird case has been observed for libwidevinecdm.so. + logging.critical('Stripping made things worse for %s' % filename) + except subprocess.CalledProcessError: + logging.critical('Failed to strip file: %s' % filename) + finally: + os.unlink(temp_file) + return sizes + + +def _dump_chart_json(output_dir, chartjson): + """Writes chart histogram to JSON files. + + Output files: + results-chart.json contains the chart JSON. + perf_results.json contains histogram JSON for Catapult. + + Args: + output_dir: Directory to place the JSON files. + chartjson: Source JSON data for output files. + """ + results_path = os.path.join(output_dir, 'results-chart.json') + logging.critical('Dumping chartjson to %s', results_path) + with open(results_path, 'w') as json_file: + json.dump(chartjson, json_file, indent=2) + + # We would ideally generate a histogram set directly instead of generating + # chartjson then converting. However, perf_tests_results_helper is in + # //build, which doesn't seem to have any precedent for depending on + # anything in Catapult. This can probably be fixed, but since this doesn't + # need to be super fast or anything, converting is a good enough solution + # for the time being. + histogram_result = convert_chart_json.ConvertChartJson(results_path) + if histogram_result.returncode != 0: + raise Exception('chartjson conversion failed with error: ' + + histogram_result.stdout) + + histogram_path = os.path.join(output_dir, 'perf_results.json') + logging.critical('Dumping histograms to %s', histogram_path) + with open(histogram_path, 'wb') as json_file: + json_file.write(histogram_result.stdout) + + +def _run_resource_sizes(args): + """Main flow to extract and output size data.""" + chartjson = _BASE_CHART.copy() + chartjson.update({ + 'benchmark_description': + ('LaCrOS %s resource size information.' % args.arch) + }) + report_func = perf_tests_results_helper.ReportPerfResult + total_sizes = collections.Counter() + + def report_sizes(sizes, title, track_stripped, track_compressed): + report_func(chart_data=chartjson, + graph_title=title, + trace_title='size', + value=sizes[_KEY_RAW], + units='bytes') + + if track_stripped: + report_func(chart_data=chartjson, + graph_title=title + ' (Stripped)', + trace_title='size', + value=sizes[_KEY_STRIPPED], + units='bytes') + + if track_compressed: + report_func(chart_data=chartjson, + graph_title=title + ' (Gzipped)', + trace_title='size', + value=sizes[_KEY_GZIPPED], + units='bytes') + + if track_stripped and track_compressed: + report_func(chart_data=chartjson, + graph_title=title + ' (Stripped, Gzipped)', + trace_title='size', + value=sizes[_KEY_STRIPPED_GZIPPED], + units='bytes') + + tracked_groups = _TRACKED_GROUPS.copy() + # Architecture amd64 requires artifact nacl_irt_x86_64.nexe. + if args.arch == 'amd64': + tracked_groups.append( + _Group(paths=['nacl_irt_x86_64.nexe'], + title='File: nacl_irt_x86_64.nexe')) + # Architecture arm32 requires artifact nacl_irt_arm.nexe. 
+ elif args.arch == 'arm32':
+ tracked_groups.append(
+ _Group(paths=['nacl_irt_arm.nexe'], title='File: nacl_irt_arm.nexe'))
+ tracked_groups.append(
+ _Group(paths=['nacl_helper_bootstrap'],
+ title='File: nacl_helper_bootstrap'))
+ # TODO(https://crbug.com/1356761): remove the following part once nacl files
+ # are available.
+ elif args.arch == 'arm64':
+ tracked_groups.remove(
+ _Group(paths=['nacl_helper'], title='File: nacl_helper'))
+ for g in tracked_groups:
+ sizes = sum(
+ map(_get_categorized_filesizes, _visit_paths(args.out_dir, g.paths)),
+ collections.Counter())
+ report_sizes(sizes, g.title, g.track_stripped, g.track_compressed)
+
+ # Total compressed size is summed over the individual compressed sizes,
+ # instead of concatenating the files first and then compressing everything.
+ # This is done for simplicity. It also gives a conservative size estimate
+ # (assuming file metadata and overheads are negligible).
+ total_sizes += sizes
+
+ report_sizes(total_sizes, 'Total', True, True)
+
+ _dump_chart_json(args.output_dir, chartjson)
+
+
+def main():
+ """Parses arguments and runs high level flows."""
+ argparser = argparse.ArgumentParser(description='Writes LaCrOS size metrics.')
+
+ argparser.add_argument('--chromium-output-directory',
+ dest='out_dir',
+ required=True,
+ type=os.path.realpath,
+ help='Location of the build artifacts.')
+ argparser.add_argument('--arch',
+ required=True,
+ type=str,
+ help='The architecture of lacros; valid values: amd64,'
+ ' arm32, arm64')
+
+ output_group = argparser.add_mutually_exclusive_group()
+
+ output_group.add_argument('--output-dir',
+ default='.',
+ help='Directory to save chartjson to.')
+
+ # Accepted to conform to the isolated script interface, but ignored.
+ argparser.add_argument('--isolated-script-test-filter',
+ help=argparse.SUPPRESS)
+ argparser.add_argument('--isolated-script-test-perf-output',
+ type=os.path.realpath,
+ help=argparse.SUPPRESS)
+
+ output_group.add_argument(
+ '--isolated-script-test-output',
+ type=os.path.realpath,
+ help='File to which results will be written in the simplified JSON '
+ 'output format.')
+
+ args = argparser.parse_args()
+
+ isolated_script_output = {'valid': False, 'failures': []}
+ if args.isolated_script_test_output:
+ test_name = 'lacros_resource_sizes'
+ args.output_dir = os.path.join(
+ os.path.dirname(args.isolated_script_test_output), test_name)
+ if not os.path.exists(args.output_dir):
+ os.makedirs(args.output_dir)
+
+ try:
+ _run_resource_sizes(args)
+ isolated_script_output = {'valid': True, 'failures': []}
+ finally:
+ if args.isolated_script_test_output:
+ results_path = os.path.join(args.output_dir, 'test_results.json')
+ with open(results_path, 'w') as output_file:
+ json.dump(isolated_script_output, output_file)
+ with open(args.isolated_script_test_output, 'w') as output_file:
+ json.dump(isolated_script_output, output_file)
+ result_sink_client = result_sink.TryInitClient()
+ if result_sink_client:
+ status = result_types.PASS
+ if not isolated_script_output['valid']:
+ status = result_types.UNKNOWN
+ elif isolated_script_output['failures']:
+ status = result_types.FAIL
+ result_sink_client.Post(test_name, status, None, None, None)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/lacros/lacros_resource_sizes.pydeps b/lacros/lacros_resource_sizes.pydeps
new file mode 100644
index 000000000000..c2437ca51252
--- /dev/null
+++ b/lacros/lacros_resource_sizes.pydeps
@@ -0,0 +1,15 @@
+# Generated by running:
+# build/print_python_deps.py --root build/lacros --output
build/lacros/lacros_resource_sizes.pydeps build/lacros/lacros_resource_sizes.py +../../third_party/catapult/third_party/vinn/vinn/__init__.py +../../third_party/catapult/third_party/vinn/vinn/_vinn.py +../../third_party/catapult/tracing/tracing/__init__.py +../../third_party/catapult/tracing/tracing/value/__init__.py +../../third_party/catapult/tracing/tracing/value/convert_chart_json.py +../../third_party/catapult/tracing/tracing_project.py +../util/lib/__init__.py +../util/lib/common/perf_result_data_type.py +../util/lib/common/perf_tests_results_helper.py +../util/lib/results/__init__.py +../util/lib/results/result_sink.py +../util/lib/results/result_types.py +lacros_resource_sizes.py diff --git a/lacros/mojo_connection_lacros_launcher.py b/lacros/mojo_connection_lacros_launcher.py new file mode 100755 index 000000000000..a0f2cfcd2ad8 --- /dev/null +++ b/lacros/mojo_connection_lacros_launcher.py @@ -0,0 +1,210 @@ +#!/usr/bin/env vpython3 +# +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Helps launch lacros-chrome with mojo connection established on Linux + or Chrome OS. Use on Chrome OS is for dev purposes. + + The main use case is to be able to launch lacros-chrome in a debugger. + + Please first launch an ash-chrome in the background as usual except without + the '--lacros-chrome-path' argument and with an additional + '--lacros-mojo-socket-for-testing' argument pointing to a socket path: + + XDG_RUNTIME_DIR=/tmp/ash_chrome_xdg_runtime ./out/ash/chrome \\ + --user-data-dir=/tmp/ash-chrome --enable-wayland-server \\ + --no-startup-window --enable-features=LacrosSupport \\ + --lacros-mojo-socket-for-testing=/tmp/lacros.sock + + Then, run this script with '-s' pointing to the same socket path used to + launch ash-chrome, followed by a command one would use to launch lacros-chrome + inside a debugger: + + EGL_PLATFORM=surfaceless XDG_RUNTIME_DIR=/tmp/ash_chrome_xdg_runtime \\ + ./build/lacros/mojo_connection_lacros_launcher.py -s /tmp/lacros.sock + gdb --args ./out/lacros-release/chrome --user-data-dir=/tmp/lacros-chrome +""" + +import argparse +import array +import contextlib +import getpass +import grp +import os +import pathlib +import pwd +import resource +import socket +import sys +import subprocess + + +_NUM_FDS_MAX = 3 + + +# contextlib.nullcontext is introduced in 3.7, while Python version on +# CrOS is still 3.6. This is for backward compatibility. +class NullContext: + def __init__(self, enter_ret=None): + self.enter_ret = enter_ret + + def __enter__(self): + return self.enter_ret + + def __exit__(self, exc_type, exc_value, trace): + pass + + +def _ReceiveFDs(sock): + """Receives FDs from ash-chrome that will be used to launch lacros-chrome. + + Args: + sock: A connected unix domain socket. + + Returns: + File objects for the mojo connection and maybe startup data file. + """ + # This function is borrowed from with modifications: + # https://docs.python.org/3/library/socket.html#socket.socket.recvmsg + fds = array.array("i") # Array of ints + # Along with the file descriptor, ash-chrome also sends the version in the + # regular data. + version, ancdata, _, _ = sock.recvmsg( + 1, socket.CMSG_LEN(fds.itemsize * _NUM_FDS_MAX)) + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level == socket.SOL_SOCKET and cmsg_type == socket.SCM_RIGHTS: + # There are three versions currently this script supports. 
+ # The oldest one: ash-chrome returns one FD, the mojo connection of the
+ # old bootstrap procedure (i.e., it will be BrowserService).
+ # The middle one: ash-chrome returns two FDs, the mojo connection of the
+ # old bootstrap procedure, and the second for the start up data FD.
+ # The newest one: ash-chrome returns three FDs, the mojo connection of the
+ # old bootstrap procedure, the second for the start up data FD, and
+ # the third for another mojo connection of the new bootstrap procedure.
+ # TODO(crbug.com/1156033): Clean up the code to drop the support of the
+ # oldest one after M91.
+ # TODO(crbug.com/1180712): Clean up the mojo procedure support of
+ # the middle one after M92.
+ cmsg_len_candidates = [(i + 1) * fds.itemsize
+ for i in range(_NUM_FDS_MAX)]
+ assert len(cmsg_data) in cmsg_len_candidates, (
+ 'CMSG_LEN is unexpected: %d' % (len(cmsg_data), ))
+ fds.frombytes(cmsg_data[:])
+
+ if version == b'\x01':
+ assert len(fds) == 2, 'Expecting exactly 2 FDs'
+ startup_fd = os.fdopen(fds[0])
+ mojo_fd = os.fdopen(fds[1])
+ elif version:
+ raise AssertionError('Unknown version: \\x%s' % version.hex())
+ else:
+ raise AssertionError('Failed to receive startup message from ash-chrome. '
+ 'Make sure you\'re logged in to Chrome OS.')
+ return startup_fd, mojo_fd
+
+
+def _MaybeClosing(fileobj):
+ """Returns a closing context manager if the given fileobj is not None.
+
+ If the given fileobj is None, returns NullContext.
+ """
+ return (contextlib.closing if fileobj else NullContext)(fileobj)
+
+
+def _ApplyCgroups():
+ """Applies cgroups used in ChromeOS to lacros chrome as well."""
+ # Cgroup directories taken from ChromeOS session_manager job configuration.
+ UI_FREEZER_CGROUP_DIR = '/sys/fs/cgroup/freezer/ui'
+ UI_CPU_CGROUP_DIR = '/sys/fs/cgroup/cpu/ui'
+ pid = os.getpid()
+ with open(os.path.join(UI_CPU_CGROUP_DIR, 'tasks'), 'a') as f:
+ f.write(str(pid) + '\n')
+ with open(os.path.join(UI_FREEZER_CGROUP_DIR, 'cgroup.procs'), 'a') as f:
+ f.write(str(pid) + '\n')
+
+
+def _PreExec(uid, gid, groups):
+ """Sets the environment up for running the chrome binary."""
+ # Nice and realtime priority values taken from the ChromeOS session_manager
+ # job configuration.
+ resource.setrlimit(resource.RLIMIT_NICE, (40, 40))
+ resource.setrlimit(resource.RLIMIT_RTPRIO, (10, 10))
+ os.setgroups(groups)
+ os.setgid(gid)
+ os.setuid(uid)
+
+
+def Main():
+ arg_parser = argparse.ArgumentParser()
+ arg_parser.usage = __doc__
+ arg_parser.add_argument(
+ '-r',
+ '--root-env-setup',
+ action='store_true',
+ help='Set typical cgroups and environment for chrome. '
+ 'If this is set, this script must be run as root.')
+ arg_parser.add_argument(
+ '-s',
+ '--socket-path',
+ type=pathlib.Path,
+ required=True,
+ help='Absolute path to the socket that was used to start ash-chrome, '
+ 'for example: "/tmp/lacros.socket"')
+ flags, args = arg_parser.parse_known_args()
+
+ assert 'XDG_RUNTIME_DIR' in os.environ
+ assert os.environ.get('EGL_PLATFORM') == 'surfaceless'
+
+ if flags.root_env_setup:
+ # Check if we are actually root and error otherwise.
+ assert getpass.getuser() == 'root', \
+ 'Root required environment flag specified, but user is not root.'
+ # Apply necessary cgroups to our own process, so they will be inherited by
+ # lacros chrome.
+ _ApplyCgroups()
+ else:
+ print('WARNING: Running chrome without the appropriate environment. '
+ 'This may affect performance test results. '
+ 'Set -r and run as root to avoid this.')
+
+ with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
+ sock.connect(flags.socket_path.as_posix())
+ startup_connection, mojo_connection = (_ReceiveFDs(sock))
+
+ with _MaybeClosing(startup_connection), _MaybeClosing(mojo_connection):
+ cmd = args[:]
+ pass_fds = []
+ if startup_connection:
+ cmd.append('--cros-startup-data-fd=%d' % startup_connection.fileno())
+ pass_fds.append(startup_connection.fileno())
+ if mojo_connection:
+ cmd.append('--crosapi-mojo-platform-channel-handle=%d' %
+ mojo_connection.fileno())
+ pass_fds.append(mojo_connection.fileno())
+
+ env = os.environ.copy()
+ if flags.root_env_setup:
+ username = 'chronos'
+ p = pwd.getpwnam(username)
+ uid = p.pw_uid
+ gid = p.pw_gid
+ groups = [g.gr_gid for g in grp.getgrall() if username in g.gr_mem]
+ env['HOME'] = p.pw_dir
+ env['LOGNAME'] = username
+ env['USER'] = username
+
+ def fn():
+ return _PreExec(uid, gid, groups)
+ else:
+
+ def fn():
+ return None
+
+ proc = subprocess.Popen(cmd, pass_fds=pass_fds, preexec_fn=fn)
+
+ return proc.wait()
+
+
+if __name__ == '__main__':
+ sys.exit(Main())
diff --git a/lacros/test_runner.py b/lacros/test_runner.py
new file mode 100755
index 000000000000..ab319dec62e3
--- /dev/null
+++ b/lacros/test_runner.py
@@ -0,0 +1,856 @@
+#!/usr/bin/env python3
+#
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""This script facilitates running tests for lacros on Linux.
+
+ In order to run lacros tests on Linux, please first follow bit.ly/3juQVNJ
+ to set up a build directory with the lacros-chrome-on-linux build
+ configuration, and make sure the corresponding test targets are built
+ successfully.
+
+Example usages
+
+ ./build/lacros/test_runner.py test out/lacros/url_unittests
+ ./build/lacros/test_runner.py test out/lacros/browser_tests
+
+ The commands above run url_unittests and browser_tests respectively; more
+ specifically, url_unittests is executed directly while browser_tests is
+ executed with the latest version of prebuilt ash-chrome. The behavior is
+ controlled by |_TARGETS_REQUIRE_ASH_CHROME|. It's worth noting that the list
+ is maintained manually, so if you see something wrong, please upload a CL to
+ fix it.
+
+ ./build/lacros/test_runner.py test out/lacros/browser_tests \\
+ --gtest_filter=BrowserTest.Title
+
+ The above command only runs 'BrowserTest.Title', and any argument accepted by
+ the underlying test binary can be specified in the command.
+
+ ./build/lacros/test_runner.py test out/lacros/browser_tests \\
+ --ash-chrome-version=793554
+
+ The above command runs tests with a given version of ash-chrome, which is
+ useful to reproduce test failures. The version corresponds to the commit
+ position of commits on the master branch, and a list of prebuilt versions can
+ be found at: gs://ash-chromium-on-linux-prebuilts/x86_64.
+
+ ./testing/xvfb.py ./build/lacros/test_runner.py test out/lacros/browser_tests
+
+ The above command starts ash-chrome with xvfb instead of an X11 window, and
+ it's useful when running tests without a display attached, such as when
+ sshing into a remote machine.
+
+ For version skew testing when passing --ash-chrome-path-override, the runner
+ will try to find the ash major version and the Lacros major version. If ash
+ is newer (major version larger), the runner will not run any tests and just
+ return success.
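+
+ ./build/lacros/test_runner.py test out/lacros/browser_tests \\
+ --ash-chrome-path-override=/path/to/test_ash_chrome
+
+ The above is an illustrative sketch of such a version skew run: the override
+ should point at an ash binary built at a different milestone (the flag is
+ defined in Main() below; the path here is a placeholder).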
+
+Interactively debugging tests
+
+ Any of the previous examples accept the switches
+ --gdb
+ --lldb
+ to run the tests in the corresponding debugger.
+"""
+
+import argparse
+import json
+import os
+import logging
+import re
+import shutil
+import signal
+import subprocess
+import sys
+import tempfile
+import time
+import zipfile
+
+_SRC_ROOT = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir))
+sys.path.append(os.path.join(_SRC_ROOT, 'third_party', 'depot_tools'))
+
+
+# The CIPD path for prebuilt ash chrome.
+_ASH_CIPD_PATH = 'chromium/testing/linux-ash-chromium/x86_64/ash.zip'
+
+
+# Directory to cache downloaded ash-chrome versions to avoid re-downloading.
+_PREBUILT_ASH_CHROME_DIR = os.path.join(os.path.dirname(__file__),
+ 'prebuilt_ash_chrome')
+
+# File path to the asan symbolizer executable.
+_ASAN_SYMBOLIZER_PATH = os.path.join(_SRC_ROOT, 'tools', 'valgrind', 'asan',
+ 'asan_symbolize.py')
+
+# Number of seconds to wait for ash-chrome to start.
+ASH_CHROME_TIMEOUT_SECONDS = (
+ 300 if os.environ.get('ASH_WRAPPER', None) else 10)
+
+# List of targets that require ash-chrome as a Wayland server in order to run.
+_TARGETS_REQUIRE_ASH_CHROME = [
+ 'app_shell_unittests',
+ 'aura_unittests',
+ 'browser_tests',
+ 'components_unittests',
+ 'compositor_unittests',
+ 'content_unittests',
+ 'dbus_unittests',
+ 'extensions_unittests',
+ 'media_unittests',
+ 'message_center_unittests',
+ 'snapshot_unittests',
+ 'sync_integration_tests',
+ 'unit_tests',
+ 'views_unittests',
+ 'wm_unittests',
+
+ # regex patterns.
+ '.*_browsertests',
+ '.*interactive_ui_tests'
+]
+
+# List of targets that require ash-chrome to support crosapi mojo APIs.
+_TARGETS_REQUIRE_MOJO_CROSAPI = [
+ # TODO(jamescook): Add 'browser_tests' after multiple crosapi connections
+ # are allowed. For now we only enable crosapi in targets that run tests
+ # serially.
+ 'interactive_ui_tests',
+ 'lacros_chrome_browsertests',
+ 'lacros_chrome_browsertests_run_in_series'
+]
+
+# Default test filter file for each target. These filter files will be
+# used by default if no other filter file gets specified.
+_DEFAULT_FILTER_FILES_MAPPING = {
+ 'browser_tests': 'linux-lacros.browser_tests.filter',
+ 'components_unittests': 'linux-lacros.components_unittests.filter',
+ 'content_browsertests': 'linux-lacros.content_browsertests.filter',
+ 'interactive_ui_tests': 'linux-lacros.interactive_ui_tests.filter',
+ 'lacros_chrome_browsertests':
+ 'linux-lacros.lacros_chrome_browsertests.filter',
+ 'sync_integration_tests': 'linux-lacros.sync_integration_tests.filter',
+ 'unit_tests': 'linux-lacros.unit_tests.filter',
+}
+
+
+def _GetAshChromeDirPath(version):
+ """Returns a path to the dir storing the downloaded version of ash-chrome."""
+ return os.path.join(_PREBUILT_ASH_CHROME_DIR, version)
+
+
+def _remove_unused_ash_chrome_versions(version_to_skip):
+ """Removes unused ash-chrome versions to save disk space.
+
+ Currently, when an ash-chrome zip is downloaded and unpacked, the atime/mtime
+ of the dir and the files are NOW instead of the time when they were built,
+ but there is no guarantee that this will always be the behavior in the
+ future, so avoid removing the current version just in case.
+
+ Args:
+ version_to_skip (str): the version to skip removing regardless of its age.
+ """ + days = 7 + expiration_duration = 60 * 60 * 24 * days + + for f in os.listdir(_PREBUILT_ASH_CHROME_DIR): + if f == version_to_skip: + continue + + p = os.path.join(_PREBUILT_ASH_CHROME_DIR, f) + if os.path.isfile(p): + # The prebuilt ash-chrome dir is NOT supposed to contain any files, remove + # them to keep the directory clean. + os.remove(p) + continue + chrome_path = os.path.join(p, 'test_ash_chrome') + if not os.path.exists(chrome_path): + chrome_path = p + age = time.time() - os.path.getatime(chrome_path) + if age > expiration_duration: + logging.info( + 'Removing ash-chrome: "%s" as it hasn\'t been used in the ' + 'past %d days', p, days) + shutil.rmtree(p) + + +def _GetLatestVersionOfAshChrome(): + '''Get the latest ash chrome version. + + Get the package version info with canary ref. + + Returns: + A string with the chrome version. + + Raises: + RuntimeError: if we can not get the version. + ''' + cp = subprocess.run( + ['cipd', 'describe', _ASH_CIPD_PATH, '-version', 'canary'], + capture_output=True) + assert (cp.returncode == 0) + groups = re.search(r'version:(?P[\d\.]+)', str(cp.stdout)) + if not groups: + raise RuntimeError('Can not find the version. Error message: %s' % + cp.stdout) + return groups.group('version') + + +def _DownloadAshChromeFromCipd(path, version): + '''Download the ash chrome with the requested version. + + Args: + path: string for the downloaded ash chrome folder. + version: string for the ash chrome version. + + Returns: + A string representing the path for the downloaded ash chrome. + ''' + with tempfile.TemporaryDirectory() as temp_dir: + ensure_file_path = os.path.join(temp_dir, 'ensure_file.txt') + f = open(ensure_file_path, 'w+') + f.write(_ASH_CIPD_PATH + ' version:' + version) + f.close() + subprocess.run( + ['cipd', 'ensure', '-ensure-file', ensure_file_path, '-root', path]) + + +def _DoubleCheckDownloadedAshChrome(path, version): + '''Check the downloaded ash is the expected version. + + Double check by running the chrome binary with --version. + + Args: + path: string for the downloaded ash chrome folder. + version: string for the expected ash chrome version. + + Raises: + RuntimeError if no test_ash_chrome binary can be found. + ''' + test_ash_chrome = os.path.join(path, 'test_ash_chrome') + if not os.path.exists(test_ash_chrome): + raise RuntimeError('Can not find test_ash_chrome binary under %s' % path) + cp = subprocess.run([test_ash_chrome, '--version'], capture_output=True) + assert (cp.returncode == 0) + if str(cp.stdout).find(version) == -1: + logging.warning( + 'The downloaded ash chrome version is %s, but the ' + 'expected ash chrome is %s. There is a version mismatch. Please ' + 'file a bug to OS>Lacros so someone can take a look.' % + (cp.stdout, version)) + + +def _DownloadAshChromeIfNecessary(version): + """Download a given version of ash-chrome if not already exists. + + Args: + version: A string representing the version, such as "793554". + + Raises: + RuntimeError: If failed to download the specified version, for example, + if the version is not present on gcs. + """ + + def IsAshChromeDirValid(ash_chrome_dir): + # This function assumes that once 'chrome' is present, other dependencies + # will be present as well, it's not always true, for example, if the test + # runner process gets killed in the middle of unzipping (~2 seconds), but + # it's unlikely for the assumption to break in practice. 
+ return os.path.isdir(ash_chrome_dir) and os.path.isfile(
+ os.path.join(ash_chrome_dir, 'test_ash_chrome'))
+
+ ash_chrome_dir = _GetAshChromeDirPath(version)
+ if IsAshChromeDirValid(ash_chrome_dir):
+ return
+
+ shutil.rmtree(ash_chrome_dir, ignore_errors=True)
+ os.makedirs(ash_chrome_dir)
+ _DownloadAshChromeFromCipd(ash_chrome_dir, version)
+ _DoubleCheckDownloadedAshChrome(ash_chrome_dir, version)
+ _remove_unused_ash_chrome_versions(version)
+
+
+def _WaitForAshChromeToStart(tmp_xdg_dir, lacros_mojo_socket_file,
+ enable_mojo_crosapi, ash_ready_file):
+ """Waits for Ash-Chrome to be up and running and returns a boolean indicator.
+
+ Determines whether ash-chrome is up and running by checking whether two files
+ (lock file + socket) have been created in the |XDG_RUNTIME_DIR| and, if the
+ mojo "crosapi" interface is enabled, whether the lacros mojo socket file has
+ been created.
+ TODO(crbug.com/1107966): Figure out a more reliable hook to determine the
+ status of ash-chrome, likely through the mojo connection.
+
+ Args:
+ tmp_xdg_dir (str): Path to the XDG_RUNTIME_DIR.
+ lacros_mojo_socket_file (str): Path to the lacros mojo socket file.
+ enable_mojo_crosapi (bool): Whether to bootstrap the crosapi mojo interface
+ between ash and the lacros test binary.
+ ash_ready_file (str): Path to a non-existing file. After ash is ready for
+ testing, the file will be created.
+
+ Returns:
+ A boolean indicating whether Ash-chrome is up and running.
+ """
+
+ def IsAshChromeReady(tmp_xdg_dir, lacros_mojo_socket_file,
+ enable_mojo_crosapi, ash_ready_file):
+ # There should be 2 wayland files.
+ if len(os.listdir(tmp_xdg_dir)) < 2:
+ return False
+ if enable_mojo_crosapi and not os.path.exists(lacros_mojo_socket_file):
+ return False
+ return os.path.exists(ash_ready_file)
+
+ time_counter = 0
+ while not IsAshChromeReady(tmp_xdg_dir, lacros_mojo_socket_file,
+ enable_mojo_crosapi, ash_ready_file):
+ time.sleep(0.5)
+ time_counter += 0.5
+ if time_counter > ASH_CHROME_TIMEOUT_SECONDS:
+ break
+
+ return IsAshChromeReady(tmp_xdg_dir, lacros_mojo_socket_file,
+ enable_mojo_crosapi, ash_ready_file)
+
+
+def _ExtractAshMajorVersion(file_path):
+ """Extracts the major version from file_path.
+
+ File path like this:
+ ../../lacros_version_skew_tests_v94.0.4588.0/test_ash_chrome
+
+ Returns:
+ int representing the major version. Or 0 if it can't extract
+ the major version.
+ """
+ m = re.search(
+ 'lacros_version_skew_tests_v(?P<version>[0-9]+).[0-9]+.[0-9]+.[0-9]+/',
+ file_path)
+ if m and 'version' in m.groupdict():
+ return int(m.group('version'))
+ logging.warning('Cannot find the ash version in %s.' % file_path)
+ # Returns ash major version as 0, so we can still run tests.
+ # This likely happens because the user is running in a local environment.
+ return 0
+
+
+def _FindLacrosMajorVersionFromMetadata():
+ # This handles the logic on bots. When running on bots,
+ # we don't copy source files to test machines. So we build a
+ # metadata.json file which contains version information.
+ if not os.path.exists('metadata.json'):
+ logging.error('Cannot determine the current version.')
+ # Returns 0 so it can't run any tests.
+ return 0
+ version = ''
+ with open('metadata.json', 'r') as file:
+ content = json.load(file)
+ version = content['content']['version']
+ return int(version[:version.find('.')])
+
+
+def _FindLacrosMajorVersion():
+ """Returns the major version in the current checkout.
+
+ It first tries to read src/chrome/VERSION. If that's not available,
+ it falls back to metadata.json.
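+ For example, a VERSION file whose first line is MAJOR=95 yields 95.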
+
+ Returns:
+ int representing the major version. Or 0 if it fails to
+ determine the version.
+ """
+ version_file = os.path.abspath(
+ os.path.join(os.path.abspath(os.path.dirname(__file__)),
+ '../../chrome/VERSION'))
+ # This mostly happens for local development where
+ # src/chrome/VERSION exists.
+ if os.path.exists(version_file):
+ with open(version_file, 'r') as f:
+ lines = f.readlines()
+ return int(lines[0][lines[0].find('=') + 1:-1])
+ return _FindLacrosMajorVersionFromMetadata()
+
+
+def _ParseSummaryOutput(forward_args):
+ """Finds the summary output file path.
+
+ Args:
+ forward_args (list): Args to be forwarded to the test command.
+
+ Returns:
+ None if not found, or str representing the output file path.
+ """
+ logging.warning(forward_args)
+ for arg in forward_args:
+ if arg.startswith('--test-launcher-summary-output='):
+ return arg[len('--test-launcher-summary-output='):]
+ return None
+
+
+def _IsRunningOnBots(forward_args):
+ """Detects if the script is running on bots or not.
+
+ Args:
+ forward_args (list): Args to be forwarded to the test command.
+
+ Returns:
+ True if the script is running on bots. Otherwise returns False.
+ """
+ return '--test-launcher-bot-mode' in forward_args
+
+
+def _KillNicely(proc, timeout_secs=2, first_wait_secs=0):
+ """Kills a subprocess nicely.
+
+ Args:
+ proc: The subprocess to kill.
+ timeout_secs: The timeout to wait in seconds.
+ first_wait_secs: The grace period before sending first SIGTERM in seconds.
+ """
+ if not proc:
+ return
+
+ if first_wait_secs:
+ try:
+ proc.wait(first_wait_secs)
+ return
+ except subprocess.TimeoutExpired:
+ pass
+
+ if proc.poll() is None:
+ proc.terminate()
+ try:
+ proc.wait(timeout_secs)
+ except subprocess.TimeoutExpired:
+ proc.kill()
+ proc.wait()
+
+
+def _ClearDir(dirpath):
+ """Deletes everything within the directory.
+
+ Args:
+ dirpath: The path of the directory.
+ """
+ for e in os.scandir(dirpath):
+ if e.is_dir():
+ shutil.rmtree(e.path)
+ elif e.is_file():
+ os.remove(e.path)
+
+
+def _LaunchDebugger(args, forward_args, test_env):
+ """Launches the requested debugger.
+
+ This is used to wrap the test invocation in a debugger. It returns the
+ created Popen class of the debugger process.
+
+ Args:
+ args (dict): Args for this script.
+ forward_args (list): Args to be forwarded to the test command.
+ test_env (dict): Computed environment variables for the test.
+ """
+ logging.info('Starting debugger.')
+
+ # Force the tests into single-process-test mode for debugging unless manually
+ # specified. Otherwise the tests will run in a child process that the debugger
+ # won't be attached to and the debugger won't do anything.
+ if not ("--single-process" in forward_args
+ or "--single-process-tests" in forward_args):
+ forward_args += ["--single-process-tests"]
+
+ # Adding --single-process-tests can cause some tests to fail when they're
+ # run in the same process. Forcing the user to specify a filter will prevent
+ # a later error.
+ if not [i for i in forward_args if i.startswith("--gtest_filter")]:
+ logging.error("""Interactive debugging requested without --gtest_filter
+
+This script adds --single-process-tests to support interactive debugging but
+some tests will fail in this mode unless run independently. To debug a test
+specify a --gtest_filter=Foo.Bar to name the test you want to debug.
+""")
+ sys.exit(1)
+
+ # This code attempts to source the debugger configuration file. 
Some + # users will have this in their init but sourcing it more than once is + # harmless and helps people that haven't configured it. + if args.gdb: + gdbinit_file = os.path.normpath( + os.path.join(os.path.realpath(__file__), "../../../tools/gdb/gdbinit")) + debugger_command = [ + 'gdb', '--init-eval-command', 'source ' + gdbinit_file, '--args' + ] + else: + lldbinit_dir = os.path.normpath( + os.path.join(os.path.realpath(__file__), "../../../tools/lldb")) + debugger_command = [ + 'lldb', '-O', + "script sys.path[:0] = ['%s']" % lldbinit_dir, '-O', + 'script import lldbinit', '--' + ] + debugger_command += [args.command] + forward_args + return subprocess.Popen(debugger_command, env=test_env) + + +def _RunTestWithAshChrome(args, forward_args): + """Runs tests with ash-chrome. + + Args: + args (dict): Args for this script. + forward_args (list): Args to be forwarded to the test command. + """ + if args.ash_chrome_path_override: + ash_chrome_file = args.ash_chrome_path_override + ash_major_version = _ExtractAshMajorVersion(ash_chrome_file) + lacros_major_version = _FindLacrosMajorVersion() + if ash_major_version > lacros_major_version: + logging.warning('''Not running any tests, because we do not \ +support version skew testing for Lacros M%s against ash M%s''' % + (lacros_major_version, ash_major_version)) + # Create an empty output.json file so result adapter can read + # the file. Or else result adapter will report no file found + # and result infra failure. + output_json = _ParseSummaryOutput(forward_args) + if output_json: + with open(output_json, 'w') as f: + f.write("""{"all_tests":[],"disabled_tests":[],"global_tags":[], +"per_iteration_data":[],"test_locations":{}}""") + # Although we don't run any tests, this is considered as success. + return 0 + if not os.path.exists(ash_chrome_file): + logging.error("""Can not find ash chrome at %s. Did you download \ +the ash from CIPD? If you don't plan to build your own ash, you need \ +to download first. Example commandlines: + $ cipd auth-login + $ echo "chromium/testing/linux-ash-chromium/x86_64/ash.zip \ +version:92.0.4515.130" > /tmp/ensure-file.txt + $ cipd ensure -ensure-file /tmp/ensure-file.txt \ +-root lacros_version_skew_tests_v92.0.4515.130 + Then you can use --ash-chrome-path-override=\ +lacros_version_skew_tests_v92.0.4515.130/test_ash_chrome +""" % ash_chrome_file) + return 1 + elif args.ash_chrome_path: + ash_chrome_file = args.ash_chrome_path + else: + ash_chrome_version = (args.ash_chrome_version + or _GetLatestVersionOfAshChrome()) + _DownloadAshChromeIfNecessary(ash_chrome_version) + logging.info('Ash-chrome version: %s', ash_chrome_version) + + ash_chrome_file = os.path.join(_GetAshChromeDirPath(ash_chrome_version), + 'test_ash_chrome') + try: + # Starts Ash-Chrome. + tmp_xdg_dir_name = tempfile.mkdtemp() + tmp_ash_data_dir_name = tempfile.mkdtemp() + + # Please refer to below file for how mojo connection is set up in testing. 
+ # //chrome/browser/ash/crosapi/test_mojo_connection_manager.h
+ lacros_mojo_socket_file = '%s/lacros.sock' % tmp_ash_data_dir_name
+ lacros_mojo_socket_arg = ('--lacros-mojo-socket-for-testing=%s' %
+ lacros_mojo_socket_file)
+ ash_ready_file = '%s/ash_ready.txt' % tmp_ash_data_dir_name
+ enable_mojo_crosapi = any(t == os.path.basename(args.command)
+ for t in _TARGETS_REQUIRE_MOJO_CROSAPI)
+ ash_wayland_socket_name = 'wayland-exo'
+
+ ash_process = None
+ ash_env = os.environ.copy()
+ ash_env['XDG_RUNTIME_DIR'] = tmp_xdg_dir_name
+ ash_cmd = [
+ ash_chrome_file,
+ '--user-data-dir=%s' % tmp_ash_data_dir_name,
+ '--enable-wayland-server',
+ '--no-startup-window',
+ '--disable-input-event-activation-protection',
+ '--disable-lacros-keep-alive',
+ '--disable-login-lacros-opening',
+ '--enable-field-trial-config',
+ '--enable-features=LacrosSupport,LacrosPrimary,LacrosOnly',
+ '--ash-ready-file-path=%s' % ash_ready_file,
+ '--wayland-server-socket=%s' % ash_wayland_socket_name,
+ ]
+ if '--enable-pixel-output-in-tests' not in forward_args:
+ ash_cmd.append('--disable-gl-drawing-for-tests')
+
+ if enable_mojo_crosapi:
+ ash_cmd.append(lacros_mojo_socket_arg)
+
+ # Users can specify a wrapper for the ash binary to do things like
+ # attaching debuggers. For example, this will open a new terminal window
+ # and run GDB.
+ # $ export ASH_WRAPPER="gnome-terminal -- gdb --ex=r --args"
+ ash_wrapper = os.environ.get('ASH_WRAPPER', None)
+ if ash_wrapper:
+ logging.info('Running ash with "ASH_WRAPPER": %s', ash_wrapper)
+ ash_cmd = list(ash_wrapper.split()) + ash_cmd
+
+ ash_process_has_started = False
+ total_tries = 3
+ num_tries = 0
+ ash_start_time = None
+
+ # Create a log file if the user wants to have one.
+ ash_log = None
+ ash_log_path = None
+
+ run_tests_in_debugger = args.gdb or args.lldb
+
+ if args.ash_logging_path:
+ ash_log_path = args.ash_logging_path
+ # Put ash logs in a separate file on bots.
+ # For asan builds, the ash log is not symbolized. In order to
+ # read the stack trace, we don't redirect logs to another file.
+ elif _IsRunningOnBots(forward_args) and not args.combine_ash_logs_on_bots:
+ summary_file = _ParseSummaryOutput(forward_args)
+ if summary_file:
+ ash_log_path = os.path.join(os.path.dirname(summary_file),
+ 'ash_chrome.log')
+ elif run_tests_in_debugger:
+ # The debugger is unusable when all Ash logs are getting dumped to the
+ # same terminal. Redirect to a log file if there isn't one specified.
+ logging.info("Running in the debugger and --ash-logging-path is not " +
+ "specified, defaulting to the current directory.")
+ ash_log_path = 'ash_chrome.log'
+
+ if ash_log_path:
+ ash_log = open(ash_log_path, 'a')
+ logging.info('Writing ash-chrome logs to: %s', ash_log_path)
+
+ ash_stdout = ash_log or None
+ test_stdout = None
+
+ # Set up the asan symbolizer.
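+ # When symbolization is enabled below, stdout is piped through the
+ # symbolizer, i.e. ash_process -> asan_symbolize.py -> ash log (or stdout).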
+ ash_symbolize_process = None + test_symbolize_process = None + should_symbolize = False + if args.asan_symbolize_output and os.path.exists(_ASAN_SYMBOLIZER_PATH): + should_symbolize = True + ash_symbolize_stdout = ash_stdout + ash_stdout = subprocess.PIPE + test_stdout = subprocess.PIPE + + while not ash_process_has_started and num_tries < total_tries: + num_tries += 1 + ash_start_time = time.monotonic() + logging.info('Starting ash-chrome.') + ash_process = subprocess.Popen(ash_cmd, + env=ash_env, + stdout=ash_stdout, + stderr=subprocess.STDOUT) + + if should_symbolize: + logging.info('Symbolizing ash logs with asan symbolizer.') + ash_symbolize_process = subprocess.Popen([_ASAN_SYMBOLIZER_PATH], + stdin=ash_process.stdout, + stdout=ash_symbolize_stdout, + stderr=subprocess.STDOUT) + # Allow ash_process to receive a SIGPIPE if symbolize process exits. + ash_process.stdout.close() + + ash_process_has_started = _WaitForAshChromeToStart( + tmp_xdg_dir_name, lacros_mojo_socket_file, enable_mojo_crosapi, + ash_ready_file) + if ash_process_has_started: + break + + logging.warning('Starting ash-chrome timed out after %ds', + ASH_CHROME_TIMEOUT_SECONDS) + logging.warning('Are you using test_ash_chrome?') + logging.warning('Printing the output of "ps aux" for debugging:') + subprocess.call(['ps', 'aux']) + _KillNicely(ash_process) + _KillNicely(ash_symbolize_process, first_wait_secs=1) + + # Clean up for retry. + _ClearDir(tmp_xdg_dir_name) + _ClearDir(tmp_ash_data_dir_name) + + if not ash_process_has_started: + raise RuntimeError('Timed out waiting for ash-chrome to start') + + ash_elapsed_time = time.monotonic() - ash_start_time + logging.info('Started ash-chrome in %.3fs on try %d.', ash_elapsed_time, + num_tries) + + # Starts tests. + if enable_mojo_crosapi: + forward_args.append(lacros_mojo_socket_arg) + + forward_args.append('--ash-chrome-path=' + ash_chrome_file) + test_env = os.environ.copy() + test_env['WAYLAND_DISPLAY'] = ash_wayland_socket_name + test_env['EGL_PLATFORM'] = 'surfaceless' + test_env['XDG_RUNTIME_DIR'] = tmp_xdg_dir_name + + if run_tests_in_debugger: + test_process = _LaunchDebugger(args, forward_args, test_env) + else: + logging.info('Starting test process.') + test_process = subprocess.Popen([args.command] + forward_args, + env=test_env, + stdout=test_stdout, + stderr=subprocess.STDOUT) + if should_symbolize: + logging.info('Symbolizing test logs with asan symbolizer.') + test_symbolize_process = subprocess.Popen([_ASAN_SYMBOLIZER_PATH], + stdin=test_process.stdout) + # Allow test_process to receive a SIGPIPE if symbolize process exits. + test_process.stdout.close() + return test_process.wait() + + finally: + _KillNicely(ash_process) + # Give symbolizer processes time to finish writing with first_wait_secs. + _KillNicely(ash_symbolize_process, first_wait_secs=1) + _KillNicely(test_symbolize_process, first_wait_secs=1) + + shutil.rmtree(tmp_xdg_dir_name, ignore_errors=True) + shutil.rmtree(tmp_ash_data_dir_name, ignore_errors=True) + + +def _RunTestDirectly(args, forward_args): + """Runs tests by invoking the test command directly. + + args (dict): Args for this script. + forward_args (list): Args to be forwarded to the test command. + """ + try: + p = None + p = subprocess.Popen([args.command] + forward_args) + return p.wait() + finally: + _KillNicely(p) + + +def _HandleSignal(sig, _): + """Handles received signals to make sure spawned test process are killed. + + sig (int): An integer representing the received signal, for example SIGTERM. 
+ """ + logging.warning('Received signal: %d, killing spawned processes', sig) + + # Don't do any cleanup here, instead, leave it to the finally blocks. + # Assumption is based on https://docs.python.org/3/library/sys.html#sys.exit: + # cleanup actions specified by finally clauses of try statements are honored. + + # https://tldp.org/LDP/abs/html/exitcodes.html: + # Exit code 128+n -> Fatal error signal "n". + sys.exit(128 + sig) + + +def _ExpandFilterFileIfNeeded(test_target, forward_args): + if (test_target in _DEFAULT_FILTER_FILES_MAPPING.keys() and not any( + [arg.startswith('--test-launcher-filter-file') for arg in forward_args])): + file_path = os.path.abspath( + os.path.join(os.path.dirname(__file__), '..', '..', 'testing', + 'buildbot', 'filters', + _DEFAULT_FILTER_FILES_MAPPING[test_target])) + forward_args.append(f'--test-launcher-filter-file={file_path}') + + +def _RunTest(args, forward_args): + """Runs tests with given args. + + args (dict): Args for this script. + forward_args (list): Args to be forwarded to the test command. + + Raises: + RuntimeError: If the given test binary doesn't exist or the test runner + doesn't know how to run it. + """ + + if not os.path.isfile(args.command): + raise RuntimeError('Specified test command: "%s" doesn\'t exist' % + args.command) + + test_target = os.path.basename(args.command) + _ExpandFilterFileIfNeeded(test_target, forward_args) + + # |_TARGETS_REQUIRE_ASH_CHROME| may not always be accurate as it is updated + # with a best effort only, therefore, allow the invoker to override the + # behavior with a specified ash-chrome version, which makes sure that + # automated CI/CQ builders would always work correctly. + requires_ash_chrome = any( + re.match(t, test_target) for t in _TARGETS_REQUIRE_ASH_CHROME) + if not requires_ash_chrome and not args.ash_chrome_version: + return _RunTestDirectly(args, forward_args) + + return _RunTestWithAshChrome(args, forward_args) + + +def Main(): + for sig in (signal.SIGTERM, signal.SIGINT): + signal.signal(sig, _HandleSignal) + + logging.basicConfig(level=logging.INFO) + arg_parser = argparse.ArgumentParser() + arg_parser.usage = __doc__ + + subparsers = arg_parser.add_subparsers() + + test_parser = subparsers.add_parser('test', help='Run tests') + test_parser.set_defaults(func=_RunTest) + + test_parser.add_argument( + 'command', + help='A single command to invoke the tests, for example: ' + '"./url_unittests". Any argument unknown to this test runner script will ' + 'be forwarded to the command, for example: "--gtest_filter=Suite.Test"') + + version_group = test_parser.add_mutually_exclusive_group() + version_group.add_argument( + '--ash-chrome-version', + type=str, + help='Version of an prebuilt ash-chrome to use for testing, for example: ' + '"793554", and the version corresponds to the commit position of commits ' + 'on the main branch. If not specified, will use the latest version ' + 'available') + version_group.add_argument( + '--ash-chrome-path', + type=str, + help='Path to an locally built ash-chrome to use for testing. ' + 'In general you should build //chrome/test:test_ash_chrome.') + + debugger_group = test_parser.add_mutually_exclusive_group() + debugger_group.add_argument('--gdb', + action='store_true', + help='Run the test in GDB.') + debugger_group.add_argument('--lldb', + action='store_true', + help='Run the test in LLDB.') + + # This is for version skew testing. The current CI/CQ builder builds + # an ash chrome and pass it using --ash-chrome-path. 
In order to use the same + # builder for version skew testing, we use a new argument to override + # the ash chrome. + test_parser.add_argument( + '--ash-chrome-path-override', + type=str, + help='The same as --ash-chrome-path. But this will override ' + '--ash-chrome-path or --ash-chrome-version if any of these ' + 'arguments exist.') + test_parser.add_argument( + '--ash-logging-path', + type=str, + help='File & path to ash-chrome logging output while running Lacros ' + 'browser tests. If not provided, no output will be generated.') + test_parser.add_argument('--combine-ash-logs-on-bots', + action='store_true', + help='Whether to combine ash logs on bots.') + test_parser.add_argument( + '--asan-symbolize-output', + action='store_true', + help='Whether to run subprocess log outputs through the asan symbolizer.') + + args = arg_parser.parse_known_args() + if not hasattr(args[0], "func"): + # No command specified. + print(__doc__) + sys.exit(1) + + return args[0].func(args[0], args[1]) + + +if __name__ == '__main__': + sys.exit(Main()) diff --git a/lacros/test_runner_test.py b/lacros/test_runner_test.py new file mode 100755 index 000000000000..77f7325f2e3d --- /dev/null +++ b/lacros/test_runner_test.py @@ -0,0 +1,300 @@ +#!/usr/bin/env vpython3 +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import logging +import os +import subprocess +import sys +import tempfile +import time +import unittest + +import mock +from parameterized import parameterized + +import test_runner + + +class TestRunnerTest(unittest.TestCase): + def setUp(self): + logging.disable(logging.CRITICAL) + time.sleep = mock.Mock() + + def tearDown(self): + logging.disable(logging.NOTSET) + + @mock.patch.object(os.path, + 'dirname', + return_value='chromium/src/build/lacros') + def test_expand_filter_file(self, _): + args = ['--some_flag="flag"'] + test_runner._ExpandFilterFileIfNeeded('browser_tests', args) + self.assertTrue(args[1].endswith( + 'chromium/src/' + 'testing/buildbot/filters/linux-lacros.browser_tests.filter')) + self.assertTrue(args[1].startswith('--test-launcher-filter-file=')) + + args = ['--some_flag="flag"'] + test_runner._ExpandFilterFileIfNeeded('random_tests', args) + self.assertEqual(len(args), 1) + + args = ['--test-launcher-filter-file=new/filter'] + test_runner._ExpandFilterFileIfNeeded('browser_tests', args) + self.assertEqual(len(args), 1) + self.assertTrue(args[0].endswith('new/filter')) + + @parameterized.expand([ + 'url_unittests', + './url_unittests', + 'out/release/url_unittests', + './out/release/url_unittests', + ]) + @mock.patch.object(os.path, 'isfile', return_value=True) + @mock.patch.object(test_runner, '_DownloadAshChromeIfNecessary') + @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock()) + # Tests that the test runner doesn't attempt to download ash-chrome if not + # required. 
+ def test_do_not_require_ash_chrome(self, command, mock_popen, mock_download, + _): + args = ['script_name', 'test', command] + with mock.patch.object(sys, 'argv', args): + test_runner.Main() + self.assertEqual(1, mock_popen.call_count) + mock_popen.assert_called_with([command]) + self.assertFalse(mock_download.called) + + @parameterized.expand([ + 'browser_tests', 'components_browsertests', 'content_browsertests', + 'lacros_chrome_browsertests', + 'browser_tests --enable-pixel-output-in-tests' + ]) + @mock.patch.object(os, + 'listdir', + return_value=['wayland-exo', 'wayland-exo.lock']) + @mock.patch.object(tempfile, + 'mkdtemp', + side_effect=['/tmp/xdg', '/tmp/ash-data']) + @mock.patch.object(os.environ, 'copy', side_effect=[{}, {}]) + @mock.patch.object(os.path, 'exists', return_value=True) + @mock.patch.object(os.path, 'isfile', return_value=True) + @mock.patch.object(os.path, 'abspath', return_value='/a/b/filter') + @mock.patch.object(test_runner, + '_GetLatestVersionOfAshChrome', + return_value='793554') + @mock.patch.object(test_runner, '_DownloadAshChromeIfNecessary') + @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock()) + # Tests that the test runner downloads and spawns ash-chrome if ash-chrome is + # required. + def test_require_ash_chrome(self, command, mock_popen, mock_download, *_): + command_parts = command.split() + args = ['script_name', 'test'] + args.extend(command_parts) + with mock.patch.object(sys, 'argv', args): + test_runner.Main() + mock_download.assert_called_with('793554') + self.assertEqual(2, mock_popen.call_count) + + ash_chrome_args = mock_popen.call_args_list[0][0][0] + self.assertTrue(ash_chrome_args[0].endswith( + 'build/lacros/prebuilt_ash_chrome/793554/test_ash_chrome')) + expected_ash_chrome_args = [ + '--user-data-dir=/tmp/ash-data', + '--enable-wayland-server', + '--no-startup-window', + '--disable-input-event-activation-protection', + '--disable-lacros-keep-alive', + '--disable-login-lacros-opening', + '--enable-field-trial-config', + '--enable-features=LacrosSupport,LacrosPrimary,LacrosOnly', + '--ash-ready-file-path=/tmp/ash-data/ash_ready.txt', + '--wayland-server-socket=wayland-exo', + ] + if '--enable-pixel-output-in-tests' not in command_parts: + expected_ash_chrome_args.append('--disable-gl-drawing-for-tests') + if command == 'lacros_chrome_browsertests': + expected_ash_chrome_args.append( + '--lacros-mojo-socket-for-testing=/tmp/ash-data/lacros.sock') + self.assertListEqual(expected_ash_chrome_args, ash_chrome_args[1:]) + ash_chrome_env = mock_popen.call_args_list[0][1].get('env', {}) + self.assertDictEqual({'XDG_RUNTIME_DIR': '/tmp/xdg'}, ash_chrome_env) + + test_args = mock_popen.call_args_list[1][0][0] + if command == 'lacros_chrome_browsertests': + self.assertListEqual([ + command, + '--test-launcher-filter-file=/a/b/filter', + '--lacros-mojo-socket-for-testing=/tmp/ash-data/lacros.sock', + '--ash-chrome-path=' + ash_chrome_args[0], + ], test_args) + else: + self.assertListEqual(test_args[:len(command_parts)], command_parts) + + test_env = mock_popen.call_args_list[1][1].get('env', {}) + self.assertDictEqual( + { + 'WAYLAND_DISPLAY': 'wayland-exo', + 'XDG_RUNTIME_DIR': '/tmp/xdg', + 'EGL_PLATFORM': 'surfaceless' + }, test_env) + + @mock.patch.object(os, + 'listdir', + return_value=['wayland-exo', 'wayland-exo.lock']) + @mock.patch.object(os.path, 'exists', return_value=True) + @mock.patch.object(os.path, 'isfile', return_value=True) + @mock.patch.object(test_runner, + '_GetLatestVersionOfAshChrome', + 
return_value='793554') + @mock.patch.object(test_runner, '_DownloadAshChromeIfNecessary') + @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock()) + # Tests that when a ash-chrome version is specified, that version is used + # instead of the latest one. + def test_specify_ash_chrome_version(self, mock_popen, mock_download, *_): + args = [ + 'script_name', 'test', 'browser_tests', '--ash-chrome-version', '781122' + ] + with mock.patch.object(sys, 'argv', args): + test_runner.Main() + mock_download.assert_called_with('781122') + + @mock.patch.object(os, + 'listdir', + return_value=['wayland-exo', 'wayland-exo.lock']) + @mock.patch.object(os.path, 'exists', return_value=True) + @mock.patch.object(os.path, 'isfile', return_value=True) + @mock.patch.object(test_runner, '_DownloadAshChromeIfNecessary') + @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock()) + # Tests that if a ash-chrome version is specified, uses ash-chrome to run + # tests anyway even if |_TARGETS_REQUIRE_ASH_CHROME| indicates an ash-chrome + # is not required. + def test_overrides_do_not_require_ash_chrome(self, mock_popen, mock_download, + *_): + args = [ + 'script_name', 'test', './url_unittests', '--ash-chrome-version', + '793554' + ] + with mock.patch.object(sys, 'argv', args): + test_runner.Main() + mock_download.assert_called_with('793554') + self.assertEqual(2, mock_popen.call_count) + + @mock.patch.object(os, + 'listdir', + return_value=['wayland-exo', 'wayland-exo.lock']) + @mock.patch.object(os.path, 'exists', return_value=True) + @mock.patch.object(os.path, 'isfile', return_value=True) + @mock.patch.object(test_runner, '_GetLatestVersionOfAshChrome') + @mock.patch.object(test_runner, '_DownloadAshChromeIfNecessary') + @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock()) + # Tests that when an ash-chrome path is specified, the test runner doesn't try + # to download prebuilt ash-chrome. + def test_specify_ash_chrome_path(self, mock_popen, mock_download, + mock_get_latest_version, *_): + args = [ + 'script_name', + 'test', + 'browser_tests', + '--ash-chrome-path', + '/ash/test_ash_chrome', + ] + with mock.patch.object(sys, 'argv', args): + test_runner.Main() + self.assertFalse(mock_get_latest_version.called) + self.assertFalse(mock_download.called) + + @mock.patch.object(os.path, 'isfile', return_value=True) + @mock.patch.object(test_runner, '_DownloadAshChromeIfNecessary') + @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock()) + # Tests that arguments not known to the test runner are forwarded to the + # command that invokes tests. 
+ def test_command_arguments(self, mock_popen, mock_download, _): + args = [ + 'script_name', 'test', './url_unittests', '--gtest_filter=Suite.Test' + ] + with mock.patch.object(sys, 'argv', args): + test_runner.Main() + mock_popen.assert_called_with( + ['./url_unittests', '--gtest_filter=Suite.Test']) + self.assertFalse(mock_download.called) + + @mock.patch.dict(os.environ, {'ASH_WRAPPER': 'gdb --args'}, clear=False) + @mock.patch.object(os, + 'listdir', + return_value=['wayland-exo', 'wayland-exo.lock']) + @mock.patch.object(tempfile, + 'mkdtemp', + side_effect=['/tmp/xdg', '/tmp/ash-data']) + @mock.patch.object(os.environ, 'copy', side_effect=[{}, {}]) + @mock.patch.object(os.path, 'exists', return_value=True) + @mock.patch.object(os.path, 'isfile', return_value=True) + @mock.patch.object(test_runner, + '_GetLatestVersionOfAshChrome', + return_value='793554') + @mock.patch.object(test_runner, '_DownloadAshChromeIfNecessary') + @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock()) + # Tests that, when the ASH_WRAPPER environment variable is set, it forwards + # the commands to the invocation of ash. + def test_ash_wrapper(self, mock_popen, *_): + args = [ + 'script_name', 'test', './browser_tests', '--gtest_filter=Suite.Test' + ] + with mock.patch.object(sys, 'argv', args): + test_runner.Main() + ash_args = mock_popen.call_args_list[0][0][0] + self.assertTrue(ash_args[2].endswith('test_ash_chrome')) + self.assertEqual(['gdb', '--args'], ash_args[:2]) + + + # Test when ash is newer, test runner skips running tests and returns 0. + @mock.patch.object(os.path, 'exists', return_value=True) + @mock.patch.object(os.path, 'isfile', return_value=True) + @mock.patch.object(test_runner, '_FindLacrosMajorVersion', return_value=91) + def test_version_skew_ash_newer(self, *_): + args = [ + 'script_name', 'test', './browser_tests', '--gtest_filter=Suite.Test', + '--ash-chrome-path-override=\ +lacros_version_skew_tests_v92.0.100.0/test_ash_chrome' + ] + with mock.patch.object(sys, 'argv', args): + self.assertEqual(test_runner.Main(), 0) + + @mock.patch.object(os.path, 'exists', return_value=True) + def test_lacros_version_from_chrome_version(self, *_): + version_data = '''\ +MAJOR=95 +MINOR=0 +BUILD=4615 +PATCH=0\ +''' + open_lib = '__builtin__.open' + if sys.version_info[0] >= 3: + open_lib = 'builtins.open' + with mock.patch(open_lib, + mock.mock_open(read_data=version_data)) as mock_file: + version = test_runner._FindLacrosMajorVersion() + self.assertEqual(95, version) + + @mock.patch.object(os.path, 'exists', return_value=True) + def test_lacros_version_from_metadata(self, *_): + metadata_json = ''' +{ + "content": { + "version": "92.1.4389.2" + }, + "metadata_version": 1 +} + ''' + open_lib = '__builtin__.open' + if sys.version_info[0] >= 3: + open_lib = 'builtins.open' + with mock.patch(open_lib, + mock.mock_open(read_data=metadata_json)) as mock_file: + version = test_runner._FindLacrosMajorVersionFromMetadata() + self.assertEqual(92, version) + mock_file.assert_called_with('metadata.json', 'r') + + +if __name__ == '__main__': + unittest.main() diff --git a/landmine_utils.py b/landmine_utils.py new file mode 100644 index 000000000000..b126f4fff0fa --- /dev/null +++ b/landmine_utils.py @@ -0,0 +1,33 @@ +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
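+"""Small helpers for detecting the host OS; used by landmines.py."""
+# For example, host_os() below returns 'linux' on a Linux machine.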
+
+
+import sys
+
+
+def IsWindows():
+  return sys.platform in ['win32', 'cygwin']
+
+
+def IsLinux():
+  return sys.platform.startswith(('linux', 'freebsd', 'netbsd', 'openbsd'))
+
+
+def IsMac():
+  return sys.platform == 'darwin'
+
+
+def host_os():
+  """
+  Returns a string representing the host_os of the current system.
+  Possible values: 'win', 'mac', 'linux', 'unknown'.
+  """
+  if IsWindows():
+    return 'win'
+  elif IsLinux():
+    return 'linux'
+  elif IsMac():
+    return 'mac'
+  else:
+    return 'unknown'
diff --git a/landmines.py b/landmines.py
new file mode 100755
index 000000000000..844ee3808c60
--- /dev/null
+++ b/landmines.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python3
+# Copyright 2012 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This script runs on every gclient runhooks as the first hook (see DEPS). If it
+detects that the build should be clobbered, it will delete the contents of the
+build directory.
+
+A landmine is tripped when a builder checks out a different revision, and the
+diff between the new landmines and the old ones is non-empty. At this point,
+the build is clobbered. (A landmine is simply a line of text emitted by a
+landmine script; adding or changing a line changes the diff.)
+
+Before adding or changing a landmine consider the consequences of doing so.
+Doing so will wipe out every output directory on every Chrome developer's
+machine. This can be particularly problematic on Windows where the directory
+deletion may well fail (locked files, command prompt in the directory, etc.),
+and generated .sln and .vcxproj files will be deleted.
+
+This output directory deletion will be repeated when going back and forth
+across the change that added the landmine, adding to the cost. There are
+usually less troublesome alternatives.
+"""
+
+import difflib
+import errno
+import logging
+import optparse
+import os
+import sys
+import subprocess
+import time
+
+import clobber
+import landmine_utils
+
+
+def get_build_dir(src_dir):
+  r"""
+  Returns the absolute path to the directory containing the build directories.
+  Examples:
+    'C:\src\out'
+    '/b/s/w/ir/cache/builder/src/out'
+  """
+  if 'CHROMIUM_OUT_DIR' in os.environ:
+    output_dir = os.environ.get('CHROMIUM_OUT_DIR').strip()
+    if not output_dir:
+      raise RuntimeError(
+          'CHROMIUM_OUT_DIR environment variable is set but blank!')
+  else:
+    output_dir = 'out'
+  return os.path.abspath(os.path.join(src_dir, output_dir))
+
+
+def clobber_if_necessary(new_landmines, src_dir, landmines_path):
+  """Does the work of setting, planting, and triggering landmines."""
+  out_dir = get_build_dir(src_dir)
+  try:
+    os.makedirs(out_dir)
+  except OSError as e:
+    if e.errno != errno.EEXIST:
+      raise
+
+  if os.path.exists(landmines_path):
+    with open(landmines_path, 'r') as f:
+      old_landmines = f.readlines()
+    if old_landmines != new_landmines:
+      old_date = time.ctime(os.stat(landmines_path).st_ctime)
+      diff = difflib.unified_diff(old_landmines, new_landmines,
+          fromfile='old_landmines', tofile='new_landmines',
+          fromfiledate=old_date, tofiledate=time.ctime(), n=0)
+      sys.stdout.write('Clobbering due to:\n')
+      sys.stdout.writelines(diff)
+      sys.stdout.flush()
+
+      clobber.clobber(out_dir)
+
+  # Save current set of landmines for next time.
+  with open(landmines_path, 'w') as f:
+    f.writelines(new_landmines)
+
+
+def process_options():
+  """Returns an options object containing the configuration for this script."""
+  parser = optparse.OptionParser()
+  parser.add_option(
+      '-s', '--landmine-scripts', action='append',
+      help='Path to the script which emits landmines to stdout. The target '
+           'is passed to this script via option -t. Note that an extra '
+           'script can be specified via an env var EXTRA_LANDMINES_SCRIPT.')
+  parser.add_option('-d', '--src-dir',
+      help='Path of the source root dir. Overrides the default location of '
+           'the source root dir when calculating the build directory.')
+  parser.add_option(
+      '-l',
+      '--landmines-path',
+      help='Path to the landmines file to use (defaults to .landmines)')
+  parser.add_option('-v', '--verbose', action='store_true',
+      default=('LANDMINES_VERBOSE' in os.environ),
+      help=('Emit some extra debugging information (default off). This option '
+            'is also enabled by the presence of a LANDMINES_VERBOSE '
+            'environment variable.'))
+
+  options, args = parser.parse_args()
+
+  if args:
+    parser.error('Unknown arguments %s' % args)
+
+  logging.basicConfig(
+      level=logging.DEBUG if options.verbose else logging.ERROR)
+
+  if options.src_dir:
+    if not os.path.isdir(options.src_dir):
+      parser.error('Cannot find source root dir at %s' % options.src_dir)
+    logging.debug('Overriding source root dir. Using: %s', options.src_dir)
+  else:
+    options.src_dir = \
+        os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+
+  if not options.landmine_scripts:
+    options.landmine_scripts = [os.path.join(options.src_dir, 'build',
+                                             'get_landmines.py')]
+
+  extra_script = os.environ.get('EXTRA_LANDMINES_SCRIPT')
+  if extra_script:
+    options.landmine_scripts += [extra_script]
+
+  return options
+
+
+def main():
+  options = process_options()
+
+  landmines = []
+  for s in options.landmine_scripts:
+    proc = subprocess.Popen([sys.executable, s], stdout=subprocess.PIPE,
+                            universal_newlines=True)
+    output, _ = proc.communicate()
+    landmines.extend([('%s\n' % l.strip()) for l in output.splitlines()])
+  if options.landmines_path:
+    landmines_path = options.landmines_path
+  else:
+    landmines_path = os.path.join(options.src_dir, '.landmines')
+  clobber_if_necessary(landmines, options.src_dir,
+                       os.path.normpath(landmines_path))
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/linux/BUILD.gn b/linux/BUILD.gn
new file mode 100644
index 000000000000..b298abb709b2
--- /dev/null
+++ b/linux/BUILD.gn
@@ -0,0 +1,32 @@
+# Copyright 2015 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/features.gni")
+import("//build/config/freetype/freetype.gni")
+import("//build/config/linux/pkg_config.gni")
+
+if (use_gio) {
+  pkg_config("gio_config") {
+    packages = [ "gio-2.0" ]
+
+    defines = [ "USE_GIO" ]
+  }
+}
+
+# Looking for libspeechd? Use //third_party/speech-dispatcher
+
+if (use_system_freetype) {
+  assert(!is_castos)
+
+  # Only provided for distributions which prefer to keep linking to FreeType on
+  # the system; use with caution, for details see
+  # build/config/freetype/BUILD.gn.
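+  # A minimal sketch of consuming this config ("font_backend" is a
+  # hypothetical target name; real consumers must appear in the visibility
+  # list below):
+  #   source_set("font_backend") {
+  #     configs += [ "//build/linux:freetype_from_pkgconfig" ]
+  #   }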
+ pkg_config("freetype_from_pkgconfig") { + visibility = [ + "//build/config/freetype:freetype", + "//third_party:freetype_harfbuzz", + "//third_party/harfbuzz-ng:harfbuzz_source", + ] + packages = [ "freetype2" ] + } +} diff --git a/linux/OWNERS b/linux/OWNERS new file mode 100644 index 000000000000..8e1cb557290d --- /dev/null +++ b/linux/OWNERS @@ -0,0 +1,3 @@ +mmoss@chromium.org +thestig@chromium.org +thomasanderson@chromium.org diff --git a/linux/chrome.map b/linux/chrome.map new file mode 100644 index 000000000000..3038318821dd --- /dev/null +++ b/linux/chrome.map @@ -0,0 +1,97 @@ +{ +global: + __bss_start; + __data_start; + data_start; + _edata; + _end; + _IO_stdin_used; + + # Initialization and finalization functions for static global + # variables. + _fini; + _init; + __libc_csu_fini; + __libc_csu_init; + + # Chrome's main function. Exported for historical purposes. + ChromeMain; + + # Program entry point. + _start; + + # Memory allocation symbols. We want chrome and any libraries to + # share the same heap, so it is correct to export these symbols. + aligned_alloc; + calloc; + cfree; + free; + __free_hook; + __libc_calloc; + __libc_cfree; + __libc_free; + __libc_malloc; + __libc_memalign; + __libc_pvalloc; + __libc_realloc; + __libc_valloc; + mallinfo; + malloc; + __malloc_hook; + malloc_size; + malloc_stats; + malloc_usable_size; + mallopt; + memalign; + __memalign_hook; + __posix_memalign; + posix_memalign; + pvalloc; + realloc; + __realloc_hook; + valloc; + + # Various flavors of operator new and operator delete. + _ZdaPv; + _ZdaPvm; + _ZdaPvmSt11align_val_t; + _ZdaPvRKSt9nothrow_t; + _ZdaPvSt11align_val_t; + _ZdaPvSt11align_val_tRKSt9nothrow_t; + _ZdlPv; + _ZdlPvm; + _ZdlPvmSt11align_val_t; + _ZdlPvRKSt9nothrow_t; + _ZdlPvSt11align_val_t; + _ZdlPvSt11align_val_tRKSt9nothrow_t; + _Znam; + _ZnamRKSt9nothrow_t; + _ZnamSt11align_val_t; + _ZnamSt11align_val_tRKSt9nothrow_t; + _Znwm; + _ZnwmRKSt9nothrow_t; + _ZnwmSt11align_val_t; + _ZnwmSt11align_val_tRKSt9nothrow_t; + + # Various flavors of localtime(). These are exported by the chrome + # sandbox to intercept calls to localtime(), which would otherwise + # fail in untrusted processes that don't have permission to read + # /etc/localtime. These overrides forward the request to the browser + # process, which uses dlsym(localtime) to make the real calls. + localtime; + localtime64; + localtime64_r; + localtime_r; + + # getaddrinfo() is exported by the sandbox to ensure the network service and + # other sandboxed processes don't try to run system DNS resolution + # in-process, which is not supported by the sandbox. This override + # uses dlsym(getaddrinfo) to make the real calls in unsandboxed + # processes. + getaddrinfo; + + v8dbg_*; + +local: + *; +}; diff --git a/linux/dump_app_syms.py b/linux/dump_app_syms.py new file mode 100644 index 000000000000..ca2d700e272a --- /dev/null +++ b/linux/dump_app_syms.py @@ -0,0 +1,30 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Helper script to run dump_syms on Chrome Linux executables and strip +# them if needed. + + +import os +import subprocess +import sys + +if len(sys.argv) != 5: + print("dump_app_syms.py ") + print(" ") + sys.exit(1) + +dumpsyms = sys.argv[1] +strip_binary = sys.argv[2] +infile = sys.argv[3] +outfile = sys.argv[4] + +# Dump only when the output file is out-of-date. 
+if not os.path.isfile(outfile) or \
+   os.stat(outfile).st_mtime < os.stat(infile).st_mtime:
+  with open(outfile, 'w') as outfileobj:
+    subprocess.check_call([dumpsyms, '-m', '-d', infile], stdout=outfileobj)
+
+if strip_binary != '0':
+  subprocess.check_call(['strip', infile])
diff --git a/linux/extract_symbols.gni b/linux/extract_symbols.gni
new file mode 100644
index 000000000000..8fef1312d825
--- /dev/null
+++ b/linux/extract_symbols.gni
@@ -0,0 +1,41 @@
+# Copyright 2018 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/toolchain.gni")
+
+# Extracts symbols from a binary into a symbol file using dump_app_syms.py.
+#
+# Args:
+#   binary: Path to the binary containing symbols to extract, e.g.:
+#       "$root_out_dir/chrome"
+#   symbol_file: Desired output file for symbols, e.g.:
+#       "$root_out_dir/chrome.breakpad.$current_cpu"
+template("extract_symbols") {
+  forward_variables_from(invoker,
+                         [
+                           "deps",
+                           "testonly",
+                         ])
+  action("${target_name}") {
+    dump_syms_label = "//third_party/breakpad:dump_syms($host_toolchain)"
+    dump_syms_binary =
+        get_label_info(dump_syms_label, "root_out_dir") + "/" + "dump_syms"
+
+    pool = "//build/toolchain:link_pool($default_toolchain)"
+    script = "//build/linux/dump_app_syms.py"
+    inputs = [
+      invoker.binary,
+      dump_syms_binary,
+    ]
+    outputs = [ invoker.symbol_file ]
+    args = [
+      "./" + rebase_path(dump_syms_binary, root_build_dir),
+      "0",  # strip_binary = false
+      rebase_path(invoker.binary, root_build_dir),
+      rebase_path(invoker.symbol_file, root_build_dir),
+    ]
+
+    deps += [ dump_syms_label ]
+  }
+}
diff --git a/linux/install-chromeos-fonts.py b/linux/install-chromeos-fonts.py
new file mode 100755
index 000000000000..8ac242389f3c
--- /dev/null
+++ b/linux/install-chromeos-fonts.py
@@ -0,0 +1,119 @@
+#!/usr/bin/env python3
+# Copyright 2013 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install the Chrome OS fonts on Linux.
+# This script can be run manually (as root), but is also run as part of
+# install-build-deps.sh.
+
+
+import os
+import shutil
+import subprocess
+import sys
+
+URL_TEMPLATE = ('https://commondatastorage.googleapis.com/chromeos-localmirror/'
+                'distfiles/%(name)s-%(version)s.tar.bz2')
+
+# Taken from the media-fonts/ ebuilds in chromiumos-overlay.
+# noto-cjk used to be here, but is removed because fc-cache takes too long
+# regenerating the fontconfig cache (See crbug.com/697954.)
+# TODO(jshin): Add it back when the above issue can be avoided.
+SOURCES = [
+  {
+    'name': 'notofonts',
+    'version': '20161129'
+  }, {
+    'name': 'robotofonts',
+    'version': '2.132'
+  }
+]
+
+URLS = sorted([URL_TEMPLATE % d for d in SOURCES])
+FONTS_DIR = '/usr/local/share/fonts'
+
+def main(args):
+  if not sys.platform.startswith('linux'):
+    print("Error: %s must be run on Linux." % __file__)
+    return 1
+
+  if os.getuid() != 0:
+    print("Error: %s must be run as root." % __file__)
+    return 1
+
+  if not os.path.isdir(FONTS_DIR):
+    print("Error: Destination directory does not exist: %s" % FONTS_DIR)
+    return 1
+
+  dest_dir = os.path.join(FONTS_DIR, 'chromeos')
+
+  stamp = os.path.join(dest_dir, ".stamp02")
+  if os.path.exists(stamp):
+    with open(stamp) as s:
+      if s.read() == '\n'.join(URLS):
+        print("Chrome OS fonts already up to date in %s."
+              % dest_dir)
+        return 0
+
+  if os.path.isdir(dest_dir):
+    shutil.rmtree(dest_dir)
+  os.mkdir(dest_dir)
+  os.chmod(dest_dir, 0o755)
+
+  print("Installing Chrome OS fonts to %s." % dest_dir)
+  for url in URLS:
+    tarball = os.path.join(dest_dir, os.path.basename(url))
+    subprocess.check_call(['curl', '-L', url, '-o', tarball])
+    subprocess.check_call(['tar', '--no-same-owner', '--no-same-permissions',
+                           '-xf', tarball, '-C', dest_dir])
+    os.remove(tarball)
+
+  readme = os.path.join(dest_dir, "README")
+  with open(readme, 'w') as s:
+    s.write("This directory and its contents are auto-generated.\n")
+    s.write("It may be deleted and recreated. Do not modify.\n")
+    s.write("Script: %s\n" % __file__)
+
+  with open(stamp, 'w') as s:
+    s.write('\n'.join(URLS))
+
+  for base, dirs, files in os.walk(dest_dir):
+    for dir in dirs:
+      os.chmod(os.path.join(base, dir), 0o755)
+    for file in files:
+      os.chmod(os.path.join(base, file), 0o644)
+
+  print("""\
+
+Chrome OS font rendering settings are specified using Fontconfig. If your
+system's configuration doesn't match Chrome OS's (which vary for different
+devices), fonts may be rendered with different subpixel rendering, subpixel
+positioning, or hinting settings. This may affect font metrics.
+
+Chrome OS's settings are stored in the media-libs/fontconfig package, which is
+at src/third_party/chromiumos-overlay/media-libs/fontconfig in a Chrome OS
+checkout. You can configure your system to match Chrome OS's defaults by
+creating or editing a ~/.fonts.conf file:

+<?xml version="1.0"?>
+<!DOCTYPE fontconfig SYSTEM "fonts.dtd">
+<fontconfig>
+  <match target="font">
+    <edit name="antialias" mode="assign"><bool>true</bool></edit>
+    <edit name="autohint" mode="assign"><bool>true</bool></edit>
+    <edit name="hinting" mode="assign"><bool>true</bool></edit>
+    <edit name="hintstyle" mode="assign"><const>hintslight</const></edit>
+    <edit name="rgba" mode="assign"><const>rgb</const></edit>
+  </match>
+</fontconfig>
+
+To load additional per-font configs (and assuming you have Chrome OS checked
+out), add the following immediately before the "</fontconfig>" line:
+
+<include ignore_missing="yes">/path/to/src/third_party/chromiumos-overlay/media-libs/fontconfig/files/local.conf</include>
+""")
+
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/linux/libncursesw/DIR_METADATA b/linux/libncursesw/DIR_METADATA
new file mode 100644
index 000000000000..6bbf490f43ce
--- /dev/null
+++ b/linux/libncursesw/DIR_METADATA
@@ -0,0 +1,5 @@
+monorail {
+  component: "Internals>Accessibility"
+}
+
+team_email: "chromium-accessibility@chromium.org"
diff --git a/linux/libncursesw/OWNERS b/linux/libncursesw/OWNERS
new file mode 100644
index 000000000000..976b9550c69f
--- /dev/null
+++ b/linux/libncursesw/OWNERS
@@ -0,0 +1 @@
+file://ui/accessibility/OWNERS
diff --git a/linux/libpci/BUILD.gn b/linux/libpci/BUILD.gn
new file mode 100644
index 000000000000..a6abfdef0511
--- /dev/null
+++ b/linux/libpci/BUILD.gn
@@ -0,0 +1,22 @@
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//tools/generate_library_loader/generate_library_loader.gni")
+
+# This generates a target named "libpci".
+generate_library_loader("libpci") {
+  name = "LibPciLoader"
+  output_h = "libpci.h"
+  output_cc = "libpci_loader.cc"
+  header = "<pci/pci.h>"
+
+  functions = [
+    "pci_alloc",
+    "pci_init",
+    "pci_cleanup",
+    "pci_scan_bus",
+    "pci_fill_info",
+    "pci_lookup_name",
+  ]
+}
diff --git a/linux/libudev/BUILD.gn b/linux/libudev/BUILD.gn
new file mode 100644
index 000000000000..312b092d8485
--- /dev/null
+++ b/linux/libudev/BUILD.gn
@@ -0,0 +1,67 @@
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
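+# Two loader targets are generated below (udev0_loader and udev1_loader) from
+# the same function list, so callers can load whichever of libudev.so.0 or
+# libudev.so.1 the host system provides at runtime.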
+ +import("//tools/generate_library_loader/generate_library_loader.gni") + +libudev_functions = [ + "udev_device_get_action", + "udev_device_get_devnode", + "udev_device_get_devtype", + "udev_device_get_parent", + "udev_device_get_parent_with_subsystem_devtype", + "udev_device_get_properties_list_entry", + "udev_device_get_property_value", + "udev_device_get_subsystem", + "udev_device_get_sysattr_value", + "udev_device_get_sysname", + "udev_device_get_syspath", + "udev_device_new_from_devnum", + "udev_device_new_from_subsystem_sysname", + "udev_device_new_from_syspath", + "udev_device_unref", + "udev_enumerate_add_match_subsystem", + "udev_enumerate_get_list_entry", + "udev_enumerate_new", + "udev_enumerate_scan_devices", + "udev_enumerate_unref", + "udev_list_entry_get_next", + "udev_list_entry_get_name", + "udev_monitor_enable_receiving", + "udev_monitor_filter_add_match_subsystem_devtype", + "udev_monitor_get_fd", + "udev_monitor_new_from_netlink", + "udev_monitor_receive_device", + "udev_monitor_unref", + "udev_new", + "udev_set_log_fn", + "udev_set_log_priority", + "udev_unref", +] + +# This generates a target named "udev0_loader". +generate_library_loader("udev0_loader") { + name = "LibUdev0Loader" + output_h = "libudev0.h" + output_cc = "libudev0_loader.cc" + header = "\"third_party/libudev/libudev0.h\"" + + functions = libudev_functions +} + +# This generates a target named "udev1_loader". +generate_library_loader("udev1_loader") { + name = "LibUdev1Loader" + output_h = "libudev1.h" + output_cc = "libudev1_loader.cc" + header = "\"third_party/libudev/libudev1.h\"" + + functions = libudev_functions +} + +group("libudev") { + public_deps = [ + ":udev0_loader", + ":udev1_loader", + ] +} diff --git a/linux/rewrite_dirs.py b/linux/rewrite_dirs.py new file mode 100755 index 000000000000..d94ef53f9dca --- /dev/null +++ b/linux/rewrite_dirs.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python3 +# Copyright 2011 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Rewrites paths in -I, -L and other option to be relative to a sysroot.""" + + +import sys +import os +import optparse + +REWRITE_PREFIX = ['-I', + '-idirafter', + '-imacros', + '-imultilib', + '-include', + '-iprefix', + '-iquote', + '-isystem', + '-L'] + +def RewritePath(path, opts): + """Rewrites a path by stripping the prefix and prepending the sysroot.""" + sysroot = opts.sysroot + prefix = opts.strip_prefix + if os.path.isabs(path) and not path.startswith(sysroot): + if path.startswith(prefix): + path = path[len(prefix):] + path = path.lstrip('/') + return os.path.join(sysroot, path) + else: + return path + + +def RewriteLine(line, opts): + """Rewrites all the paths in recognized options.""" + args = line.split() + count = len(args) + i = 0 + while i < count: + for prefix in REWRITE_PREFIX: + # The option can be either in the form "-I /path/to/dir" or + # "-I/path/to/dir" so handle both. 
+      if args[i] == prefix:
+        i += 1
+        try:
+          args[i] = RewritePath(args[i], opts)
+        except IndexError:
+          sys.stderr.write('Missing argument following %s\n' % prefix)
+          break
+      elif args[i].startswith(prefix):
+        args[i] = prefix + RewritePath(args[i][len(prefix):], opts)
+    i += 1
+
+  return ' '.join(args)
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-s', '--sysroot', default='/', help='sysroot to prepend')
+  parser.add_option('-p', '--strip-prefix', default='', help='prefix to strip')
+  opts, args = parser.parse_args(argv[1:])
+
+  for line in sys.stdin.readlines():
+    line = RewriteLine(line.strip(), opts)
+    print(line)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/linux/strip_binary.gni b/linux/strip_binary.gni
new file mode 100644
index 000000000000..3675d39a5481
--- /dev/null
+++ b/linux/strip_binary.gni
@@ -0,0 +1,56 @@
+# Copyright 2021 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/toolchain.gni")
+
+# Extracts symbols from a binary into a symbol file.
+#
+# Args:
+#   binary_input: Path to the binary containing symbols to extract, e.g.:
+#       "$root_out_dir/chrome"
+#   symbol_output: Desired output file for symbols, e.g.:
+#       "$root_out_dir/chrome.debug"
+#   stripped_binary_output: Desired output file for stripped file, e.g.:
+#       "$root_out_dir/chrome.stripped"
+template("strip_binary") {
+  forward_variables_from(invoker,
+                         [
+                           "deps",
+                           "testonly",
+                         ])
+  action("${target_name}") {
+    eu_strip_binary = "//buildtools/third_party/eu-strip/bin/eu-strip"
+    script = "//build/linux/strip_binary.py"
+
+    if (defined(invoker.stripped_binary_output)) {
+      stripped_binary_output = invoker.stripped_binary_output
+    } else {
+      stripped_binary_output = invoker.binary_input + ".stripped"
+    }
+    if (defined(invoker.symbol_output)) {
+      symbol_output = invoker.symbol_output
+    } else {
+      symbol_output = invoker.binary_input + ".debug"
+    }
+
+    inputs = [
+      invoker.binary_input,
+      eu_strip_binary,
+    ]
+    outputs = [
+      symbol_output,
+      stripped_binary_output,
+    ]
+    args = [
+      "--eu-strip-binary-path",
+      rebase_path(eu_strip_binary, root_build_dir),
+      "--symbol-output",
+      rebase_path(symbol_output, root_build_dir),
+      "--stripped-binary-output",
+      rebase_path(stripped_binary_output, root_build_dir),
+      "--binary-input",
+      rebase_path(invoker.binary_input, root_build_dir),
+    ]
+  }
+}
diff --git a/linux/strip_binary.py b/linux/strip_binary.py
new file mode 100755
index 000000000000..82801c7486a3
--- /dev/null
+++ b/linux/strip_binary.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python3
+#
+# Copyright 2021 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
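+# eu-strip is invoked below with -o naming the stripped output binary and -f
+# naming the file that receives the extracted debug sections, so a single
+# invocation produces both outputs declared by the strip_binary GN template.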
+
+import argparse
+import subprocess
+import sys
+
+
+def main():
+  argparser = argparse.ArgumentParser(description='eu-strip binary.')
+
+  argparser.add_argument('--eu-strip-binary-path', help='eu-strip path.')
+  argparser.add_argument('--binary-input', help='exe file path.')
+  argparser.add_argument('--symbol-output', help='debug file path.')
+  argparser.add_argument('--stripped-binary-output', help='stripped file path.')
+  args = argparser.parse_args()
+
+  cmd_line = [
+      args.eu_strip_binary_path, '-o', args.stripped_binary_output, '-f',
+      args.symbol_output, args.binary_input
+  ]
+
+  process = subprocess.Popen(cmd_line)
+  process.wait()
+  return process.returncode
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/linux/sysroot_scripts/build_and_upload.py b/linux/sysroot_scripts/build_and_upload.py
new file mode 100755
index 000000000000..d7d95e95b2c7
--- /dev/null
+++ b/linux/sysroot_scripts/build_and_upload.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python3
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Automates running BuildSysroot and UploadSysroot for each supported arch
+of each sysroot creator.
+"""
+
+
+import glob
+import hashlib
+import json
+import multiprocessing
+import os
+import re
+import string
+import subprocess
+import sys
+
+
+def run_script(args):
+  with open(os.devnull, 'w') as fnull:
+    subprocess.check_call(args, stdout=fnull, stderr=fnull)
+
+
+def sha1sumfile(filename):
+  sha1 = hashlib.sha1()
+  with open(filename, 'rb') as f:
+    while True:
+      data = f.read(65536)
+      if not data:
+        break
+      sha1.update(data)
+  return sha1.hexdigest()
+
+
+def get_proc_output(args):
+  return subprocess.check_output(args, encoding='utf-8').strip()
+
+
+def build_and_upload(script_path, distro, release, key, arch, lock):
+  script_dir = os.path.dirname(os.path.realpath(__file__))
+
+  run_script([script_path, 'BuildSysroot' + arch])
+  run_script([script_path, 'UploadSysroot' + arch])
+
+  tarball = '%s_%s_%s_sysroot.tar.xz' % (distro, release, arch.lower())
+  tarxz_path = os.path.join(script_dir, "..", "..", "..", "out",
+                            "sysroot-build", release, tarball)
+  sha1sum = sha1sumfile(tarxz_path)
+  sysroot_dir = '%s_%s_%s-sysroot' % (distro, release, arch.lower())
+
+  sysroot_metadata = {
+      'Tarball': tarball,
+      'Sha1Sum': sha1sum,
+      'SysrootDir': sysroot_dir,
+      'Key': key,
+  }
+  with lock:
+    fname = os.path.join(script_dir, 'sysroots.json')
+    sysroots = json.load(open(fname))
+    with open(fname, 'w') as f:
+      sysroots["%s_%s" % (release, arch.lower())] = sysroot_metadata
+      f.write(
+          json.dumps(
+              sysroots, sort_keys=True, indent=4, separators=(',', ': ')))
+      f.write('\n')
+
+
+def main():
+  script_dir = os.path.dirname(os.path.realpath(__file__))
+  procs = []
+  lock = multiprocessing.Lock()
+  for filename in glob.glob(os.path.join(script_dir, 'sysroot-creator-*.sh')):
+    script_path = os.path.join(script_dir, filename)
+    distro = get_proc_output([script_path, 'PrintDistro'])
+    release = get_proc_output([script_path, 'PrintRelease'])
+    key = get_proc_output([script_path, 'PrintKey'])
+    architectures = get_proc_output([script_path, 'PrintArchitectures'])
+    for arch in architectures.split('\n'):
+      proc = multiprocessing.Process(target=build_and_upload,
+                                     args=(script_path, distro, release, key,
+                                           arch, lock))
+      procs.append(("%s %s (%s)" % (distro, release, arch), proc))
+      proc.start()
+  for _, proc in procs:
+    proc.join()
+
+  print("SYSROOT CREATION SUMMARY")
+  failures = 0
+  for name, proc in procs:
+    if proc.exitcode:
+      failures += 1
+    status = "FAILURE" if proc.exitcode else "SUCCESS"
+    print("%s sysroot creation\t%s" % (name, status))
+  return failures
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/linux/sysroot_scripts/generate_keyring.sh b/linux/sysroot_scripts/generate_keyring.sh
new file mode 100755
index 000000000000..7b17730008da
--- /dev/null
+++ b/linux/sysroot_scripts/generate_keyring.sh
@@ -0,0 +1,41 @@
+#!/bin/bash
+# Copyright 2019 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -o nounset
+set -o errexit
+
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+KEYS=(
+  # Debian Archive Automatic Signing Key (11/bullseye)
+  "73A4F27B8DD47936"
+  # Debian Security Archive Automatic Signing Key (11/bullseye)
+  "A48449044AAD5C5D"
+  # Debian Stable Release Key (11/bullseye)
+  "605C66F00D6C9793"
+  # Debian Stable Release Key (10/buster)
+  "DCC9EFBF77E11517"
+  # Debian Archive Automatic Signing Key (10/buster)
+  "DC30D7C23CBBABEE"
+  # Debian Security Archive Automatic Signing Key (10/buster)
+  "4DFAB270CAA96DFA"
+  # Jessie Stable Release Key
+  "CBF8D6FD518E17E1"
+  # Debian Archive Automatic Signing Key (7.0/wheezy)
+  "8B48AD6246925553"
+  # Debian Archive Automatic Signing Key (8/jessie)
+  "7638D0442B90D010"
+  # Debian Security Archive Automatic Signing Key (8/jessie)
+  "9D6D8F6BC857C906"
+  # Debian Archive Automatic Signing Key (9/stretch)
+  "E0B11894F66AEC98"
+  # Debian Security Archive Automatic Signing Key (9/stretch)
+  "EDA0D2388AE22BA9"
+  # Debian Stable Release Key (9/stretch)
+  "EF0F382A1A7B6500"
+)
+
+gpg --keyserver keyserver.ubuntu.com --recv-keys "${KEYS[@]}"
+gpg --output "${SCRIPT_DIR}/keyring.gpg" --export "${KEYS[@]}"
diff --git a/linux/sysroot_scripts/generated_package_lists/bullseye.amd64 b/linux/sysroot_scripts/generated_package_lists/bullseye.amd64
new file mode 100644
index 000000000000..f66b7aff6f67
--- /dev/null
+++ b/linux/sysroot_scripts/generated_package_lists/bullseye.amd64
@@ -0,0 +1,411 @@
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-0_2.38.0-1~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-dev_2.38.0-1~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-4~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-4~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.44.1-1~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.44.1-1~bpo11+1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/attr/libattr1_2.4.48-6_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/audit/libaudit1_3.0-2_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-client3_0.8-5+deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-common3_0.8-5+deb11u1_amd64.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth3_5.55-3.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth-dev_5.55-3.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2_1.16.0-5_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/colord/libcolord2_1.4.5-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2_2.3.3op2-3+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl3-gnutls_7.88.1-7~bpo11+2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl4-gnutls-dev_7.88.1-7~bpo11+2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-3_1.12.24-0+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-dev_1.12.24-0+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/double-conversion/libdouble-conversion3_3.1.5-6.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.6-1~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/libcom-err2_1.46.6-1~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf1_0.187-1~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf-dev_0.187-1~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1_2.2.10-2+deb11u5_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1-dev_2.2.10-2+deb11u5_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac8_1.3.3-2+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac-dev_1.3.3-2+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_amd64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi0_1.0.8-2+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libasan6_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-s1_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libitm1_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/liblsan0_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libquadmath0_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libtsan0_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libubsan1_10.2.1-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-0_2.66.8-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.8-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6_2.31-13+deb11u5_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6-dev_2.31-13+deb11u5_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.1-5+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls30_3.7.1-5+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.1-5+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.1-5+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.1-5+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_amd64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-4+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-4+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-1_4.8.3+ds-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-dev_4.8.3+ds-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu/libicu67_67.1-7_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig-dev_2.1-3.1+b2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/krb5-multidev_1.18.3-6+deb11u3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-6+deb11u3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssrpc4_1.18.3-6+deb11u3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libk5crypto3_1.18.3-6+deb11u3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-6+deb11u3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-6+deb11u3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkdb5-10_1.18.3-6+deb11u3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-3_1.18.3-6+deb11u3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-dev_1.18.3-6+deb11u3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5support0_1.18.3-6+deb11u3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libb2/libb2-1_0.98.1-1.1_amd64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap2_2.44-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap-dev_2.44-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcloudproviders/libcloudproviders0_0.3.0-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate0_1.10-2~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate-dev_1.10-2~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-intel1_2.4.104-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy0_1.5.8-1~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.8-1~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi7_3.3-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi-dev_3.3-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-6_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_amd64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libice/libice6_1.0.10-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn/libidn11_1.33-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput10_1.16.4-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-4_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-4_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libm/libmd/libmd0_1.0.3-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_amd64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libproxy/libproxy1v5_0.4.17-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1_3.1-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1_3.1-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsm/libsm6_1.2.3-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai0_0.1.28-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai-dev_0.1.28-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtool/libltdl7_2.4.6-15_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter0_1.2.1-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter-dev_1.2.1-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva2_2.17.0-1~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-dev_2.17.0-1~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-drm2_2.17.0-1~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-glx2_2.17.0-1~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-wayland2_2.17.0-1~bpo11+1_amd64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-x11-2_2.17.0-1~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwacom/libwacom2_1.8-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp6_0.6.1-2.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp7_1.2.4-0.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-6_1.7.2-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-dev_1.7.2-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb1_1.7.2-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.2-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau6_1.0.9-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0-dev_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0_1.14-3_amd64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0-dev_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync-dev_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0-dev_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinerama0_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinput0_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xkb1_1.14-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt1_4.4.18-4_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt-dev_4.4.18-4_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi6_1.7.10-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_amd64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-x11-0_1.0.3-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.7+deb11u3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.7+deb11u3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss1_1.2.3-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt6_1.2.0-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lerc/liblerc4_4.0.0+ds-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/linux/linux-libc-dev_6.1.12-1~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lz4/liblz4-1_1.9.3-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lzo2/liblzo2-2_2.10-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/md4c/libmd4c0_0.4.7-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa_20.3.5-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.5-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm1_20.3.5-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm-dev_20.3.5-1_amd64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.5-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.5-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libglapi-mesa_20.3.5-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.5-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/mesa-common-dev_20.3.5-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip1_1.1-8+b1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libhogweed6_3.7.3-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libnettle8_3.7.3-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4_4.29-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4-dev_4.29-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3_3.61-1+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3-dev_3.61-1+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openldap/libldap-2.4-2_2.4.59+dfsg-1~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl1.1_1.1.1n-0+deb11u3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl-dev_1.1.1n-0+deb11u3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus0_1.3.1-0.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g_1.4.0-9+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g-dev_1.4.0-9+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_amd64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci3_3.7.0-5_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-dev_10.36-2+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre16-3_8.39-13_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre32-3_8.39-13_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3_8.39-13_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.65-2~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.65-2~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.65-2~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1.1~deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1.1~deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse0_14.2-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6concurrent6_6.4.2+dfsg-7~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6core6_6.4.2+dfsg-7~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6dbus6_6.4.2+dfsg-7~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6gui6_6.4.2+dfsg-7~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6network6_6.4.2+dfsg-7~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6opengl6_6.4.2+dfsg-7~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6openglwidgets6_6.4.2+dfsg-7~bpo11+1_amd64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6printsupport6_6.4.2+dfsg-7~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6sql6_6.4.2+dfsg-7~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6test6_6.4.2+dfsg-7~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6widgets6_6.4.2+dfsg-7~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6xml6_6.4.2+dfsg-7~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev_6.4.2+dfsg-7~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev-tools_6.4.2+dfsg-7~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5concurrent5_5.15.2+dfsg-9_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5core5a_5.15.2+dfsg-9_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5dbus5_5.15.2+dfsg-9_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5gui5_5.15.2+dfsg-9_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5network5_5.15.2+dfsg-9_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5printsupport5_5.15.2+dfsg-9_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5sql5_5.15.2+dfsg-9_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5test5_5.15.2+dfsg-9_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5widgets5_5.15.2+dfsg-9_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5xml5_5.15.2+dfsg-9_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev_5.15.2+dfsg-9_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev-tools_5.15.2+dfsg-9_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd2_0.11.4-2~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.11.4-2~bpo11+1_amd64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd0_252.5-2~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd-dev_252.5-2~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev1_252.5-2~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev-dev_252.5-2~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff5_4.2.0-1+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff6_4.5.0-5_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff-dev_4.2.0-1+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiffxx5_4.2.0-1+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tslib/libts0_1.22-1+b1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/unbound/libunbound8_1.13.1-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid1_2.36.1-8+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid-dev_2.36.1-8+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount1_2.36.1-8+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount-dev_2.36.1-8+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libuuid1_2.36.1-8+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/uuid-dev_2.36.1-8+deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/valgrind/valgrind_3.16.1-1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan1_1.3.224.0-1~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan-dev_1.3.224.0-1~bpo11+1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-bin_1.18.0-2~exp1.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-client0_1.18.0-2~exp1.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-cursor0_1.18.0-2~exp1.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-dev_1.18.0-2~exp1.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl1_1.18.0-2~exp1.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl-backend-dev_1.18.0-2~exp1.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-server0_1.18.0-2~exp1.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0_0.4.0-1+b3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0-dev_0.4.0-1+b3_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-keysyms/libxcb-keysyms1_0.4.0-1+b2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util1_0.4.0-1+b1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util-dev_0.4.0-1+b1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0_0.3.9-1+b1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0-dev_0.3.9-1+b1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-wm/libxcb-icccm4_0.4.1-1.1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft2_2.3.2-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft-dev_2.3.2-2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xz-utils/liblzma5_5.2.5-2.1~deb11u1_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2+deb11u2_amd64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2+deb11u2_amd64.deb diff --git a/linux/sysroot_scripts/generated_package_lists/bullseye.arm b/linux/sysroot_scripts/generated_package_lists/bullseye.arm new file mode 100644 index 000000000000..09b91d6e299d --- /dev/null +++ b/linux/sysroot_scripts/generated_package_lists/bullseye.arm @@ -0,0 +1,411 @@ +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-0_2.38.0-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-dev_2.38.0-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-4~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-4~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.44.1-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.44.1-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/attr/libattr1_2.4.48-6_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/audit/libaudit1_3.0-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-client3_0.8-5+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-common3_0.8-5+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth3_5.55-3.1_armhf.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth-dev_5.55-3.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2_1.16.0-5_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/colord/libcolord2_1.4.5-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2_2.3.3op2-3+deb11u2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3+deb11u2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3+deb11u2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3+deb11u2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl3-gnutls_7.88.1-7~bpo11+2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl4-gnutls-dev_7.88.1-7~bpo11+2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-3_1.12.24-0+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-dev_1.12.24-0+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/double-conversion/libdouble-conversion3_3.1.5-6.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.6-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/libcom-err2_1.46.6-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf1_0.187-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf-dev_0.187-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1_2.2.10-2+deb11u5_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1-dev_2.2.10-2+deb11u5_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac8_1.3.3-2+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac-dev_1.3.3-2+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_armhf.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi0_1.0.8-2+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libasan6_10.2.1-6_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-s1_10.2.1-6_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libubsan1_10.2.1-6_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-0_2.66.8-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.8-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6_2.31-13+deb11u5_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6-dev_2.31-13+deb11u5_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.1-5+deb11u2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls30_3.7.1-5+deb11u2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.1-5+deb11u2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.1-5+deb11u2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.1-5+deb11u2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_armhf.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-4+deb11u2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-4+deb11u2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-1_4.8.3+ds-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-dev_4.8.3+ds-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu/libicu67_67.1-7_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig-dev_2.1-3.1+b2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/krb5-multidev_1.18.3-6+deb11u3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-6+deb11u3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssrpc4_1.18.3-6+deb11u3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libk5crypto3_1.18.3-6+deb11u3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-6+deb11u3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-6+deb11u3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkdb5-10_1.18.3-6+deb11u3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-3_1.18.3-6+deb11u3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-dev_1.18.3-6+deb11u3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5support0_1.18.3-6+deb11u3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libb2/libb2-1_0.98.1-1.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap2_2.44-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap-dev_2.44-1_armhf.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcloudproviders/libcloudproviders0_0.3.0-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate0_1.10-2~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate-dev_1.10-2~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-etnaviv1_2.4.104-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-exynos1_2.4.104-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-freedreno1_2.4.104-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-omap1_2.4.104-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-tegra0_2.4.104-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy0_1.5.8-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.8-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi7_3.3-6_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi-dev_3.3-6_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-6_armhf.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-6_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libice/libice6_1.0.10-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn/libidn11_1.33-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput10_1.16.4-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-4_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-4_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libm/libmd/libmd0_1.0.3-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_armhf.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libproxy/libproxy1v5_0.4.17-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1_3.1-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1_3.1-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsm/libsm6_1.2.3-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai0_0.1.28-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai-dev_0.1.28-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtool/libltdl7_2.4.6-15_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter0_1.2.1-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter-dev_1.2.1-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva2_2.17.0-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-dev_2.17.0-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-drm2_2.17.0-1~bpo11+1_armhf.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-glx2_2.17.0-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-wayland2_2.17.0-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-x11-2_2.17.0-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwacom/libwacom2_1.8-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp6_0.6.1-2.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp7_1.2.4-0.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-6_1.7.2-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-dev_1.7.2-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb1_1.7.2-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.2-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau6_1.0.9-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0-dev_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_armhf.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0-dev_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync-dev_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0-dev_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinerama0_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinput0_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xkb1_1.14-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt1_4.4.18-4_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt-dev_4.4.18-4_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi6_1.7.10-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_armhf.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-x11-0_1.0.3-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.7+deb11u3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.7+deb11u3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss1_1.2.3-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt6_1.2.0-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lerc/liblerc4_4.0.0+ds-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/linux/linux-libc-dev_6.1.12-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lz4/liblz4-1_1.9.3-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lzo2/liblzo2-2_2.10-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/md4c/libmd4c0_0.4.7-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa_20.3.5-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.5-1_armhf.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm1_20.3.5-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm-dev_20.3.5-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.5-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.5-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libglapi-mesa_20.3.5-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.5-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/mesa-common-dev_20.3.5-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip1_1.1-8+b1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libhogweed6_3.7.3-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libnettle8_3.7.3-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4_4.29-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4-dev_4.29-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3_3.61-1+deb11u2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3-dev_3.61-1+deb11u2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openldap/libldap-2.4-2_2.4.59+dfsg-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl1.1_1.1.1n-0+deb11u3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl-dev_1.1.1n-0+deb11u3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus0_1.3.1-0.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g_1.4.0-9+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g-dev_1.4.0-9+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_armhf.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci3_3.7.0-5_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-dev_10.36-2+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre16-3_8.39-13_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre32-3_8.39-13_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3_8.39-13_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.65-2~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.65-2~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.65-2~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1.1~deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1.1~deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse0_14.2-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6concurrent6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6core6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6dbus6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6gui6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6network6_6.4.2+dfsg-7~bpo11+1_armhf.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6opengl6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6openglwidgets6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6printsupport6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6sql6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6test6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6widgets6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6xml6_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev-tools_6.4.2+dfsg-7~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5concurrent5_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5core5a_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5dbus5_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5gui5_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5network5_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5printsupport5_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5sql5_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5test5_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5widgets5_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5xml5_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev-tools_5.15.2+dfsg-9_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_armhf.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd2_0.11.4-2~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.11.4-2~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd0_252.5-2~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd-dev_252.5-2~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev1_252.5-2~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev-dev_252.5-2~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff5_4.2.0-1+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff6_4.5.0-5_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff-dev_4.2.0-1+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiffxx5_4.2.0-1+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tslib/libts0_1.22-1+b1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/unbound/libunbound8_1.13.1-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid1_2.36.1-8+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid-dev_2.36.1-8+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount1_2.36.1-8+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount-dev_2.36.1-8+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libuuid1_2.36.1-8+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/uuid-dev_2.36.1-8+deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/valgrind/valgrind_3.16.1-1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan1_1.3.224.0-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan-dev_1.3.224.0-1~bpo11+1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-bin_1.18.0-2~exp1.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-client0_1.18.0-2~exp1.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-cursor0_1.18.0-2~exp1.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-dev_1.18.0-2~exp1.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl1_1.18.0-2~exp1.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl-backend-dev_1.18.0-2~exp1.1_armhf.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-server0_1.18.0-2~exp1.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0_0.4.0-1+b3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0-dev_0.4.0-1+b3_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-keysyms/libxcb-keysyms1_0.4.0-1+b2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util1_0.4.0-1+b1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util-dev_0.4.0-1+b1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0_0.3.9-1+b1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0-dev_0.3.9-1+b1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-wm/libxcb-icccm4_0.4.1-1.1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft2_2.3.2-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft-dev_2.3.2-2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xz-utils/liblzma5_5.2.5-2.1~deb11u1_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2+deb11u2_armhf.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2+deb11u2_armhf.deb diff --git a/linux/sysroot_scripts/generated_package_lists/bullseye.arm64 b/linux/sysroot_scripts/generated_package_lists/bullseye.arm64 new file mode 100644 index 000000000000..127cf3ff1885 --- /dev/null +++ b/linux/sysroot_scripts/generated_package_lists/bullseye.arm64 @@ -0,0 +1,414 @@ +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-0_2.38.0-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-dev_2.38.0-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-4~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-4~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.44.1-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.44.1-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/attr/libattr1_2.4.48-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/audit/libaudit1_3.0-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-client3_0.8-5+deb11u1_arm64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-common3_0.8-5+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth3_5.55-3.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth-dev_5.55-3.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2_1.16.0-5_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/colord/libcolord2_1.4.5-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2_2.3.3op2-3+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl3-gnutls_7.88.1-7~bpo11+2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl4-gnutls-dev_7.88.1-7~bpo11+2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-3_1.12.24-0+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-dev_1.12.24-0+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/double-conversion/libdouble-conversion3_3.1.5-6.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.6-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/libcom-err2_1.46.6-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf1_0.187-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf-dev_0.187-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1_2.2.10-2+deb11u5_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1-dev_2.2.10-2+deb11u5_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac8_1.3.3-2+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac-dev_1.3.3-2+deb11u1_arm64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi0_1.0.8-2+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libasan6_10.2.1-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-s1_10.2.1-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libitm1_10.2.1-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/liblsan0_10.2.1-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libtsan0_10.2.1-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libubsan1_10.2.1-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-0_2.66.8-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.8-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6_2.31-13+deb11u5_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6-dev_2.31-13+deb11u5_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.1-5+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls30_3.7.1-5+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.1-5+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.1-5+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.1-5+deb11u2_arm64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-4+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-4+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-1_4.8.3+ds-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-dev_4.8.3+ds-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu/libicu67_67.1-7_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig-dev_2.1-3.1+b2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/krb5-multidev_1.18.3-6+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-6+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssrpc4_1.18.3-6+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libk5crypto3_1.18.3-6+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-6+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-6+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkdb5-10_1.18.3-6+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-3_1.18.3-6+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-dev_1.18.3-6+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5support0_1.18.3-6+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_arm64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libb2/libb2-1_0.98.1-1.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap2_2.44-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap-dev_2.44-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcloudproviders/libcloudproviders0_0.3.0-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate0_1.10-2~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate-dev_1.10-2~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-etnaviv1_2.4.104-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-freedreno1_2.4.104-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-tegra0_2.4.104-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy0_1.5.8-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.8-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi7_3.3-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi-dev_3.3-6_arm64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-6_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libice/libice6_1.0.10-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn/libidn11_1.33-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput10_1.16.4-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-4_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-4_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libm/libmd/libmd0_1.0.3-3_arm64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libproxy/libproxy1v5_0.4.17-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1_3.1-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1_3.1-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsm/libsm6_1.2.3-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai0_0.1.28-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai0_0.1.28-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai-dev_0.1.28-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtool/libltdl7_2.4.6-15_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter0_1.2.1-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter-dev_1.2.1-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva2_2.17.0-1~bpo11+1_arm64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-dev_2.17.0-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-drm2_2.17.0-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-glx2_2.17.0-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-wayland2_2.17.0-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-x11-2_2.17.0-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwacom/libwacom2_1.8-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp6_0.6.1-2.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp7_1.2.4-0.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-6_1.7.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-dev_1.7.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb1_1.7.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.2-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau6_1.0.9-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0_1.14-3_arm64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0-dev_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0-dev_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync-dev_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0-dev_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinerama0_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinput0_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xkb1_1.14-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt1_4.4.18-4_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt-dev_4.4.18-4_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi6_1.7.10-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_arm64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-x11-0_1.0.3-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.7+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.7+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss1_1.2.3-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt6_1.2.0-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lerc/liblerc4_4.0.0+ds-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/linux/linux-libc-dev_6.1.12-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lz4/liblz4-1_1.9.3-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lzo2/liblzo2-2_2.10-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/md4c/libmd4c0_0.4.7-2_arm64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa_20.3.5-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.5-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm1_20.3.5-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm-dev_20.3.5-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.5-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.5-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libglapi-mesa_20.3.5-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.5-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/mesa-common-dev_20.3.5-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip1_1.1-8+b1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libhogweed6_3.7.3-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libnettle8_3.7.3-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4_4.29-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4-dev_4.29-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3_3.61-1+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3-dev_3.61-1+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openldap/libldap-2.4-2_2.4.59+dfsg-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl1.1_1.1.1n-0+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl-dev_1.1.1n-0+deb11u3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus0_1.3.1-0.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g_1.4.0-9+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g-dev_1.4.0-9+deb11u1_arm64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci3_3.7.0-5_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-dev_10.36-2+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre16-3_8.39-13_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre32-3_8.39-13_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3_8.39-13_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.65-2~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.65-2~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.65-2~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1.1~deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1.1~deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse0_14.2-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6concurrent6_6.4.2+dfsg-7~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6core6_6.4.2+dfsg-7~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6dbus6_6.4.2+dfsg-7~bpo11+1_arm64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6gui6_6.4.2+dfsg-7~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6network6_6.4.2+dfsg-7~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6opengl6_6.4.2+dfsg-7~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6openglwidgets6_6.4.2+dfsg-7~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6printsupport6_6.4.2+dfsg-7~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6sql6_6.4.2+dfsg-7~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6test6_6.4.2+dfsg-7~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6widgets6_6.4.2+dfsg-7~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6xml6_6.4.2+dfsg-7~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev_6.4.2+dfsg-7~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev-tools_6.4.2+dfsg-7~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5concurrent5_5.15.2+dfsg-9_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5core5a_5.15.2+dfsg-9_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5dbus5_5.15.2+dfsg-9_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5gui5_5.15.2+dfsg-9_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5network5_5.15.2+dfsg-9_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5printsupport5_5.15.2+dfsg-9_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5sql5_5.15.2+dfsg-9_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5test5_5.15.2+dfsg-9_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5widgets5_5.15.2+dfsg-9_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5xml5_5.15.2+dfsg-9_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev_5.15.2+dfsg-9_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev-tools_5.15.2+dfsg-9_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_arm64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd2_0.11.4-2~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.11.4-2~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd0_252.5-2~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd-dev_252.5-2~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev1_252.5-2~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev-dev_252.5-2~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff5_4.2.0-1+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff6_4.5.0-5_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff-dev_4.2.0-1+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiffxx5_4.2.0-1+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tslib/libts0_1.22-1+b1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/unbound/libunbound8_1.13.1-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid1_2.36.1-8+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid-dev_2.36.1-8+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount1_2.36.1-8+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount-dev_2.36.1-8+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libuuid1_2.36.1-8+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/uuid-dev_2.36.1-8+deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/valgrind/valgrind_3.16.1-1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan1_1.3.224.0-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan-dev_1.3.224.0-1~bpo11+1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-bin_1.18.0-2~exp1.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-client0_1.18.0-2~exp1.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-cursor0_1.18.0-2~exp1.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-dev_1.18.0-2~exp1.1_arm64.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl1_1.18.0-2~exp1.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl-backend-dev_1.18.0-2~exp1.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-server0_1.18.0-2~exp1.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0_0.4.0-1+b3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0-dev_0.4.0-1+b3_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-keysyms/libxcb-keysyms1_0.4.0-1+b2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util1_0.4.0-1+b1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util-dev_0.4.0-1+b1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0_0.3.9-1+b1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0-dev_0.3.9-1+b1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-wm/libxcb-icccm4_0.4.1-1.1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft2_2.3.2-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft-dev_2.3.2-2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xz-utils/liblzma5_5.2.5-2.1~deb11u1_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2+deb11u2_arm64.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2+deb11u2_arm64.deb diff --git a/linux/sysroot_scripts/generated_package_lists/bullseye.armel b/linux/sysroot_scripts/generated_package_lists/bullseye.armel new file mode 100644 index 000000000000..50e7e1371925 --- /dev/null +++ b/linux/sysroot_scripts/generated_package_lists/bullseye.armel @@ -0,0 +1,409 @@ +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-0_2.38.0-1~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-dev_2.38.0-1~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-4~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-4~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.44.1-1~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.44.1-1~bpo11+1_armel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/attr/libattr1_2.4.48-6_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/audit/libaudit1_3.0-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-client3_0.8-5+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-common3_0.8-5+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth3_5.55-3.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth-dev_5.55-3.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2_1.16.0-5_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/colord/libcolord2_1.4.5-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2_2.3.3op2-3+deb11u2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3+deb11u2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3+deb11u2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3+deb11u2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl3-gnutls_7.88.1-7~bpo11+2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl4-gnutls-dev_7.88.1-7~bpo11+2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-3_1.12.24-0+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-dev_1.12.24-0+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/double-conversion/libdouble-conversion3_3.1.5-6.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.6-1~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/libcom-err2_1.46.6-1~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf1_0.187-1~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf-dev_0.187-1~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1_2.2.10-2+deb11u5_armel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1-dev_2.2.10-2+deb11u5_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac8_1.3.3-2+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac-dev_1.3.3-2+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi0_1.0.8-2+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libasan6_10.2.1-6_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-s1_10.2.1-6_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libubsan1_10.2.1-6_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-0_2.66.8-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.8-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6_2.31-13+deb11u5_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6-dev_2.31-13+deb11u5_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.1-5+deb11u2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls30_3.7.1-5+deb11u2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.1-5+deb11u2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.1-5+deb11u2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.1-5+deb11u2_armel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-4+deb11u2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-4+deb11u2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-1_4.8.3+ds-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-dev_4.8.3+ds-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu/libicu67_67.1-7_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig-dev_2.1-3.1+b2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/krb5-multidev_1.18.3-6+deb11u3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-6+deb11u3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssrpc4_1.18.3-6+deb11u3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libk5crypto3_1.18.3-6+deb11u3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-6+deb11u3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-6+deb11u3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkdb5-10_1.18.3-6+deb11u3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-3_1.18.3-6+deb11u3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-dev_1.18.3-6+deb11u3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5support0_1.18.3-6+deb11u3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_armel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libb2/libb2-1_0.98.1-1.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap2_2.44-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap-dev_2.44-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcloudproviders/libcloudproviders0_0.3.0-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate0_1.10-2~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate-dev_1.10-2~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-exynos1_2.4.104-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-freedreno1_2.4.104-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-omap1_2.4.104-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-tegra0_2.4.104-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy0_1.5.8-1~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.8-1~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi7_3.3-6_armel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi-dev_3.3-6_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-6_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-6_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libice/libice6_1.0.10-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn/libidn11_1.33-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput10_1.16.4-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-4_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-4_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_armel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libm/libmd/libmd0_1.0.3-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libproxy/libproxy1v5_0.4.17-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1_3.1-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1_3.1-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsm/libsm6_1.2.3-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai0_0.1.28-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai-dev_0.1.28-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtool/libltdl7_2.4.6-15_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter0_1.2.1-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter-dev_1.2.1-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva2_2.17.0-1~bpo11+1_armel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-dev_2.17.0-1~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-drm2_2.17.0-1~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-glx2_2.17.0-1~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-wayland2_2.17.0-1~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-x11-2_2.17.0-1~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwacom/libwacom2_1.8-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp6_0.6.1-2.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp7_1.2.4-0.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-6_1.7.2-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-dev_1.7.2-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb1_1.7.2-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.2-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau6_1.0.9-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0_1.14-3_armel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0-dev_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0-dev_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync-dev_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0-dev_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinerama0_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinput0_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xkb1_1.14-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt1_4.4.18-4_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt-dev_4.4.18-4_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi6_1.7.10-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_armel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-x11-0_1.0.3-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.7+deb11u3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.7+deb11u3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss1_1.2.3-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt6_1.2.0-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lerc/liblerc4_4.0.0+ds-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/linux/linux-libc-dev_6.1.12-1~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lz4/liblz4-1_1.9.3-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lzo2/liblzo2-2_2.10-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/md4c/libmd4c0_0.4.7-2_armel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa_20.3.5-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.5-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm1_20.3.5-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm-dev_20.3.5-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.5-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.5-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libglapi-mesa_20.3.5-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.5-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/mesa-common-dev_20.3.5-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip1_1.1-8+b1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libhogweed6_3.7.3-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libnettle8_3.7.3-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4_4.29-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4-dev_4.29-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3_3.61-1+deb11u2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3-dev_3.61-1+deb11u2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openldap/libldap-2.4-2_2.4.59+dfsg-1~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl1.1_1.1.1n-0+deb11u3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl-dev_1.1.1n-0+deb11u3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus0_1.3.1-0.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g_1.4.0-9+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g-dev_1.4.0-9+deb11u1_armel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci3_3.7.0-5_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-dev_10.36-2+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre16-3_8.39-13_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre32-3_8.39-13_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3_8.39-13_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.65-2~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.65-2~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.65-2~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1.1~deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1.1~deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse0_14.2-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6concurrent6_6.4.2+dfsg-7~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6core6_6.4.2+dfsg-7~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6dbus6_6.4.2+dfsg-7~bpo11+1_armel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6gui6_6.4.2+dfsg-7~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6network6_6.4.2+dfsg-7~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6opengl6_6.4.2+dfsg-7~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6openglwidgets6_6.4.2+dfsg-7~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6printsupport6_6.4.2+dfsg-7~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6sql6_6.4.2+dfsg-7~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6test6_6.4.2+dfsg-7~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6widgets6_6.4.2+dfsg-7~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6xml6_6.4.2+dfsg-7~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev_6.4.2+dfsg-7~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev-tools_6.4.2+dfsg-7~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5concurrent5_5.15.2+dfsg-9_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5core5a_5.15.2+dfsg-9_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5dbus5_5.15.2+dfsg-9_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5gui5_5.15.2+dfsg-9_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5network5_5.15.2+dfsg-9_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5printsupport5_5.15.2+dfsg-9_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5sql5_5.15.2+dfsg-9_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5test5_5.15.2+dfsg-9_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5widgets5_5.15.2+dfsg-9_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5xml5_5.15.2+dfsg-9_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev_5.15.2+dfsg-9_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev-tools_5.15.2+dfsg-9_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_armel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd2_0.11.4-2~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.11.4-2~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd0_252.5-2~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd-dev_252.5-2~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev1_252.5-2~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev-dev_252.5-2~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff5_4.2.0-1+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff6_4.5.0-5_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff-dev_4.2.0-1+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiffxx5_4.2.0-1+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tslib/libts0_1.22-1+b1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/unbound/libunbound8_1.13.1-1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid1_2.36.1-8+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid-dev_2.36.1-8+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount1_2.36.1-8+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount-dev_2.36.1-8+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libuuid1_2.36.1-8+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/uuid-dev_2.36.1-8+deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan1_1.3.224.0-1~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan-dev_1.3.224.0-1~bpo11+1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-bin_1.18.0-2~exp1.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-client0_1.18.0-2~exp1.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-cursor0_1.18.0-2~exp1.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-dev_1.18.0-2~exp1.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl1_1.18.0-2~exp1.1_armel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl-backend-dev_1.18.0-2~exp1.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-server0_1.18.0-2~exp1.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0_0.4.0-1+b3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0-dev_0.4.0-1+b3_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-keysyms/libxcb-keysyms1_0.4.0-1+b2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util1_0.4.0-1+b1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util-dev_0.4.0-1+b1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0_0.3.9-1+b1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0-dev_0.3.9-1+b1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-wm/libxcb-icccm4_0.4.1-1.1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft2_2.3.2-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft-dev_2.3.2-2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xz-utils/liblzma5_5.2.5-2.1~deb11u1_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2+deb11u2_armel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2+deb11u2_armel.deb diff --git a/linux/sysroot_scripts/generated_package_lists/bullseye.i386 b/linux/sysroot_scripts/generated_package_lists/bullseye.i386 new file mode 100644 index 000000000000..220dd7db9729 --- /dev/null +++ b/linux/sysroot_scripts/generated_package_lists/bullseye.i386 @@ -0,0 +1,409 @@ +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-0_2.38.0-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-dev_2.38.0-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-4~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-4~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.44.1-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.44.1-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/attr/libattr1_2.4.48-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/audit/libaudit1_3.0-2_i386.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-client3_0.8-5+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-common3_0.8-5+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth3_5.55-3.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth-dev_5.55-3.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2_1.16.0-5_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/colord/libcolord2_1.4.5-3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2_2.3.3op2-3+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl3-gnutls_7.88.1-7~bpo11+2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl4-gnutls-dev_7.88.1-7~bpo11+2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-3_1.12.24-0+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-dev_1.12.24-0+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/double-conversion/libdouble-conversion3_3.1.5-6.1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.6-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/libcom-err2_1.46.6-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf1_0.187-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf-dev_0.187-1~bpo11+1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1_2.2.10-2+deb11u5_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1-dev_2.2.10-2+deb11u5_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac8_1.3.3-2+deb11u1_i386.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac-dev_1.3.3-2+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi0_1.0.8-2+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libasan6_10.2.1-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-s1_10.2.1-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libitm1_10.2.1-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libquadmath0_10.2.1-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libubsan1_10.2.1-6_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-0_2.66.8-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.8-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6_2.31-13+deb11u5_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6-dev_2.31-13+deb11u5_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1+deb11u1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.1-5+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls30_3.7.1-5+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.1-5+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.1-5+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.1-5+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_i386.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-4+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-4+deb11u2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-1_4.8.3+ds-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-dev_4.8.3+ds-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu/libicu67_67.1-7_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig-dev_2.1-3.1+b2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/krb5-multidev_1.18.3-6+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-6+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssrpc4_1.18.3-6+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libk5crypto3_1.18.3-6+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-6+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-6+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkdb5-10_1.18.3-6+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-3_1.18.3-6+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-dev_1.18.3-6+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5support0_1.18.3-6+deb11u3_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_i386.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libb2/libb2-1_0.98.1-1.1_i386.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap2_2.44-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap-dev_2.44-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcloudproviders/libcloudproviders0_0.3.0-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate0_1.10-2~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate-dev_1.10-2~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-intel1_2.4.104-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy0_1.5.8-1~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.8-1~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi7_3.3-6_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi-dev_3.3-6_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-6_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-6_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libice/libice6_1.0.10-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn/libidn11_1.33-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput10_1.16.4-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-4_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-4_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libm/libmd/libmd0_1.0.3-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libproxy/libproxy1v5_0.4.17-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1_3.1-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1_3.1-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsm/libsm6_1.2.3-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2+deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai0_0.1.28-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai-dev_0.1.28-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1+deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtool/libltdl7_2.4.6-15_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter0_1.2.1-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter-dev_1.2.1-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva2_2.17.0-1~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-dev_2.17.0-1~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-drm2_2.17.0-1~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-glx2_2.17.0-1~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-wayland2_2.17.0-1~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-x11-2_2.17.0-1~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwacom/libwacom2_1.8-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp6_0.6.1-2.1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp7_1.2.4-0.1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2.1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2.1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2.1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-6_1.7.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-dev_1.7.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb1_1.7.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.2-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau6_1.0.9-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0-dev_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0-dev_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync-dev_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0-dev_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinerama0_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinput0_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xkb1_1.14-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt1_4.4.18-4_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt-dev_4.4.18-4_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi6_1.7.10-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-x11-0_1.0.3-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.7+deb11u3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.7+deb11u3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4+deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4+deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss1_1.2.3-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt6_1.2.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lerc/liblerc4_4.0.0+ds-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/linux/linux-libc-dev_6.1.12-1~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lz4/liblz4-1_1.9.3-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lzo2/liblzo2-2_2.10-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/md4c/libmd4c0_0.4.7-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa_20.3.5-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.5-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm1_20.3.5-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm-dev_20.3.5-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.5-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.5-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libglapi-mesa_20.3.5-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.5-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/mesa-common-dev_20.3.5-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip1_1.1-8+b1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libhogweed6_3.7.3-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libnettle8_3.7.3-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4_4.29-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4-dev_4.29-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3_3.61-1+deb11u2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3-dev_3.61-1+deb11u2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openldap/libldap-2.4-2_2.4.59+dfsg-1~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl1.1_1.1.1n-0+deb11u3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl-dev_1.1.1n-0+deb11u3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus0_1.3.1-0.1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g_1.4.0-9+deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g-dev_1.4.0-9+deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci3_3.7.0-5_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2+deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2+deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2+deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-dev_10.36-2+deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2+deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre16-3_8.39-13_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre32-3_8.39-13_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3_8.39-13_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.65-2~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.65-2~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.65-2~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1.1~deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1.1~deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse0_14.2-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6concurrent6_6.4.2+dfsg-7~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6core6_6.4.2+dfsg-7~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6dbus6_6.4.2+dfsg-7~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6gui6_6.4.2+dfsg-7~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6network6_6.4.2+dfsg-7~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6opengl6_6.4.2+dfsg-7~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6openglwidgets6_6.4.2+dfsg-7~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6printsupport6_6.4.2+dfsg-7~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6sql6_6.4.2+dfsg-7~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6test6_6.4.2+dfsg-7~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6widgets6_6.4.2+dfsg-7~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6xml6_6.4.2+dfsg-7~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev_6.4.2+dfsg-7~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev-tools_6.4.2+dfsg-7~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5concurrent5_5.15.2+dfsg-9_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5core5a_5.15.2+dfsg-9_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5dbus5_5.15.2+dfsg-9_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5gui5_5.15.2+dfsg-9_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5network5_5.15.2+dfsg-9_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5printsupport5_5.15.2+dfsg-9_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5sql5_5.15.2+dfsg-9_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5test5_5.15.2+dfsg-9_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5widgets5_5.15.2+dfsg-9_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5xml5_5.15.2+dfsg-9_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev_5.15.2+dfsg-9_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev-tools_5.15.2+dfsg-9_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd2_0.11.4-2~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.11.4-2~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd0_252.5-2~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd-dev_252.5-2~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev1_252.5-2~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev-dev_252.5-2~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff5_4.2.0-1+deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff6_4.5.0-5_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff-dev_4.2.0-1+deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiffxx5_4.2.0-1+deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tslib/libts0_1.22-1+b1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/unbound/libunbound8_1.13.1-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid1_2.36.1-8+deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid-dev_2.36.1-8+deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount1_2.36.1-8+deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount-dev_2.36.1-8+deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libuuid1_2.36.1-8+deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/uuid-dev_2.36.1-8+deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/valgrind/valgrind_3.16.1-1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan1_1.3.224.0-1~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan-dev_1.3.224.0-1~bpo11+1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-bin_1.18.0-2~exp1.1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-client0_1.18.0-2~exp1.1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-cursor0_1.18.0-2~exp1.1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-dev_1.18.0-2~exp1.1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl1_1.18.0-2~exp1.1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl-backend-dev_1.18.0-2~exp1.1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-server0_1.18.0-2~exp1.1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0_0.4.0-1+b3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0-dev_0.4.0-1+b3_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-keysyms/libxcb-keysyms1_0.4.0-1+b2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util1_0.4.0-1+b1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util-dev_0.4.0-1+b1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0_0.3.9-1+b1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0-dev_0.3.9-1+b1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-wm/libxcb-icccm4_0.4.1-1.1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft2_2.3.2-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft-dev_2.3.2-2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xz-utils/liblzma5_5.2.5-2.1~deb11u1_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2+deb11u2_i386.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2+deb11u2_i386.deb
diff --git a/linux/sysroot_scripts/generated_package_lists/bullseye.mips64el b/linux/sysroot_scripts/generated_package_lists/bullseye.mips64el
new file mode 100644
index 000000000000..de6da06f9c31
--- /dev/null
+++ b/linux/sysroot_scripts/generated_package_lists/bullseye.mips64el
@@ -0,0 +1,404 @@
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-0_2.38.0-1~bpo11+1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-dev_2.38.0-1~bpo11+1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-4~bpo11+1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-4~bpo11+1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.44.1-1~bpo11+1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.44.1-1~bpo11+1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/attr/libattr1_2.4.48-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/audit/libaudit1_3.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-client3_0.8-5+deb11u1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-common3_0.8-5+deb11u1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth3_5.55-3.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth-dev_5.55-3.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2_1.16.0-5_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/colord/libcolord2_1.4.5-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2_2.3.3op2-3+deb11u2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3+deb11u2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3+deb11u2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3+deb11u2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl3-gnutls_7.88.1-7~bpo11+2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl4-gnutls-dev_7.88.1-7~bpo11+2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1+deb11u1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-3_1.12.24-0+deb11u1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-dev_1.12.24-0+deb11u1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/double-conversion/libdouble-conversion3_3.1.5-6.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.6-1~bpo11+1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/libcom-err2_1.46.6-1~bpo11+1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf1_0.187-1~bpo11+1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf-dev_0.187-1~bpo11+1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1_2.2.10-2+deb11u5_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1-dev_2.2.10-2+deb11u5_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac8_1.3.3-2+deb11u1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac-dev_1.3.3-2+deb11u1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1+deb11u1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1+deb11u1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi0_1.0.8-2+deb11u1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2+deb11u1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-s1_10.2.1-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1+deb11u1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1+deb11u1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-0_2.66.8-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.8-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6_2.31-13+deb11u5_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6-dev_2.31-13+deb11u5_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1+deb11u1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.1-5+deb11u2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls30_3.7.1-5+deb11u2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.1-5+deb11u2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.1-5+deb11u2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.1-5+deb11u2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-4+deb11u2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-4+deb11u2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-1_4.8.3+ds-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-dev_4.8.3+ds-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu/libicu67_67.1-7_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig-dev_2.1-3.1+b2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/krb5-multidev_1.18.3-6+deb11u3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-6+deb11u3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssrpc4_1.18.3-6+deb11u3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libk5crypto3_1.18.3-6+deb11u3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-6+deb11u3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-6+deb11u3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkdb5-10_1.18.3-6+deb11u3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-3_1.18.3-6+deb11u3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-dev_1.18.3-6+deb11u3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5support0_1.18.3-6+deb11u3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libb2/libb2-1_0.98.1-1.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap2_2.44-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap-dev_2.44-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcloudproviders/libcloudproviders0_0.3.0-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate0_1.10-2~bpo11+1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate-dev_1.10-2~bpo11+1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy0_1.5.8-1~bpo11+1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.8-1~bpo11+1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi7_3.3-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi-dev_3.3-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-6_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libice/libice6_1.0.10-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn/libidn11_1.33-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput10_1.16.4-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libm/libmd/libmd0_1.0.3-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libproxy/libproxy1v5_0.4.17-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1_3.1-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1_3.1-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsm/libsm6_1.2.3-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2+deb11u1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai0_0.1.28-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai-dev_0.1.28-3_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1+deb11u1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtool/libltdl7_2.4.6-15_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter0_1.2.1-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter-dev_1.2.1-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva2_2.17.0-1~bpo11+1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-dev_2.17.0-1~bpo11+1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-drm2_2.17.0-1~bpo11+1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-glx2_2.17.0-1~bpo11+1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-wayland2_2.17.0-1~bpo11+1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-x11-2_2.17.0-1~bpo11+1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwacom/libwacom2_1.8-2_mips64el.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp6_0.6.1-2.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp7_1.2.4-0.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-6_1.7.2-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-dev_1.7.2-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb1_1.7.2-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.2-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau6_1.0.9-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0-dev_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0-dev_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_mips64el.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync-dev_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0-dev_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinerama0_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinput0_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xkb1_1.14-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt1_4.4.18-4_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt-dev_4.4.18-4_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi6_1.7.10-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-x11-0_1.0.3-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.7+deb11u3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.7+deb11u3_mips64el.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss1_1.2.3-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt6_1.2.0-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lerc/liblerc4_4.0.0+ds-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/linux/linux-libc-dev_5.19.11-1~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lz4/liblz4-1_1.9.3-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lzo2/liblzo2-2_2.10-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/md4c/libmd4c0_0.4.7-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa_20.3.5-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.5-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm1_20.3.5-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm-dev_20.3.5-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.5-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.5-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libglapi-mesa_20.3.5-1_mips64el.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.5-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/mesa-common-dev_20.3.5-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip1_1.1-8+b1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libhogweed6_3.7.3-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libnettle8_3.7.3-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4_4.29-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4-dev_4.29-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3_3.61-1+deb11u2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3-dev_3.61-1+deb11u2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openldap/libldap-2.4-2_2.4.59+dfsg-1~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl1.1_1.1.1n-0+deb11u3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl-dev_1.1.1n-0+deb11u3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus0_1.3.1-0.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g_1.4.0-9+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g-dev_1.4.0-9+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_mips64el.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci3_3.7.0-5_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-dev_10.36-2+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre16-3_8.39-13_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre32-3_8.39-13_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3_8.39-13_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.65-2~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.65-2~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.65-2~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1.1~deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1.1~deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse0_14.2-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6concurrent6_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6core6_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6dbus6_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6gui6_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6network6_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6opengl6_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6openglwidgets6_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6printsupport6_6.4.2+dfsg-7~bpo11+1_mips64el.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6sql6_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6test6_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6widgets6_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6xml6_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev-tools_6.4.2+dfsg-7~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5concurrent5_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5core5a_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5dbus5_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5gui5_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5network5_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5printsupport5_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5sql5_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5test5_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5widgets5_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5xml5_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev-tools_5.15.2+dfsg-9_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd2_0.11.4-2~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.11.4-2~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_mips64el.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd0_252.5-2~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd-dev_252.5-2~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev1_252.5-2~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev-dev_252.5-2~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff5_4.2.0-1+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff6_4.5.0-5_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff-dev_4.2.0-1+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiffxx5_4.2.0-1+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tslib/libts0_1.22-1+b1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/unbound/libunbound8_1.13.1-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid1_2.36.1-8+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid-dev_2.36.1-8+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount1_2.36.1-8+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount-dev_2.36.1-8+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libuuid1_2.36.1-8+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/uuid-dev_2.36.1-8+deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/valgrind/valgrind_3.16.1-1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan1_1.3.224.0-1~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan-dev_1.3.224.0-1~bpo11+1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-bin_1.18.0-2~exp1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-client0_1.18.0-2~exp1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-cursor0_1.18.0-2~exp1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-dev_1.18.0-2~exp1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl1_1.18.0-2~exp1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl-backend-dev_1.18.0-2~exp1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-server0_1.18.0-2~exp1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0_0.4.0-1+b3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0-dev_0.4.0-1+b3_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-keysyms/libxcb-keysyms1_0.4.0-1+b2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util1_0.4.0-1+b1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util-dev_0.4.0-1+b1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0_0.3.9-1+b1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0-dev_0.3.9-1+b1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-wm/libxcb-icccm4_0.4.1-1.1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft2_2.3.2-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft-dev_2.3.2-2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xz-utils/liblzma5_5.2.5-2.1~deb11u1_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2+deb11u2_mips64el.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2+deb11u2_mips64el.deb diff --git a/linux/sysroot_scripts/generated_package_lists/bullseye.mipsel b/linux/sysroot_scripts/generated_package_lists/bullseye.mipsel new file mode 100644 index 000000000000..21322b23c7ad --- /dev/null +++ b/linux/sysroot_scripts/generated_package_lists/bullseye.mipsel @@ -0,0 +1,403 @@ +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2_1.2.4-1.1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/alsa-lib/libasound2-dev_1.2.4-1.1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-0_2.38.0-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/atk1.0/libatk1.0-dev_2.38.0-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-0_2.38.0-4~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-atk/libatk-bridge2.0-dev_2.38.0-4~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-0_2.44.1-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/at-spi2-core/libatspi2.0-dev_2.44.1-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/attr/libattr1_2.4.48-6_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/audit/libaudit1_3.0-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-client3_0.8-5+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/a/avahi/libavahi-common3_0.8-5+deb11u1_mipsel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth3_5.55-3.1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/bluez/libbluetooth-dev_5.55-3.1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli1_1.0.9-2+b2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/b/brotli/libbrotli-dev_1.0.9-2+b2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2_1.16.0-5_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo2-dev_1.16.0-5_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-gobject2_1.16.0-5_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cairo/libcairo-script-interpreter2_1.16.0-5_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/colord/libcolord2_1.4.5-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2_2.3.3op2-3+deb11u2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcups2-dev_2.3.3op2-3+deb11u2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2_2.3.3op2-3+deb11u2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cups/libcupsimage2-dev_2.3.3op2-3+deb11u2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl3-gnutls_7.88.1-7~bpo11+2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/curl/libcurl4-gnutls-dev_7.88.1-7~bpo11+2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/c/cyrus-sasl2/libsasl2-2_2.1.27+dfsg-2.1+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/db5.3/libdb5.3_5.3.28+dfsg1-0.8_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus-glib/libdbus-glib-1-2_0.110-6_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-3_1.12.24-0+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/dbus/libdbus-1-dev_1.12.24-0+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/d/double-conversion/libdouble-conversion3_3.1.5-6.1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/comerr-dev_2.1-1.46.6-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/e2fsprogs/libcom-err2_1.46.6-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf1_0.187-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/elfutils/libelf-dev_0.187-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1_2.2.10-2+deb11u5_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/e/expat/libexpat1-dev_2.2.10-2+deb11u5_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac8_1.3.3-2+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/flac/libflac-dev_1.3.3-2+deb11u1_mipsel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig1_2.13.1-4.2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fontconfig/libfontconfig-dev_2.13.1-4.2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype6_2.10.4+dfsg-1+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/freetype/libfreetype-dev_2.10.4+dfsg-1+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi0_1.0.8-2+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/f/fribidi/libfribidi-dev_1.0.8-2+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libatomic1_10.2.1-6_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-10-dev_10.2.1-6_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgcc-s1_10.2.1-6_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libgomp1_10.2.1-6_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++-10-dev_10.2.1-6_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gcc-10/libstdc++6_10.2.1-6_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-0_2.42.2+dfsg-1+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gdk-pixbuf/libgdk-pixbuf-2.0-dev_2.42.2+dfsg-1+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-0_2.66.8-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glib2.0/libglib2.0-dev_2.66.8-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6_2.31-13+deb11u5_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/glibc/libc6-dev_2.31-13+deb11u5_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gmp/libgmp10_6.2.1+dfsg-1+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls28-dev_3.7.1-5+deb11u2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls30_3.7.1-5+deb11u2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-dane0_3.7.1-5+deb11u2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutls-openssl27_3.7.1-5+deb11u2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gnutls28/libgnutlsxx28_3.7.1-5+deb11u2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-0_1.10.4+dfsg1-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphene/libgraphene-1.0-dev_1.10.4+dfsg1-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-3_1.3.14-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/graphite2/libgraphite2-dev_1.3.14-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+2.0/libgtk2.0-0_2.24.33-2_mipsel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-0_3.24.24-4+deb11u2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk+3.0/libgtk-3-dev_3.24.24-4+deb11u2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-1_4.8.3+ds-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/g/gtk4/libgtk-4-dev_4.8.3+ds-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz0b_2.7.4-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-dev_2.7.4-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-gobject0_2.7.4-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/h/harfbuzz/libharfbuzz-icu0_2.7.4-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu-le-hb/libicu-le-hb0_1.0.3+git180724-3+b2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/i/icu/libicu67_67.1-7_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig0_2.1-3.1+b2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/jbigkit/libjbig-dev_2.1-3.1+b2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/j/json-glib/libjson-glib-1.0-0_1.6.2-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/keyutils/libkeyutils1_1.6.1-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/krb5-multidev_1.18.3-6+deb11u3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssapi-krb5-2_1.18.3-6+deb11u3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libgssrpc4_1.18.3-6+deb11u3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libk5crypto3_1.18.3-6+deb11u3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5clnt-mit12_1.18.3-6+deb11u3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkadm5srv-mit12_1.18.3-6+deb11u3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkdb5-10_1.18.3-6+deb11u3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-3_1.18.3-6+deb11u3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5-dev_1.18.3-6+deb11u3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/k/krb5/libkrb5support0_1.18.3-6+deb11u3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/liba/libasyncns/libasyncns0_0.8-6+b2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libb2/libb2-1_0.98.1-1.1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libb/libbsd/libbsd0_0.11.3-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap2_2.44-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap2/libcap-dev_2.44-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcap-ng/libcap-ng0_0.7.9-2.2+b1_mipsel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libc/libcloudproviders/libcloudproviders0_0.3.0-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie1_0.2.13-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdatrie/libdatrie-dev_0.2.13-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib4_18.10.20180917~bzr492+repack1-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-glib-dev_18.10.20180917~bzr492+repack1-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk3-4_18.10.20180917~bzr492+repack1-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdbusmenu/libdbusmenu-gtk4_18.10.20180917~bzr492+repack1-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate0_1.10-2~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdeflate/libdeflate-dev_1.10-2~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm2_2.4.104-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-amdgpu1_2.4.104-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-dev_2.4.104-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-nouveau2_2.4.104-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libd/libdrm/libdrm-radeon1_2.4.104-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy0_1.5.8-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libepoxy/libepoxy-dev_1.5.8-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev2_1.11.0+dfsg-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevdev/libevdev-dev_1.11.0+dfsg-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libe/libevent/libevent-2.1-7_2.1.12-stable-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi7_3.3-6_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libf/libffi/libffi-dev_3.3-6_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-6_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgcrypt20/libgcrypt20-dev_1.8.7-6_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl1_1.3.2-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libegl-dev_1.3.2-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl1_1.3.2-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgl-dev_1.3.2-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles1_1.3.2-1_mipsel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles2_1.3.2-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libgles-dev_1.3.2-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd0_1.3.2-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglvnd-dev_1.3.2-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx0_1.3.2-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libglx-dev_1.3.2-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libglvnd/libopengl0_1.3.2-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring0_3.12.0-1+b2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgnome-keyring/libgnome-keyring-dev_3.12.0-1+b2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error0_1.38-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgpg-error/libgpg-error-dev_1.38-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libg/libgudev/libgudev-1.0-0_234-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libice/libice6_1.0.10-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidl/libidl-2-0_0.8.14-4+b12_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn2/libidn2-0_2.3.0-5_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libidn/libidn11_1.33-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput10_1.16.4-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libi/libinput/libinput-dev_1.16.4-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo_2.0.6-4_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjpeg-turbo/libjpeg62-turbo-dev_2.0.6-4_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp24_1.9.4-4_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libj/libjsoncpp/libjsoncpp-dev_1.9.4-4_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libm/libmd/libmd0_1.0.3-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnsl/libnsl2_1.3.0-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libn/libnss-db/libnss-db_2.2.3pre1-6+b10_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg0_1.3.4-0.1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libo/libogg/libogg-dev_1.3.4-0.1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpciaccess/libpciaccess0_0.16-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng16-16_1.6.37-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpng1.6/libpng-dev_1.6.37-3_mipsel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libproxy/libproxy1v5_0.4.17-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpsl/libpsl5_0.21.0-1.2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libp/libpthread-stubs/libpthread-stubs0-dev_0.4-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libr/librest/librest-0.7-0_0.8.1-1.1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1_3.1-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libselinux/libselinux1-dev_3.1-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1_3.1-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsepol/libsepol1-dev_3.1-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsm/libsm6_1.2.3-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsndfile/libsndfile1_1.0.31-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup2.4-1_2.72.0-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libsoup2.4/libsoup-gnome2.4-1_2.72.0-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libs/libssh2/libssh2-1_1.9.0-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai0_0.1.28-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libthai/libthai-dev_0.1.28-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtirpc/libtirpc3_1.3.1-1+deb11u1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libt/libtool/libltdl7_2.4.6-15_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libunistring/libunistring2_0.9.10-4_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter0_1.2.1-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libu/libutempter/libutempter-dev_1.2.1-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva2_2.17.0-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-dev_2.17.0-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-drm2_2.17.0-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-glx2_2.17.0-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-wayland2_2.17.0-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libva/libva-x11-2_2.17.0-1~bpo11+1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbis0a_1.3.7-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libv/libvorbis/libvorbisenc2_1.3.7-1_mipsel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwacom/libwacom2_1.8-2_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp6_0.6.1-2.1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp7_1.2.4-0.1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpdemux2_0.6.1-2.1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebp-dev_0.6.1-2.1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libw/libwebp/libwebpmux3_0.6.1-2.1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-6_1.7.2-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-dev_1.7.2-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb1_1.7.2-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libx11/libx11-xcb-dev_1.7.2-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau6_1.0.9-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxau/libxau-dev_1.0.9-1_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1_1.14-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb1-dev_1.14-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0_1.14-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri2-0-dev_1.14-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-0_1.14-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-dri3-dev_1.14-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0_1.14-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-glx0-dev_1.14-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present0_1.14-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-present-dev_1.14-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0_1.14-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-randr0-dev_1.14-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0_1.14-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-render0-dev_1.14-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0_1.14-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shape0-dev_1.14-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0_1.14-3_mipsel.deb +https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-shm0-dev_1.14-3_mipsel.deb 
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync1_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-sync-dev_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xfixes0-dev_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinerama0_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xinput0_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcb/libxcb-xkb1_1.14-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite1_0.4.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcomposite/libxcomposite-dev_0.4.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt1_4.4.18-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcrypt/libcrypt-dev_4.4.18-4_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor1_1.2.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxcursor/libxcursor-dev_1.2.0-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage1_1.1.5-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdamage/libxdamage-dev_1.1.5-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp6_1.1.2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxdmcp/libxdmcp-dev_1.1.2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext6_1.3.3-1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxext/libxext-dev_1.3.3-1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes3_5.0.3-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxfixes/libxfixes-dev_5.0.3-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi6_1.7.10-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxi/libxi-dev_1.7.10-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama1_1.1.4-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxinerama/libxinerama-dev_1.1.4-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon0_1.0.3-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-dev_1.0.3-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxkbcommon/libxkbcommon-x11-0_1.0.3-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2_2.9.10+dfsg-6.7+deb11u3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxml2/libxml2-dev_2.9.10+dfsg-6.7+deb11u3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr2_1.5.1-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrandr/libxrandr-dev_1.5.1-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender1_0.9.10-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxrender/libxrender-dev_0.9.10-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence1_1.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxshmfence/libxshmfence-dev_1.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1.1_1.1.34-4+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxslt/libxslt1-dev_1.1.34-4+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss1_1.2.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxss/libxss-dev_1.2.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt6_1.2.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxt/libxt-dev_1.2.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst6_1.2.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxtst/libxtst-dev_1.2.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm1_1.1.4-1+b2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libx/libxxf86vm/libxxf86vm-dev_1.1.4-1+b2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/libz/libzstd/libzstd1_1.4.8+dfsg-2.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lcms2/liblcms2-2_2.12~rc1-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lerc/liblerc4_4.0.0+ds-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/linux/linux-libc-dev_6.1.12-1~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lz4/liblz4-1_1.9.3-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/l/lzo2/liblzo2-2_2.10-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/md4c/libmd4c0_0.4.7-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa_20.3.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libegl1-mesa-dev_20.3.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm1_20.3.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgbm-dev_20.3.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-dev_20.3.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libgl1-mesa-glx_20.3.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libglapi-mesa_20.3.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/libwayland-egl1-mesa_20.3.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mesa/mesa-common-dev_20.3.5-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip1_1.1-8+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/minizip/libminizip-dev_1.1-8+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/m/mtdev/libmtdev1_1.1.6-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses6_6.2+20201114-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncurses-dev_6.2+20201114-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libncursesw6_6.2+20201114-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/ncurses/libtinfo6_6.2+20201114-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libhogweed6_3.7.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nettle/libnettle8_3.7.3-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nghttp2/libnghttp2-14_1.43.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4_4.29-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nspr/libnspr4-dev_4.29-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3_3.61-1+deb11u2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/n/nss/libnss3-dev_3.61-1+deb11u2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openldap/libldap-2.4-2_2.4.59+dfsg-1~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl1.1_1.1.1n-0+deb11u3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/openssl/libssl-dev_1.1.1n-0+deb11u3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus0_1.3.1-0.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/o/opus/libopus-dev_1.3.1-0.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/p11-kit/libp11-kit0_0.23.22-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g_1.4.0-9+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pam/libpam0g-dev_1.4.0-9+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango-1.0-0_1.46.2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpango1.0-dev_1.46.2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangocairo-1.0-0_1.46.2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoft2-1.0-0_1.46.2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pango1.0/libpangoxft-1.0-0_1.46.2-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pangox-compat/libpangox-1.0-0_0.0.2-5.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci3_3.7.0-5_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pciutils/libpci-dev_3.7.0-5_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-16-0_10.36-2+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-32-0_10.36-2+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-8-0_10.36-2+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-dev_10.36-2+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre2/libpcre2-posix2_10.36-2+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre16-3_8.39-13_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre32-3_8.39-13_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3_8.39-13_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcre3-dev_8.39-13_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pcre3/libpcrecpp0v5_8.39-13_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-0_0.3.65-2~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libpipewire-0.3-dev_0.3.65-2~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pipewire/libspa-0.2-dev_0.3.65-2~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-0_0.40.0-1.1~deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pixman/libpixman-1-dev_0.40.0-1.1~deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse0_14.2-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-dev_14.2-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/p/pulseaudio/libpulse-mainloop-glib0_14.2-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6concurrent6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6core6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6dbus6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6gui6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6network6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6opengl6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6openglwidgets6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6printsupport6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6sql6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6test6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6widgets6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/libqt6xml6_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qt6-base/qt6-base-dev-tools_6.4.2+dfsg-7~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5concurrent5_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5core5a_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5dbus5_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5gui5_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5network5_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5printsupport5_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5sql5_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5test5_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5widgets5_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/libqt5xml5_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/q/qtbase-opensource-src/qtbase5-dev-tools_5.15.2+dfsg-9_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-9_20210201+dfsg-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/re2/libre2-dev_20210201+dfsg-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/r/rtmpdump/librtmp1_2.4+20151223.gitfa8646d.1-2+b2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/shared-mime-info/shared-mime-info_2.0-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy1v5_1.1.8-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/snappy/libsnappy-dev_1.1.8-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd2_0.11.4-2~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/speech-dispatcher/libspeechd-dev_0.11.4-2~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/sqlite3/libsqlite3-0_3.34.1-3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd0_252.5-2~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libsystemd-dev_252.5-2~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev1_252.5-2~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/s/systemd/libudev-dev_252.5-2~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tcp-wrappers/libwrap0_7.6.q-31_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff5_4.2.0-1+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff6_4.5.0-5_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiff-dev_4.2.0-1+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tiff/libtiffxx5_4.2.0-1+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/t/tslib/libts0_1.22-1+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/unbound/libunbound8_1.13.1-1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid1_2.36.1-8+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libblkid-dev_2.36.1-8+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount1_2.36.1-8+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libmount-dev_2.36.1-8+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/libuuid1_2.36.1-8+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/u/util-linux/uuid-dev_2.36.1-8+deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan1_1.3.224.0-1~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/v/vulkan-loader/libvulkan-dev_1.3.224.0-1~bpo11+1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-bin_1.18.0-2~exp1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-client0_1.18.0-2~exp1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-cursor0_1.18.0-2~exp1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-dev_1.18.0-2~exp1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl1_1.18.0-2~exp1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-egl-backend-dev_1.18.0-2~exp1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland/libwayland-server0_1.18.0-2~exp1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/w/wayland-protocols/wayland-protocols_1.20-1_all.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0_0.4.0-1+b3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-image/libxcb-image0-dev_0.4.0-1+b3_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-keysyms/libxcb-keysyms1_0.4.0-1+b2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util1_0.4.0-1+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util/libxcb-util-dev_0.4.0-1+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0_0.3.9-1+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-renderutil/libxcb-render-util0-dev_0.3.9-1+b1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xcb-util-wm/libxcb-icccm4_0.4.1-1.1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft2_2.3.2-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xft/libxft-dev_2.3.2-2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xorgproto/x11proto-dev_2020.1-1_all.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/x/xz-utils/liblzma5_5.2.5-2.1~deb11u1_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g_1.2.11.dfsg-2+deb11u2_mipsel.deb
+https://snapshot.debian.org/archive/debian/20230329T085712Z/pool/main/z/zlib/zlib1g-dev_1.2.11.dfsg-2+deb11u2_mipsel.deb
diff --git a/linux/sysroot_scripts/install-sysroot.py b/linux/sysroot_scripts/install-sysroot.py
new file mode 100755
index 000000000000..42842a184deb
--- /dev/null
+++ b/linux/sysroot_scripts/install-sysroot.py
@@ -0,0 +1,170 @@
+#!/usr/bin/env python3
+# Copyright 2013 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Install Debian sysroots for building chromium.
+"""
+
+# The sysroot is needed to ensure that binaries that get built will run on
+# the oldest stable version of Debian that we currently support.
+# This script can be run manually but is more often run as part of gclient
+# hooks. When run from hooks this script is a no-op on non-linux platforms.
+
+# The sysroot image could be constructed from scratch based on the current
+# state of the Debian archive but for consistency we use a pre-built root
+# image (we don't want upstream changes to Debian to affect the chromium
+# build until we choose to pull them in). The images will normally need to
+# be rebuilt every time chrome's build dependencies are changed but should
+# also be updated periodically to include upstream security fixes from
+# Debian.
+
+# This script looks at sysroots.json next to it to find the name of a
+# .tar.xz to download and the location to extract it to. The extracted
+# sysroot could for example be in build/linux/debian_bullseye_amd64-sysroot/.
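For reference, the sysroots.json lookup described in the comments above keys
each entry by '<platform>_<arch>'. A minimal sketch of the expected shape,
where only the field names (Key, Sha1Sum, SysrootDir, Tarball) come from the
script itself and the values are purely illustrative placeholders:

    import json

    # Hypothetical sysroots.json content; the values below are made up.
    example = json.loads("""
    {
      "bullseye_amd64": {
        "Key": "example-sysroot-key",
        "Sha1Sum": "0000000000000000000000000000000000000000",
        "SysrootDir": "debian_bullseye_amd64-sysroot",
        "Tarball": "debian_bullseye_amd64_sysroot.tar.xz"
      }
    }
    """)
    assert {'Key', 'Sha1Sum', 'SysrootDir', 'Tarball'} <= set(example['bullseye_amd64'])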
+
+
+import hashlib
+import json
+import platform
+import optparse
+import os
+import re
+import shutil
+import subprocess
+import sys
+try:
+  # For Python 3.0 and later
+  from urllib.request import urlopen
+except ImportError:
+  # Fall back to Python 2's urllib2
+  from urllib2 import urlopen
+
+SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
+
+URL_PREFIX = 'https://commondatastorage.googleapis.com'
+URL_PATH = 'chrome-linux-sysroot/toolchain'
+
+VALID_ARCHS = ('arm', 'arm64', 'i386', 'amd64', 'mips', 'mips64el')
+
+ARCH_TRANSLATIONS = {
+    'x64': 'amd64',
+    'x86': 'i386',
+    'mipsel': 'mips',
+    'mips64': 'mips64el',
+}
+
+DEFAULT_TARGET_PLATFORM = 'bullseye'
+
+
+class Error(Exception):
+  pass
+
+
+def GetSha1(filename):
+  sha1 = hashlib.sha1()
+  with open(filename, 'rb') as f:
+    while True:
+      # Read in 1mb chunks, so it doesn't all have to be loaded into memory.
+      chunk = f.read(1024*1024)
+      if not chunk:
+        break
+      sha1.update(chunk)
+  return sha1.hexdigest()
+
+
+def main(args):
+  parser = optparse.OptionParser('usage: %prog [OPTIONS]', description=__doc__)
+  parser.add_option('--arch',
+                    help='Sysroot architecture: %s' % ', '.join(VALID_ARCHS))
+  parser.add_option('--all', action='store_true',
+                    help='Install all sysroot images (useful when updating the'
+                    ' images)')
+  parser.add_option('--print-key',
+                    help='Print the hash of the sysroot for the given arch.')
+  options, _ = parser.parse_args(args)
+
+  if options.print_key:
+    arch = options.print_key
+    print(
+        GetSysrootDict(DEFAULT_TARGET_PLATFORM,
+                       ARCH_TRANSLATIONS.get(arch, arch))['Key'])
+    return 0
+  if options.arch:
+    InstallSysroot(DEFAULT_TARGET_PLATFORM,
+                   ARCH_TRANSLATIONS.get(options.arch, options.arch))
+  elif options.all:
+    for arch in VALID_ARCHS:
+      InstallSysroot(DEFAULT_TARGET_PLATFORM, arch)
+  else:
+    print('You must specify one of the options.')
+    return 1
+
+  return 0
+
+
+def GetSysrootDict(target_platform, target_arch):
+  if target_arch not in VALID_ARCHS:
+    raise Error('Unknown architecture: %s' % target_arch)
+
+  sysroots_file = os.path.join(SCRIPT_DIR, 'sysroots.json')
+  sysroots = json.load(open(sysroots_file))
+  sysroot_key = '%s_%s' % (target_platform, target_arch)
+  if sysroot_key not in sysroots:
+    raise Error('No sysroot for: %s %s' % (target_platform, target_arch))
+  return sysroots[sysroot_key]
+
+
+def InstallSysroot(target_platform, target_arch):
+  sysroot_dict = GetSysrootDict(target_platform, target_arch)
+  tarball_filename = sysroot_dict['Tarball']
+  tarball_sha1sum = sysroot_dict['Sha1Sum']
+  # TODO(thestig) Consider putting this elsewhere to avoid having to recreate
+  # it on every build.
+  linux_dir = os.path.dirname(SCRIPT_DIR)
+  sysroot = os.path.join(linux_dir, sysroot_dict['SysrootDir'])
+
+  url = '%s/%s/%s/%s' % (URL_PREFIX, URL_PATH, tarball_sha1sum,
+                         tarball_filename)
+
+  stamp = os.path.join(sysroot, '.stamp')
+  if os.path.exists(stamp):
+    with open(stamp) as s:
+      if s.read() == url:
+        return
+
+  print('Installing Debian %s %s root image: %s' % \
+      (target_platform, target_arch, sysroot))
+  if os.path.isdir(sysroot):
+    shutil.rmtree(sysroot)
+  os.mkdir(sysroot)
+  tarball = os.path.join(sysroot, tarball_filename)
+  print('Downloading %s' % url)
+  sys.stdout.flush()
+  sys.stderr.flush()
+  for _ in range(3):
+    try:
+      response = urlopen(url)
+      with open(tarball, "wb") as f:
+        f.write(response.read())
+      break
+    except Exception:  # Ignore exceptions.
+      pass
+  else:
+    raise Error('Failed to download %s' % url)
+  sha1sum = GetSha1(tarball)
+  if sha1sum != tarball_sha1sum:
+    raise Error('Tarball sha1sum is wrong. '
+                'Expected %s, actual: %s' % (tarball_sha1sum, sha1sum))
+  subprocess.check_call(['tar', 'mxf', tarball, '-C', sysroot])
+  os.remove(tarball)
+
+  with open(stamp, 'w') as s:
+    s.write(url)
+
+
+if __name__ == '__main__':
+  try:
+    sys.exit(main(sys.argv[1:]))
+  except Error as e:
+    sys.stderr.write(str(e) + '\n')
+    sys.exit(1)
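Given the option handling above, typical manual invocations might look like
the following (paths are relative to this build/ directory; gclient hooks
normally supply --arch for the configured target, and --print-key takes an
architecture name that is resolved through ARCH_TRANSLATIONS before lookup):

    $ linux/sysroot_scripts/install-sysroot.py --arch=amd64
    $ linux/sysroot_scripts/install-sysroot.py --all
    $ linux/sysroot_scripts/install-sysroot.py --print-key=amd64

The last form only prints the sysroot's 'Key' field from sysroots.json and
installs nothing.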
diff --git a/linux/sysroot_scripts/keyring.gpg b/linux/sysroot_scripts/keyring.gpg
new file mode 100644
index 0000000000000000000000000000000000000000..81e2624c5f3845a1642d9379bf47ed2221a291a4
GIT binary patch
literal 94381
[base85-encoded binary payload (94381 bytes) for keyring.gpg omitted; the encoded data was corrupted in extraction]
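Once a sysroot is installed, a build consumes the extracted tree through the
compiler's --sysroot flag, so headers and libraries resolve against the
pinned Debian release rather than the host system. A minimal sketch, assuming
clang is on PATH, the amd64 sysroot was installed by the script above into
the directory named in its comments, and a hypothetical hello.c exists in the
current directory:

    # Compile one file against the installed sysroot (illustrative only).
    import os
    import subprocess

    # Example path taken from the script's comments; adjust per architecture.
    sysroot = os.path.join('linux', 'debian_bullseye_amd64-sysroot')

    subprocess.check_call([
        'clang', 'hello.c',
        '--sysroot=' + sysroot,  # headers and libraries resolve in the sysroot
        '-o', 'hello',
    ])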
z((>%%g$$Om{RMkY!F~DG}f1ELUHFoLwo$h?mD=o_`b-ZfW6VN;G>-|rynh*j{_sefb8xg2jKia zxTrw+zvS@;n6&$pBFa}AS zIFr{Y2|ovc&49NQ9(<`<^gf8oE;X=m2HKh_-ohBp4oBiECm2ujsaz2%N@(?-$}XCx6V)XQHI|~ch#nR3vNf7{fq@u=b?oH zAj^=djv|MOTeSo^HMunQo)7eCMFRZT)99N5q>D-k*bI8g^zmtzxQ^G`w;2i}RXvAP zh3rR34Kb?))>q(5KUZZ$t{3W>QbK0km+X#KaF}35mdK5f?&p;1?I(=jpq-d82TI{e zXW92%%AA(`7m-h*^{*05J8oFw@dWrQeo`C(QHDAo(H8LV67Wfq@sC^d;hh<(CGVZt zx4QTfFX&YW#-;oIkaJJ$bdVu+?PMN2s4?cLulmdq2SaK5pz*^oF#Og#{do4nasbr_ zj=jUgj&<*U$RkMoFM0gI=5m4I0lMLvY9>YKycE@cR#q-GgtQKXXe6@_^Ex~fE5;kH z2nMfSnSoaC1l!_&w7y%qFLCqB_HB&u&4DfU=Vz%SM1#zT^U7iWOysqMYtp3rh!!1G z_AIUq*Ur`tlaut_Hy5k(i4$|%nac}RyS0}R4LopE#A+8CFL6-MFo?T?8H%^qZ$)N(=5$oE?hnn>alSQhroL& zXmy$8Dq{dR%)gpurb47jVSLiKpBf(7G6dy1Yj3fM^V$!-*(IakGL zh^*X0(o-z}%*zALRFp8saD|2tNnT|Xg`y6AL>YEY|*r0UUlibmt?_m(8cSnw~^{MQoz zcz}RwbT1_D?Xhz`d=zkzQ!&gJjwLrjHUPIlic^&IM+X)a*xs43>pga>Tmsv{Az1C2?dX%_~_#Bk6HBApZOb7w}@e6pI^?GHO_|pMVM#W5i z!Y`T~tR2;jnPWui^Pjs;DN3O69+NB^L(mAqXhJknP{CtX$FX~u|ni&wYq8=kB*W{!11q4OX z9S>mh9^SvlZ;bO+qGS<4Ka5t?-*C-|3enm#@U^)c|t4sdu(!>J%h^1Nr4X+9>kHp;nz$O z4Cpzx^OV$z^691K}KT4S6q9&K0JubP?EOpqD0bNuzXA3NXUJOC*k^*b-3&zO||N zcA%{~B|ge_K;NTq4FjO0oru#x1yMoAy0Y_=S+$tTRM_zU`frt|Q3UVTS zUE(o+nQJQOU>te*ag*Ji#Z?evo)xrEYLeU%OUI360hY@GGZuyvCLMv>FZ{gcX#bix zp5Wj7+G);6D_g>I(auNo*t)EItX^*07o7dRQb~c(_>coU`({CCeODJ4| z#B~&f(tFup>L`vvZ@7aB#Vjb6RmkAhj)(g1AiDqPKKz5|X1)S^avQVSRZ2GSd-7mzy#QF!(J>CHSWZtHpUyFCqZ>J;W)2hg9{2LE3XLWFxV_w$i&<< z3+<))?GRZ7$rQ+oP=`;%`l!RL!{GxICT}XKtO*4Iuw(BddcPWFuodd2jXVNyhgCzB zH5S*>oit4?JoD$4cJaD>47|Hczc1SVo|e2y;cyMoX_~=-`_(-|?o^!MHMs(hFwY^` zrdo}v%PlTuPrNT(Nn&+H`1h~&uMk}gy}!QNA4C__AN-REK;w)^y?T-wW2p2qu#86) zYU{(+qi&NO{LY|md_1w;S=#!nkq18`GfkluDx&Sj3f#u|*AkjqY=$^J2l!D~|7i^e zA{!gT6jCS>42exD+LQhP=@hXqRJKMHB^)>u$8f6vsgRC&S}0nqJ^IY$>Q!{mf?gvX z8B>N%%H!F7B=w1g3dg?riJay8)gXq?h{`J-VAiC=`;9tV`G{G<3 zoOv`Z!W3UzClhPZ#UcjV`$`dlDLOB()TW?(C@{&$+#oaV6-e(4sB(AEmf|4PGhG7W z;(D-m114I7ay2{pP+njDhTSAOQv3wt^9x?-lQu`^CQ1Gfx28N3akt0(9pW{q>|QHz z1`$qCUcY0GJ$jhj`7wiHk`jTWyZR2^XZrnZUiSR0tBPzaVlR50df$=t2LW|E25LeO z9wXgz6NFVgLXmRV)~u6l5}m@L`8qMjrwSh&gRX+q*~<7%@dJ6x6G3hPacdyjfQf8H zlswHo4>s4qB|qFEQ^B~Y&0gn0T8=97pm8(C&Jk|+N(89&E2nvH2h#t(oct>WwBzxw zul9!lg?57n$bzc4x`x7qpSBez(lla0zvh7U_e@U?7VJ@+m3Y)7zW=2J!k>_8mlEH0 z@jB}$SzL~c6*N5vk&3vFCgfT;;uKpEbn1fK_ zc9oKmgmEquPNx`+$1T-jIm{pY!wL1AT)R2+(tgj{S&&a%P?jzuFQk)3d9N?^7u;D@ z>1p|GToXA@a)C9=hG~!E`V4G+jaS?5&ysWxwGnIh`wagJyzF>~@6f3c%N7(x{+Z$o zwfC<j81yt9%T;do=@2FDt z88VRFDvy3KQaD(=u#t_ut-jcMq+5^F+Y{>J%1BIuFmE#Yt_kK$?d6z+>j*0&Xx9%r zU9%?#Dwg8492$H;bfb5P{ERCP^Lu;M5J6V$fCDUPSe_~}$jq5$k#KEz8tFqBQ*ij1 zTc%KX*DaGDDs)h7M`~VGgsQ_GQ?bpTADO#N|LtwrqtLfM>N#eY=1BGvr4Hw=d0+`_ zIRa%6s6CkJP*fuQ2AcZVpo=1ghTq-v&E}U?%7nC+^AYc9if2wl`%fVCI;1I5d9v*y zM#8_lY5$4=RXzVpHGde;C`Y(2Z!JaC7k^*aK?d`NCt8{!(hAXQU9K0(G(HQLYoJAC zZ+&yYN}Ra3)w=7CVZx8c;h|5fw7&HVlR}*;#uA_?o4WZ*b3;eYxHO397CIkfNG;2J zXpYynt8Nivipw+}RnT8d?Oa?H%SNfc^wPY>tR;FOd{4M}MBUbf3ee4Mj&VX(WpeiJ zz)5JlinGg7O-%aA&msNp{Rv0j%}7LVu2+~m4zlp2>&&IVXN6cz@J!m9J0ViAo)Z8^ zmytKEWO?#crxhATcJ5qr==;_&zc4e?=EF&xi=`>R*M|^esr>1pem3N!w_@HT(n7NueoQEYp0F9{c`vv>GT$fAv9fg z;9v(KNces#;$-Q^F9h_;AXOrPo}UfWmLCOMva%&j(`$2R2rYJui$s!qjIfMi%%uxU zadr8@SkY6>V!AKop6k+q!xzE_`a*LPWiE6cPy>Nvx5Kg(Rqb%ivfCB;A!La*~07`Kn0=Ys~%tXud7Hoo9 ziWxe*$b{B_A>17>{htizU--=!@Gja&#Q2)tG{lW=4QURZo2~2fak+hwqaZmtG44<4!5z;ZND%!(_yTfb`o~(>kUQB}{=5||r)!hz^6p2L?2ffd+Zs3pS+lR> z4LqwpPTsG~M~T491serhaLZW9wccbV&f9m794%K|W4sa&8W4hOx8UXbtcFt1Ec!C` z1}e2mu{C*+Ki3!owVSBuPknXYE4d)qVC%`RMxAkQ;sdcld}Q%Cf4-wymr<<2;c5{fu%#qbbGC4!JK<)R6PF& zxg+z}59rSmhby=*skr(Vxu_`0Gij6r_;-rYq~z<4H|z>}M9dTg=*V!{#F1)s3t6Nc 
z-R#b;Wor>TNLA?vyz^D{_@bN!>fAE^dXVf}r&osCT6THo7BVlP4F7FM0XsRlFDs3P zPZDK4igoo6a^25PXs~1<@{6;Mr_DQofz68#8|FPP@Op!80NN1Ailv45Yy%nH;D$J4 zB$2AbH!z!nI{jQr-j2e#LUftJ=c;qb;$uv(CU}G1li+oBkxG5Z062xt(gR=^nR*#y%P; zDegqhRNUS4K5RADxxL9=ks2Zf@;3{#9fB%@L2+uwO*Ux6v&d^yPsrndk4U|g-#43m zQ8GAX-LCbMg73N;8@1juFZ~Dw%?f~z6ek3;l}VJ}P-eH#?CG5p^fU41U0||(jGeUB zex$1T9us?THzU;b_oHixsnrA6VO~ju>(J0KSnrP67>d(**N$|YqI(Skh9r4ZM%L6*mEz*g~|*=IH=GA-;z@p@x*JE?#6Z6KOcS2kQ(uz8PLau!ynOr(%J zPkEKpSA*ZBo1mOGul!-MLXl`OG#arD`CgOJf&MmlGXpKTDe2^Ikj0yQSMUqfsGF)P-uW>%d1vQZxIHp3Lugy37|zb?=-wT7(orfH|KA+ji-{ zif|KNqHxv?rC7WbmNd``are;*!ybS~YxMNmkRx5fo| z&4*_|$8e#u=iBO96X$&8TLH4JQr`ro*9yU$*gOaB77!D}a>P_j=l2Rm-d7;u7?K%}%rJ92k46DiB5{zpJCV~4V0LDxhCxx}$vNC(b z8BPa(u|e3FQx12@*HEJ_ewyBeO(>eBL&r(&!_&W{H^g}?)pe^t_Bx-LwXGLx4_g2m zhFQ6kJKA3MZt#pNVU4f$p!$2+sqMRu5;;0?6G~163Te^Y*ya%|`2>4LdM5nx5Bb3_ zbo$Fusl88^y2!pojEQeH)*(N7Mv=%y+m?!ur(Q zH~XGlFz*<|#VIklUHH)1A`>SW1NSOLNQw1ghPwOFw&6vWY{!!@)580RJyovUZ5aM* zx@l~{xbnygQG+@o9b5}e!%k;Z_=Eu8^GzCNO1RdlXqpam-(TaCP@ND;3M2T&OjRf* z9{{is^xvS!_0lAPPgSq2$VflJJZzOMC#txCB8}3{Iuin1@|+y4#D2cys-?D4Bgd=i zimDav;(Yd`f+^4lGG0(MYu*Yb2b|O870yj`U>rDx2L#{lBfp|0Ut93cVsj0H~8 zlYU&O&R0pSh513gE*pP~@45{2cDL*#EO7%?lCa3+NhT7U?uith98FIQvle|2q3D0a z^M3*TH803t5KiAQC%pM4#KrF>%kHOv)ve|=}82@a1ln(`cHQOql+KlyCd_U+b+HCHUi(N6P0`=7hFL}VPin=4D zuu8C8%V7~hYk8o&>f`$WpBznT)kOZv?cN4eF${tV=#r~?f`OTmOMLr=?)+oI>v!{e zWfhMgICBm^39j>zXx?}CN&*bUT<{cyf-DAU1Dv-<($<0uF}wx8h2#Jxdarl0qrJ7K zWym(^VUks+1it=y!}Cuc3KPbRz9cg!@?w5#fGtp6Vh6s{^~PpG&;N1P{L1^+-sMlD zOdLBrz}{xufYh=1OA6|4$MV(U>LG2nUPs|mNd(6WQ;pQ%{HK*x)ov=IFX(Zm!X(${ z;cWcP%XMPXwLv&c38()!@z|Kwo4Uc99mf7|e^&I2a0H22r4M5wzODB{{1B{c&E>I! zN>4iT4Xgz{0r9*BFkgT)F|GP^%8HC{$> zAlICjgAalQ2OTwrb}P}XBc)W#Sg_#rBNQJ3qPjCqskTX@|DY+cdDsa=uNaZXMSq|jbK_4UoB)ELq^CbWaYWw>r*StK}d zC;b9S6KUKw2;HqO_GoMkPfZz(^cU8O^iZy0_7xdD2HB9zMZU_O7ji;^^8SoNg#*i; z9!QuPiU}*6@UH|Pz(cOmE^QMv*KupjLgBKZo$apZKILQRcFeaLtRXCFRhl0H!xl0H z#X7ZJDE>EK{7LU+Dz}EvA7$Qpf!4&F&j8`-sKmk16dHLl=ie=?v&-~G8HN#@Ze`uz z2-QkJY@L%e)Ye_fXMNPKXC~cIGq1yckR1{w$KI7@O;YAg{BER^IbZ-U3NQv2OZ&S# z$WY(@lEB)i(qX6XV-F0mv#o3Xn*>YLcEbEA@Dz&iY%- zaY*+!C>=g-Dqa88E^nY^B>0Wl>EG^^g^G5f)75-;QR@jM@l#f2=+uh9v}@EU*#z;L zX}u{i$s^req#VR-+3b*t9D1rMo;x7Iy76iyeE{5Gljxp&u1X|IVtz-XZLUSOL4@1M zH==meEU}3YXrsyF@a@_O1&5Dm3oDORLF>rtOk_ZN?M44h2%|G7By%eEKIU2_+UH*G zVPZj&o(voA2=d#cBCTo|T&mFH3cs|G#)Q+=fZ8laCAtW&l5-#(+{ZweokCVW@E8b$ zev(63Xt_=Fbsh8X7jwXuaLSdYK#)I+5&SR7YN^$(^;2^_5Zm@Xmmp8RDa-JwM>l4D zYuFma#`w~CN(p%_7I~?a1fsP;}?Bs z`Fz&LdiUS)@jCJUQq3P9FAWv$lX$ChsQ{+0Qu0!=f)^ni3a~T?9ds9Aw#;QPf5E7; z8-Y!KClov#zW04j$w!SqKcuAVkR_mtRuif$jF7*JRKt9^W=4_-1vgDFK+y52t7o8%NqMY<(W31%#-xl_fGjKQw9s{sifl3Fl)*x{XRHVEc4XprXMhX0@6$lR zUj)WANHEe&PV+{su|9I#S30*pU=0|j&oEy=I5muwc{SCxlt1ey-1eA%ti7C49TfT7 z6B17c;y_ql+8}a-8a0H}9Oam(#}DLUD!JBx>ZtU0lzoE~g#Zl5XxT_Os64(1Xq=|# z0cG`Vc%e+DN^-ZBF|_(Rq_1^>t)fr$^J7N5B1oRJ>HS%uBa`mViSwAc zuM(x>^c(`!&`dXvnEH3m{9lFFFTeh!nm^(7o4>oTt3+ZKOq-w)lWjt*le zb35vD@ORCSv5&5klo<0iy+As&U6cXmin^v%>NFu7tE1aU%YP_+`#c3KGXk?@AjM z%ZtMbGB+c!A%8m79a%lEK4b}yNocX%NU@Wyxf@k%EpOIun41i$eRP2Q>iPX>4%FOR z$%d8?WH>)*1jB(pTpnxVEF<>On#3II)3bRy_pc*_R{g=z?;}K=mOc9wbQoyls&=3e$wTGPR*Ewoie~%6x`hiWRPUE*?v9*(C2+zEH6^AgY2eD0a z*6}L8GLhyE^=Uz32V{N%1>sC%$b$AmYz+7_+>^Ed;s5M8Ejz*GbRTZqpmsQ> zS1ADm%Z@2Bsfb^gm3Jl{aX&CrTDLl1|d{Q)!G-^m<4& z6#?Oawz)(cLLJa}3xkb!m7FG;Bd3 z)X$5%fT28lFh=Ro6*3Q3mmLBrYX8lW_?MSGvtk1j%Ic3Wa;t^1a|mB&=Jk;(3bXiypf66UwTI3|GzOZ-#Im+mJbyDm#5BO8}uvv z+#MB{pg>ynhgScWF|t1|hg`vZvNvxZ!BL=_A1?obSc8IAs(i9qhV~H&JD`Sz30=to 
zoAB!+a@czIJ6D`=$@`AmQ0J9d!6%~S!whV`5U>FElTV40OD@xFu7KkMHA0b-RlkXWNzHYZOg{XK7=@tLphIqvlpP0Q=lbahuNpB-*RLBWHw1M8neFInTKa~9 zE96kzpQ_zD026-uF5ors-(?v|mc@$vbxOB%FgYAD^4$yp47QPj5oBq?JhlQv1m0LT zVy3>Qbip&uXF_)Oqc~v`rd)z}l=Gdy%!cdtFC&pnbi)G-X@V{A%*bCs|3E>>u8UuV-|4uQtZmUd;Eo|}~ zvhgrsTd$nqh(7EkOb={i>GuHuKVAxU6;&IBv0@ur(6a`22OEC~XcLK_bAiHiWts)# zmYcoJG2YwVK_phf*hqNXRu;kMUXn{g&zt*^Mk@K9PaSzM4`NoOhI^@^=025qDIK`_klooLiF{H{GxPxvd5|bz8|9 z_5$CuwaT=>Jl7~iRDS#gCp`}9XHII5&mVINU2ILIr z=wY0lfz!N;epC9a_cPp^zI0(vV7~^hN4F0a3Z*=*$Ko=-4RsjImp!6pa>|C0PbrIS zfgQHCb;OZt$)W}BzLM}pV{aN}mPV!!2E_g#9GUP&ObcU5BmB){KXw%T2;rLHT)9DM zbXOLL#=H^05Ss9Qv;@j?;UPgC_`T0_13u;1YiP+#iu{051J1Ig*!S$+Lori=ULzpV z9KbT@dofRHMmqf^)uV~LPF)b)wqdA;uB!g;3o6q8`vrWtFTY_nBD}_Ql+8EfLc9M% zP{;moAJ~lcLAzffBgv~OU9{SO4@Jd7fUP#_CRPFgFtBO+Lp=2WjM53FZMp75!|ofT zJ#5_v;BgtWOwOdbV}+rfl?fZ(xD+dTz|2i}vwbQd{Q+@`^$+7= zcsJSlUi-Wlo_6%F4GGtX;~P@JO$3!(_JMW6sB$DB{ytk@>*CKtzzt|1rAiDF(Xcc! z;52k>q+y4rM$l$P2EBZGr)!i)-)C&8Jl~Z@nJN=l3fG{k;pwC^946p6Y-;AUe>Zl% zEW=`OEOgnPksaapU-Ym8JgYMPcC3YJo0|Cby-v$jXdP`~^es+?bD(gsf4S4u!k0l%K~%U1!z^GvT6_dB~j<~V@>qR5YP13FJW^lFS% zl?|@ED2f@LA%Xp@vmh8T$sjBmpDI2ToPa&IL=%$*32srFHiR$QtQtS3*XtfxGW+)h zH6m*@bUYGMkmbn%)W!eFv~06jZ+cLvlb_ zDPJ^U9k+76SKq)Vp6Nsp0(zY%`*b!hDAF-ri4ZCU++X!WY+w$!4K1SRk@ZJ`usZzS zt3s++^s`I^*XDpIDAvgl%06oT6>F`DbjX!M`6IIL7FzxaVDi_P7$++OJJPTHa4pJP zHSbNwQ>NuIjTq6B*VjtZffhC68c^WrdJe8ew`TU)r4^5dfK^bZcJUTA>KIZf?-2aL zCEs(F>R#@kJm|$tCRF?q8v~5pX>CWV%x9nz<*8|BfeJ`YrFUq3KIk4{l3})4oT&L# zZiZAd)r!qzTQ)<>5EcGg-d2Vvwa`5_jrz7SIH^U1@3>1lhNzY>8XfVA#nL8S_oUVZCayl{}FG)wfm zYGQ7Sko-Wz(X;)emsU;2ht6-N7=*x%sj`!Y%<_~W45(TrIKw8VO>r5H(Kc4erwvclfsL?D$kbDSPa}>|)4ybcAKMrI2 z##Iqrqk+zahsQtk?7WNGCH5&a_ znVRT6`+;HSr@)cDGiApSpd01<=*&KPjXBl$oY9r|2>gEKIRv3|Ev4lO;UV^1y~W+T zyxZ6bv|Ii7$zAYfCBSMTYw!-pUL|MbuPZ4YHb=Gc_ATFCa((zh4nCI#WvWXH#%unhNys*&(b6X6|m%{iP$+lhIJD;kloh3h%< zC)P^uKepZmJAwsk2y#>Q1X zfRuRzwO35+mj-9S@BSoC)F<9zGl=QWJmnlZWd7vPH{x1CqxhX6y1ClJCFobK7d*>*d3`DA%-C@6?5pN0B9;|p@m|{2}s~d<~?cX9h8vgjJi;xM* zTW~DWqKLa~6Va=rlX8#FzCie3Hp(MBYzsx#4WQ5{0#unKtD=8o*oa&?8T=&!{^Hvn zMdC|C1=fLSM_pfPMnU77QiH76Ght{7laEpU`(Z^CE-pRdb)Hp_{sM%hfP^M?Z?L18jVgH+Q|j9nh^z3(l3`j%Ev?>KSUi2!6gTSyBY~ zP)4+Hwr29H4u|V=K94%DJ+ORCR{ZADvPD5Z4U4OAZM6sXJC918!Cx#LgpUP#+nt(g zwjt7#G$y37`jW2Pjw?y-L;eoIdE2;{5}c`aZ5s12L?))laWGyM;wL8t33}+8$ASj>kFU^m*aohCS5n15Gu6%T0g!`{p&vTx zAYms`P`_jgzEm02pjBbwum8w>{+n)MnH}1}4 zCE;i9^-2VT;W$4L6{WM~@X8ypxc8S&5O6gd;@m&l$6ODAdNymh< zP2P+DZrJ^E_(7}wY~wHMh;1B0BJ2)bF&r2CLh#|qc7!j^vTO?L;jt>yzL@w0fX12sYQg4R7iNYE@i#l&mFvy z2mkjee?HN}W}*a6L$l+1Xk(csOIuRDoc1PIj9}oV;Q|bubRG(bnse1*Uf?;0gwjfk z0TPBc0pRA6T_qm^v)LuSfUTMY=bl4cY*g)Y zeg`@ZLP=OQsQu0;SEkn+^_Qd@Yq3K%tIs!vWXr?zgclc(sPWF{W|pKCNH+|e+BpPB z%yEd%IbHc$uS!4>S=SyZz`xqwDtkp z0CNE^Iw`c?-{()>!p^ayIf_mcG9p9=OWlxFH(i^-tg`@fP1o4SyVqG}QQj@ZhNih{ zd%zKgayyYSPp9;?JGpvvGdg1kB%?DzkkB=Zx2NrJ_^!QQ+_Vn)FZp^I`5)^Hj@#{B zEG=uAPy5q)Ce_m0;Z-@l|6Mizyzxz|{`?=R`O`(yKnV{JwAnBC(U&q?lQ8LxxhW0L zv3K{o6|cNb5fi~($hV}>C?xnzYg<#iCju98mj^P%#BC{XHqR4MA>A#0mo76f`SKyk z?`e8-m`K$L6&1;upYi?nrdr&Z-xKq++;;Y05-eb|Zd#rwe8V~a1zqe&zARF7+0Yt)=?(9L8(OWZWGbkC_2 z;;$@#v+?yY2065Rz=+h10ySeI^aSm@;gt+Ss<NDAgr*hV;T7KnmH7+w}SWk3|oeYYfQU*$bk-MUjNkx$3;cPZA9v2(A-ai4KK z2sAp}v)CiVE@(B$lTa@tw{$Kj@OEy;7Eg?S$NT>^Gx8sadH>8Y_{*TWy!`85^JnkwPB7E`bmQR@QBoqE*bhqmheMs)h!Gv||S5}Wrn z4`JJGwAYgN@1HUKOH7aUbPkHrrC<{EO&=4tw+DSmqx!spdD-aYsY=u+!X60#Iptj|MLwslo&Y}6;;c>hF$IoBgXEdB z>v?>%umj@yvQa$_YZemx>Wk6mDd>}al6yL{m}5E}Z#qdRn-C}klEjXx3bAUe)z|$z zuqvcwkou*L2**+**!TFCdCE)?$M4sEK%s1cX)$p8X@{Nr9}ZsJ_uF`ZAnmXojPI$V z>NWUWa!YABKw2txc~TH_6s=~G8*HS;7Fo|4mD^#w3Vm<_iAf=7)HGl8-!8O?82K&C 
zg}41h2`#{z$0CspzRv&)_F}R`-i6z}xuP_X2*ODyVEN!RZJ#F3304NH(3uyl1}Dg4 z?cl7CQ-sSX}-*`WDM$mMB~_2^qvp1s8VKlNw{!|lGbcqCtdfVobEyi&Jl=sAC3h+$r&co5=l?j}`Hv_D zD1d+6V5HR$Vfp_S#ZcA*_hk;g)0En9vC3-ehAzqfqiaA2GTAU>LzI^ySc0Z~|0*)+ z6dh7BW#)eQn{3Ye6TkpP>7g#oy+Ufr(=touQ9OpuO@hE~Jc(_Oid>2dL2W2w=nwy< zP!wHM*+3P7K~E0e%QY8mW`CjGBzha=<|MwlPstotdJK*CpttXiYiLCKK&`GN_y_I; zY=97n*TSW9sXN*0Tb4_5%`zXIH2MXyUnK{P@b7t{ZC*Ldw!^*!UHg$}aBw;MR7diT z(2qRuW~&xa;H$SA9=@6Rpn2==%mrGTC={K;k--YUTGLYm^leo998^CK;bRuW5B;-( z5?I=>&&jR_wDaJlQ}@;h85Ktwk?!(1%3C;{CY6uVkZ9F=$ZiePnQRkD9geQp$=xb7 zm11(*rOaI?KpbQ`SQkh8RUz=fyuuX)azCN+R>R;dUX-6(GVyZ5`qTK5`yL$JjxvnI zd-nAex zHcBWuCqpI*B_ik1Oy5kUpkJn4BYQ)Nau$vGELb`6D8>=Fi9inHV;-BRo_i|WCcpd6 zc4dIZ$?$~RFfJFgTnWT$fIVU9%~MdwEQO-Af@%Jg(EjF;?Vone-q6=3Yf2$!?b~j~ zE7EN&x`*a`Q=s%02K=AbC)H5p41kTH0G0@|U?<#6CFdBb?O&@w7G&h6SX7N~l(6>U z=@Bd0yhD>)q()0IZphN#0~*EGSu@Ac1rE5V0(O$fK6E4|E}f4ta`wqeMLclZYDtHo zkV{RQ%2<}Tu7W3-Z~G&+i_C@+ix~kd?gA1uH^u0PBOhNn{Vu%CL#eJr4|EVqJWg4Z zNTu0vur&0+oZJt`98^&!ZZsF;Hz&y|>K{IWnJDZhm!FLX`{n{&@X zzqNhlyk%*sI=sN3FgJZsQQa0@bRviR64+FI`|wlgcPpDJHQ+y$2`H>+ zCt_%z_|Z{mZ?I@qoll}Su~;)=vnvv>F)nbEsc(8sJ5j>a2u+w|` zdYtl-{1i$e8>tfl86fwlncY|NNwP|!Tzt*gVsa-EKIaGs4)||gl^uTwtBj#N1c*Z+t0HaUFY2Y03Pc%Y5 zaX;Hb!Wl3!e$@MEF!tD}_^@f^OZnI9D<}!(y@3%J_*z>bj0*EkF7X|Y+tcXH2yY)| zn^FIbCXX%~I{d^8S#L~pp-9OxV-9J1fe?WeC6)-9jy_=&_srgS4BuYw^LATxi!VUs z7pRB}&H9?9!uw)HF?IZZ;gi3`fdA=$^|u)CpM%ujW59sI8&=}bI5x?DV1#p=$8&6N7MZ=ts9((K;N{WJt`2aYc6Z#?HTTyD#mbn{N zb~}p#j!GC+pW#EB*G!F{8!Y|Wh2F7yo~4FHMmAGnPCh^0 zIi4usp7?;rSO@AfFKeK1dfi&fytpcyPw|j=Ke|!&7eqMlhL3jDF_Dthk)6fsXf@~O z;S_X@7cwp<0xVs=oqqNzglig?ULEN89!X9fEEJQ_OL`1di_2BYv!RF(pTV8h1GxZVG@ZK_?%K31PS4> zAq2biSC-(9g?3Y$;dWZ%vPviv>w0F39OldCC>1z6iZh3hr*9GXAG3Srj2N*%7&hDP zVUnOK2nt-pV(Nr08WEdzC+q^TRUk;v)oXm=QS^{@Y(wKTO|vy$($$6mDX2k1*ulgI zJ_M=D^kIP-FuqZ*LP^#dq1g2%vPP$0^Is^tU3qp~I&{u6c#n3k@N28ib}H(}adwHX z3>quRI=Q2F%;^RC@n~G{G5gv;!Mzt2CVLc$>94Ut!^Jjkg;D?z$giQc_241nc}QjR z6KP2yXCd`5F2t4{uM_xlxn?T3;IkagAv1|dIOWUmafOK1}6 zx%Lwgo;_WWep>mMQjpQqF82mLEL~2qyMpsE$eF@rbF#fiMG8>nP`E0sgbiDa<8xI< zI8CMhXJ5^)@6_M(y{^`E4N3Anmxjf$Cg2VEwI~sv%Iq{)m<*N1ca-=Wc7);nmwtnGr%c)x= zVDF1{7T4>T$XO-Yp6FDscR~DWE~~sc!<~uaBxt4u75Db|K3m+Ae_t-$9I&usPpiA& zp(arsFu4x78yz~HWy*qoyTL0lWg_H*;Oh!tX`V5f(!3i=;{rjoYeb(of_8qwenn_W z^i$Utpmz{aBzq)$3&O==V%+*gE9AvTm}tVmC3Q*{fu)H;}U^&tf zK^nIKzwV&hH^~1h(#e$%AH8A5T5`--Uf=tx^LqPZ9oPq^HNJO5BtzN_^O6+(>5 z1^JnCp5H3-}45nUSnPh};~IOhcG?U2PEOMwtWtGv$(ogyL0+jbIIB)bNKVfR8utv(;0e4Z}ySR}3dZAfo9yML^I@ycN=i2-e6_W`4fy2B?UecA@C zlQ>mJJ9E4nBsh=tHPRaT@>nko4L<(&-^hSu7ffQ%#CseyQ<8tEziB*W7F^#)wk;1 z;|D@x$$1|mRuf~k^%h3nzZ)SWt#;GX*-~JMjx5|^EgVIl>F^sQL;A{o4PW3fQdz3t zCtH65n)ZK>h^pj(4&pIFT!#BZ24lK6Yx>x#Rxxl&1g*!4`zj)N(lT6s@3G@g1$Go{ z4l=@>$!EA8deyB~OlnZoZf332>dg}%vBG4 z7mbPG6TK04hxJq#T`)V~bl<-l@nBfAsWZ%vbL|@xOPCBm+vIX~8~dw^DdkB51Jm-{ zbGA`0DzI*3X@Ly4rfxMW;l!eAlzDT+?%@@Zq*V?C*9 zg&bm{wC?djMnE|WH6A9%$a)muXK2A`5Tm!r_#8A&-9AFt=l8e}EkaKaPJ zq*pFp3vt(7qwrLdKLo94JVb`bu+&%8RB0q#ZK09(ErHqP#agzS%? 
zi2dLI8uGp8loP&zd(eOn`?O>Cs1(OCCxDj~va5rB`T+UrvQ5FL7p83SIUe<-1j&L( zgxbT7GY9_OIk2!)IEsCEtHC@@Z=(zvpc4t%qG~jbqR=gc*Q_f0g6oyzgFbGa@+wq26wB{Sf#OyKLDXo3;+geFxHZ3jG*Jf&9sn zO=Hq;B7i~M-@+cljwda=j}7UilQ}Aab1G4|MFsIXkCJV>P3Uk+uLK0xLE9hk72wL} z-%g0GK&1ZiYtG774LN7FF)ZQE5iFNkTGtI3@ms|n1PFNru($B=ep$b_MJ*=##b7S4 zOBQ3D_MT`eZU*?7bGgisgeak`8CfU5vBS*QEp*%dH+ zk5V@_@Yk`=(x1Kxy~EA!zyfDKw7Hm2i}XEr73-*q)k@sUG`C^Em`wgXH*NM@QB@{I z^$T5Zac5IO`KM2*(^+6GJ6cUxCW(6#2bAPKS6nnFvW%o75PoPZQECB)uSe$Tba^g5 zsB(H4kT1YuYC7$NhLz=gcv0H?hu>rAN(k9-V=Wxd4gr)5oaM$u>LkAT_g(vaygQ~c_C#MuP_LcU~3$0RZ{QG&tvTFKWmpUwMAQoTl#HZMoHf<64y zBY=t{Z@a!a^O3S?^~MxT*ey3$jEa!%H{6~5+jVU94G(xJfZ*`J^Q!SYnD#vO8}L_S zCUapTHM^|3OV-LVm0u2~qV>6benzDlbwEOm2GnOJ4ALQR8FcZc(Sc4@d5q-86;ES> zGl$kV3z!}I-s;x)`eM{iDmzG7n*NV)iZVs<4j|{e!&f5tvbDxhK(w|bL%;5wCeAD0 zZb^ZYy34+ZxqQw;EX;~cdf~5w6?+%kV+| z-ehP7@7e`C_mO(wY6bh5^|Vmv%L!@nNb8@vrm}1j3%XjR$DI@dVYLn31R}$hYlK1y z{}{i`MkOw{K%Tjv)z376wUBC_SPr0p^--J2Gdxel)Y`m#L+v5)2DxxB3boBs?fBX^ zKmB9w$@US{JMN()gzIORwdvM-mU*#vp@}KCOKJksBS0N-zpo!ZbnF?qunxNT+F2ik zeiJwTzb~kNVEBIo<%R_Z5Qqf|k=-V56>vZB>Hu%j)Y#w32G%5p)N{<|6_I#LntYB4 zA-=M=_}2CvAIo*%p!bFuCK{q}L&j^+genw69_yk|ln>lCrHIBh>JF8JsF;{01)E-Z zReGw6Sc@<^J60IV&(c9rbCHV(Q)K03k8H}-W;xfk_#&C%#h@Xk2BfN82G7M!H}iJ# zoG#JRb)#?7H0q~Gu=Yc@yRI%m>9ACTg~fE|ZTN2EzG0)1yU3v5VG!u_%-(*! ztsv}HhXUi}F1fDsd-0VA(WJ(P{D#9+L(0X@SPNxo)jvluK(FrtqIE`-lob;F; zg%d|=^YBK99QV$e0yeY5>B^Ulx(Gt{g>N)bIk}2*V0j#Z_dm|!*&tvCkGKQlIs_mR zo1AyH7OMA=7AaRt6#0fvY{3o->4jbH+NPo;_tT7wgD>ZVYVoQfgBnOhhR6g%rXSx` zZYol$FLh(neqT|L2EqMpGqe6)%<|7I?;jZcA3^av!@hrmQvX<={dP-}A=$$*Q|9}& z1;SO2F`-X+ZIafkUB9CM79mwkc(_*r0{-d`)p~f+{qyiW$`Wb%X*1Y-b`ixw4L@JaWQFcr`pcHiVRIeLr{+uC4G^IYV=`i z&GUPORnzLmNxQm$bHC*R+sXQtmI;>@j>cFW(d=rmYECz9Z`FG}mNl2=wquna$o;MEbQwXI*y{&Z-U;t(n;axuWGDZz0Qs1Q*4e%M5Ij4=ek5ZM(1Kb@;t&B z6ytCoofcyfk4dA;y3bu{oZEZba6U$CpbC8I^4LkI55pap76`sf)TiSYzfN~InV_1n z3HEEowVC67GaXe)as#gO9R3JCs`~g|M4VurM%n)yR;^%U;AC&+=!W{a2z%fQ8^4?`npWPP>4qe1d}kt-4zU#|Jy6piwef0=4m6+8&izDQ{|06EX{r75Yw}DHY zqNc$x)o;BdZS!iq8j<~Pmqv>eAALilvc0|!GiS=nTQLT$zeM}me*cIiE@JB-TX(g< zQ)mCsXJ&1l9DaFc`Jkw_<;eU_GavZ{qJi}|_l-wuQz1YcZ&Pi+k58qH0q2YRC5z^h z{cJmZiO-LA{{;`1qCoJtco4dT^^IuT26x;8sXi|f+XmI_){CGGCUw(=uW7Y3m9@J{Qd8hKQ9zC0Dr$ljDeTBSkXa)nV7Yu_oMsA<<4vH6qMh2)owZR8 z8(wMgEiF|130qBMH2q(d2oQF|v+3id5N!aS@h8Wn1T6wCXxFnQwp!-J^ zud9Z9e{wg)5KFjTFHJHY6pe)7PJ@>9^tk@F%gtno%+d4N-UioQ>E*F-tLtA+I^*^v z<$6UGxcn8k(?L!k944i2woXUyt1~;8_Pn?E`bi=On_RBbav0LKj0NY~zifya-!EKm zJYD{dW&c;0Km2!?Kb-!1d~A~T8QZH$5o9J>QNCW(O(0+p~%(U z4sGRN!~HHvr9Pi~AtWN|+YoUf4wZ){dV(HM6JnJ@4Dfq?(#SD1Uf6X?_Mmdgvm`hX zf**nz7ce8d*cW{rM*++z&1Tu>D;3;+K|-|Ay5wn9?Iv%QM+aR~_~5m+-Un+O=o;$v z`l4YXnRBnWL@Kn@n%<3~yNY%u2*2bus~|4}lP12+gTTpuBqvh3I!-1;l*5@7K6(s0 z(r?#A^N7#+XPPPfIDvzbuJ%oNn_B8Q?0&ZvVXC+5UpYmw&{lCb{qP#OUBIT7uj0br zRk)M68b^>fS^~*SnzaS0lT9}et?z}S%(IGPHYBfJk-~VumIv5AS8~kS3)8f!<=hzS z?}g1uqhEm`0-9ICF`KlJ!k;sq&)8KULJ&aBczNvAY8p=y6HR0he(1M{s2U_anK)Os(eChfz)-V+NW66zw;M7VC|+OEI! 
zs3(Rv4V*weF7XW*=9+#@SGmW1Skx|pXy9EsM!B_++Y}dd1c29%%>mgMD%XxVo6Tpw z9xT%|y-b&2p$X4e3OvU1v%cmcEM02-%rSbIP!JSKL7llI>`g?>d1ed-rgSk=#kbBl z4!-U)C@}`M=!%=|d69s)mHAF0(Y~vElAEd`YvIF?c1arkSmtg02Zvw zT=Fsu7zl980PWAS5(x3^AK!V&K&*`MQ}@Lj<*5d-8gLjtw(Dv_7+Z!G^-GX@!07o` zjvUaPe2CKdA#NGMIKS%@5jWIW>Am_=IzBvn07 zA{d(0Gx(0ID#_B{l1M{!J{|@t^G2j4lDs{uiP-L!|HxPHB2(6UnOmZun zH=jx=$mLQG&S_Zbdi>w}+#hyR^!pHqME}gZmNH7;r93ih^ z3*S=oSv$I@EU^bx4b|FkD?)NZyzjTPYlfQ9fNRu>Bc8H`fm3dsbU4rh=dw90rqxyOtqi&u+b7L95om+B~Zw^Ys?NN)-MdVT4LSUF z?o%?HF7HiiZIF25M&S8=F& zP}1`@+G3CfSs>y@Mq|3lo zT62YcrNE-^Rcbcu9D@`^b*pL{+4DjCQ@YaYomYDC#pI5xT(ng9 z-})<^R=$HD7hE@UG&(=@b$@0GgFMhAE%c<3Aql2CIu?3Yn|vsTE8KD8oJMyManSM0fD9ur58p-<5E z#AHs->=1kv%`T z+rQ6Lx9I(jR}bGD9tacfORll$aI;Z8LO+twYC=PFodD!`4A2g8cZVosfI?Ldy zbRQQLhUEB8`bd$nkC1!Fs03S3463RU_R|G0yfBf%Gsz)S4Cd1~`7%fFATS>0xQmLh zRbWCjA*m|3*t~noXqHr>1K}==q^N9=ZmO-oBTYFzvCyBMyg4fq>DzzrHwo#H1d0e4 zi!HPGNWOmIWiCY%rGvJOUR&?9nTa}3h0C3< zR&5JDb3j|wHfNzhB%hj^`;|`FIJOFmz_>ytG1!w zIachzlO*LH>NimQ2ZHJOtFirgB3%Oe$>T%|(T-c#?%NpW4<8(miH?_fDfxG%9`?Q zix|9{Vk3NnxmE=3J7ZviM#7mU+jovQ27+fys|eteh9`1s3IsSN>4nivZhBke&-eQOI&2v*k z(?wJ0kptv!DAbP*zbArR=*fS@bD{`uV?s+~6szhQDf_fzUh2VlUU^_uDn{FdT2jvF zwNS6^GffE|M^NRo{+zi6tr$#%6?0Wx?GC_MjbK2N=cXo+j`1T{)QIVfXh813!P1N> zUl9(sRr_LwY0}89%ksckK8SCe(=}xkz1QlMd?N~-VBM~w?Xt)<$rqrCxDJGw!$PrW z@kDHw=-BcoJGSC#lWKsilbAF%Vva_zN6GZ7F&GO`4z*Swm-qpoKf3pQ0f=*G0XB?9 z5hSliuL%MS%1!Mqz?^0;?$UO5;U!B?p6Wc<6Ma=9G=n^w!{;r&I#RX|+i%v-Y|&NbPe@soHJ!ko5egKi_$T z$AQ>ESW;{=Gj=y?;EGme@1XrF>?bWkR!Ct5#`~<9Iy!>KMRBTGODT5RwGg~Vd75xp zKZh-2^}`D}q0WmKlYw=P)G!~+{2litwI)b=E)o|Yi=C*r+r{PGgR3G8gur$FPw4%O zBXOs_9;F}2H{FGiXf`ksxvTO+m4Fzy8uUQ)`uyV~Li?Q+j?|G@LqFkBT~iT|TH3u1 zXn20lBer&6b~36fi+|hUkonQ98pR6=6+VNJ*^f@f1{S_FC3J!bk1N+X$A;FYovly; zM(j`au;x4e6`g2vC4$_Ws%gzE-yOSYO*~26wsxcq4hP~U;zT_mK*)=gwV+2dA|?># zKNiVrl5gUpScnFFEG(ulm#BlFswR2<;YTIHxtQF&zQ03P3ruJJFR z-J3d z1e|Y*dXp$7#_WM&A%|tqoljELffpY@lwB+XmUk?RH_8I;H1<|UAWg&TaaD}u*D|uu z>P1x;ofqz1?9xSr+&uk%z&34QZBnm^9;o+N_=$G(n8bu}C0YK4A$GxX&RlLk3gsK}Kn?Km55;X> zISbNN%*7qbpPhp!QfJO{dm%nl2TAxD0(*`nh(V{N5pK9juTWdWCEm_YrKw&_U%dv(>dmOT%1lh-{-Ze(9 z;dQp~Sz1yGGKLAgBSh^BRCp!E_fU-lts2yH81-glu9EJmKs074w+g00c%`bu9@#66 zSF4A31F|v-AZWSd{Ghy7Bmi7FN3K}=Y4^suT7b!hhhcLu$k1JVq3s>ei&kbG3eToq zM{Ly@B-PBMYm;i{9go})_`lEe`ul~S|NBfYMG(Z_jyCsiN85@a*m^VOm9Gbh{c>aU zy4z(J8TEHd`YpSE%P#!$$rltHfNyzLd~Vlods^niS4-Rm+mtIPcY*UAp^-|f&fJbP zkq8rPbx#OK(C2e3n@msDR)!91+f-+yQ+ALhtt+UpACYnc@@X9b_`QW#9kWeG>`r^Vrr}RepvNaS*=hP@xgJK%8FH z8;{&cxIy<5Ec%BKNh1pjWS$_ZG!?6lq2gDtBxj9Y9B^dZv-bcDth#ju{UJc>Vs^ zslKMiEom$d)h&&x5bwljO5tPBQ-N1}c-Xe0;adaxl+LjT@uN;I)#?@~!b zf6&BKB$;t(lBpXgyyhB#-;JrQR-_4AFsjIdvJFb(wOReJNzhULY8gz{4 zN7~h+$g$^Ro@Y4I!yM_~WB)3ja~j=L8;B0GgT6E3Nzy~cgFOJ})KY0Lo>OJ;M;I1m z?1AhB>dBiUvQF;2YAbujHy*|rc|~-Wn$HxnuvW9N{6@P2BZgQnI_4tDP#H7XsbbhIB9RHwYn10oL&IS)3$V` z0yUI4{L#cPyZoQY<^GJ7pSi1>R?0(;l&laHk+el z;}$v}S(^Z3iawRf%d?1q0Tw!3Y(odNtOZ<4itcXv;&>R+_FV$C5fS&&d3Lvje-qyJ z?u^jub3^HS3`1;d2O}@YN<+`hiqtEWLgKJyMI(Cw$@Hq$>*o*BH6u*?&XA214>By` zTjf03JSUQY`1XB8`Q4NE{%*}=*EG()jq|}%+1E)i`FtYIDqW2ga39vU1EJ`4fO3vDju6dN8AuAM!Dzl!UWT0t)r^fOAmm*f{JKe|O$^?P8Y zN;x{=CRlaeDE zH5Yf!lm|%~!Ej_Es(LI&5`17yg!l#r*82(t^YoFje06W`cD@evAXA#^Kn?S*AS3j! 
zDf8-L)|%;O>!U!>G@^L>x-xWRxzXmxPo+{{Ve9ZtiO1pQAi&zEQD1!V^>H9U1N|*0 zKQKk7*W;JArDFzCf$!R~rNgrh046dozL}saV0!#E&8KYd_SofIc=3tPRgx>X3XK1A zcu%ih5Oy{uQOyPwcu`ISUcO4yVj3;G3iF$J9Z*f*lm_|MVUSS zGYXg)f4*mmE7)=);Y9`O+=k&kNWFVeh1ejGS;m^a^!6{ zOa4mkQ?Uu$CMEHv&zge}`J2ub^QnC9^7iA}p1tZOk!KV&4fSq-3OG=s8_^j77wW?x z4mzsmPJ1XV1nmR&`Fm(aGJ_E#48SzmdB_TjdfmlvsBTgftb{t*yz#5O%+tX42#%S{U*_3HR z7gGsuL~{A0CZ(*Rmdq72xN|!ij_=w8``Wiaw+Im`&B;I^)~z7y?)+)PA?Z2DlOq)X zymM#={%;Mq=TRAIysN9?b^RCR9=5BK*@auQ?>@*zVK(>tesG>1on>2j3UvI!z>+h} zh4EsY20uN^Mv@aI+WnSX!Sj)hNTaWOls2A8c8u9zd8P5U4QZ{Ry+t*cm}-8VowUIc zYnEyA_U#S2i4@*OD_HY`2SQsb78$Qio z0f+Kf<^&z}byeJkeV>!#>biUFx_`P)cuSi(a*-&)KKZ8b8q+NARC>AE31$3I$m((7WRgt>3TJa|MlU zi2uR&#JUO64J{e(-(Z>17s#xH1ps*|v|dFcuXJ-@R1BinVTR$wZ9}$YB_rKi`0QRo zwb6@DPFDz*Ud=C)z3a1CcmR{{DC*O$49sXYQiY`OBYd_OGgCZB$G^i2yH2-a&CRka zE}mU9L;huKq0Lv$5p27ge~;?k0LY8AZ|wKv>0n%r@%-6&e{}m zIK@5T)d?xg!LsKSzJ5f%*j|Z_H56!c>k=OOfSk`W25?&Qylxs!xUDd}7Mz^5cgxmp zSl@4{a0ffI_SqfeGg1d)@d}DvLzUqObG|jU;o&}?9%2-uI!#f9-oI}>bsmY|&w~^S z&=K^8x{>JZ91%G*&0P^rJxio}n-m@$WCW{gNAcRP0sF=rwqsQl7cX~I83nc;sso&S zTB^7yu28K_jZ4_R#!wwkW#a(p$Pn(e=#|!w2Rx$7(+dg~ta*P=L{*}X{Cjrfp9S>~ z*^xhjf*XSUz>NLQvCybC83fPxaRLx{gGlqrSb}U|2f%fe%cnra8pTou%9%wlbSxQ# zBR?Q@?=e9qt;UP#yH-*^wasIlTPKk+(t>>CDVAjn%`4fWn}UtRc#81nPAGYl zq&_51ZDWcM2>tOvD&>j$(rz+$nVddfE&h;IDzg60dyrW~E+|7u9HBY`GCYPzRv(Mx zQ%wp~ngbIVOaug(0Y!BrgE0>$*J6meX8Gy*vg9bg{PCa(Lw_w<-`V|bV2Q1K6-%!IHnE_E$IB{e_H`1}*$i;rf&4r(CxKGww zpW36N{*Z^xAqs2?lv#uDi85|`nhE&WCv%k5HoefDUt0#$fMBlkUej(-XRzmKOy-^l zDHms?awOx1nDiY@3iV!G29NaKbX2#h<>QJ_t&!92p`Y1!Ynxla|y zgNmoOsE_~w$cNjkoD)oL2Rowgd6)dI$>`u`XfZye?R3$#JaJ4{?5kX)7H2E00^qCr?`q2Hx+Nm^8!$nE=LoF2N-;QfQX?sJIEYI zvUmbICDLFoXopAl>p1PJP63<)0J-+EIu;goA`jk6WhvS^y^_>okg_fMYV-Kf1Z{DR zhYk(`KUi=TtwMN}e_v3vY4BZT#xr`f3Lq+f)&>7(p!<)YA^>5(9BH&^!ur1(HR+)U z4pM9SH2~?c1jiCn()GO^WqM*4@Jhg;jPljxAG~U{)I;5&3tRdk^sTU4Sq%i)JFO0P zcwhhWgb$ISuV2F+bx)CeIc#w^Q#zI9^lQA9A1@(fQkPX^{rP=` z2I-eZTOSF(Q5U*>II(l-+fxjOX6ZB~G>4BXcAPT&rPO&}^X3dzGan)G7%qm|<%&Bp z&t!I_3%qD0Bsi*YXL{`{-bNe9xuApKE||2*XASN(r@4=VY5zQD@)^;~8FG{hKy+IO ze)PGC<2p2xmq$WSuHIp7r*ha?T~Pvw6%`VT-jEEQ4}C0TT&R;Rw#ko0X?Zv4-$iTY z6{={%%8S1_eZD#u8&Q(DHke8XB)SwPV!fsJ8SA_TrBs-nghZ1%BIn6pnLN4P#gTjb z+&ip>NZ2`1*?|%k?%Tn$$%ILM3Uc2*#dC7cs3L!zyLobC2E7O0E|Khe<~N`1>@Fw0 zDa`GXT8jNpC=1oDDGMu$$wu^|B%1^BdB5=*iVu{TRZNR zNwahBFFTu_Yy2JS68U{M?h*p6<>(^&Fg2ezn*j6~Q^ldw><-XIA`+3QJaH+J9(ap9 z-XkJOSvtHKcFY(|M-BV$x@ckW6=y#E$n!dyz43*1`kT& zVs0&VmNCFPQD&)>lym*jmU28^1YeL-tC*HaO1ZzvkJFctD+pi+-G$bHb+phK;n?k| ztHrAh3t-mC3k>4uhO0`|o1-T%a8(^ijd31g2%|xh@1)SX7189H3EjmdkTA0snv}Yd z(GSmoRr%>9NP*H(2zW)t7JAu;NeiI*w6W6-E|__c(=evuh&fQdqAZFm9tYF#9{$U8ndZT>V>vAK33Y8{I9%r3u+RXBO^2$<^R|S|889JzgY{$ zjh(fB6Z2q#EW3`{l#Z zChiPUW}Dk1W;W^qsa*?hS{>2DyFC9rT~pS@3W0=^Hp6klC?jQ0_TF4|`rtFc~T27OBh{HX#BMa~~Ec=rIa$Nh$w zq(e<)2EZG1^XQW6BeWGjEsUU64{uLp}hxfvMfXxk)-XzkBO^Y`7LY2#?X zUBaP>qWghSOj7GAZ@0Lb_}mHnejf^|w-_84^X3B`qJ~^>v{Q4#tRe7(6s9&30$_BURP7Yd+O;_@iVa z6ktdCs`*!xtAK74g8{<6HmZpd^M+;}bQ}JvX{)M!z5Eizk;CRQN|RQmL6k8Co%(PN z($Fs2DxXQ`u}A|$@WFgWcgNc5uuH+9igf&l^{W=`{*hp|;?Bbd;lS-vJeUY`lVgtWVh=2JB z=M$>@<vOD>(H**+X&_qjqk+EqegzOtVbzEab&1w!z;&39{HWb)BK<%0bSeYpe; zqqJ~WrLTJ&wyr@S{D|hPh#BqHWHAvK;>UMlVGuia_p?aK@>6$za;@&0fcP z9*5}x{tI}@b~Y54l3>EPOx_E4^Do2`h9F5y^$Nb#oDlpuU{H3WQ?`dPQNw1hEi@gO zSo1b#I!d9N38P4771yanPse+i*`_@g^IzWz@BBy4_yqLon&RiDc8N{lO3@!c9pcV4kuQ3pI#--6J zx&dbAokUexf;P~E_*SvWUvP~#dJOu`uYLv(b`BRcde^HILYFw$%kQs!&+v$0sFn7e zHYxmiNm_iY0C*}W)@AOX;X5MV6j>Z&_TZ=3Ra_|)fh5ywfVJ}ig|I!7_#d%>e=0M7 ziw*oKCx4F(Tq+_W6!5^4-9lqoD)Nfxw>O;AK@i*AspMyo&xwLvTyIN`Pz&rP-~}pbTQfY0UWzSf3yx@L|9kzyOue*H4nGvl 
z`7}Jj_GAd?eaFln%N2#vX)$R-Sl8|QP$LGL!*Xlzg6Ef$3({vk-00FN8d_my!`hVZ zM2{%_yHM!|4pmH?(y+k>KYlgx?`A1b_4w zNx{M)noUt&@)6N=`%?<;1(xx=qN#Ig_#kL@k^G*)Et<-s$)uf0T{rmp#7f(9kCF?Q zq)!b%jsaX;xz)pl@LB7anIk>xA&kHY{@{V7S2hbu8@SPlMY*PU*0ed1MQM$^PDj|S zh<{tgF|vXu{T#y|{p2QR8e$|`WgQ$vdsi(fCn9IrhYOLvAl&)l^@S)1A_OCLYiZcSpON0zf{~J`)+kx(?!UV%c1%Y`X`B{84T4 zZCpac_jXaDhTnN7`8$pbwVCoOqKNmtBJOnH>b5e4$xGfd%1DDa+Q@dbK$nl0-~x>9 z?&jEKIe@LH1f&)hn}SUxmwNUTbI`_>ZPT4@Y>`$FH*!`n@$WehVMvronDFOxlc^w2 z`m{#{#ulx`c}q&oC(u|EXfogQty$o!WC29^#QGTTzjo3235Mcz>Mv-6x0|e*K|4vr=4Hi zG(N=|{r&7S?8Lkq!)s${Ua^4)`rJzb#`I zcPktgQ4d(p^y+cg^_GLbml`kAkR7Q^gr_&Ruar};m9uU`b3cVejsx=WeD!6$s_Dzc zFfrNUV^^~dN^t=*5jq61F8yiZD%S1G4>F2n4}vu6do#Aar!X*6XyPbpLO@nxO=DDY z$HnAMGI<4E{eDv^9{U(?WnwpK{OlnW9j*XZGCy|ff0^3Y-+nitc6tm^e)&BI5YHw9 z@OgYg+6W5#<|}7g{#Gs4t$}#4MZ^!+9?cR<$DJ~-t^{wA>ZKzty>2|!22N7+_||-k zCHf+LBDDkS57-82#SjXxwz4hFMQkz&cn{WnE1lVBOwsWGyO<4UL#r2>^2CfYvJL_;dLA8fu@Vf)zwoWlIs*>$rc8w%SX9rfA zv~C!O`%JG<_35k1C}@GGsQWz6xr6!{v4C-&U`)U`Q*{Vf=FRy8IK;W42j^sKu)ljO z!m2gj$;O;?t}A3-gUKG+w#0P;nhBouPEBO{iROmclNi~gCho%_vg@&^A-lXTBVwtU zw(A9-UJOk=X(2N>vYL~lSLKL9Iks4!LQkeqWyTq@y}u)PNbO@1_A;MN69kCX$#0K$MU(lH<% z(k-0>d^mtKQbS2dhqR=iG!oJ!ozjiOkWxc80uG48<(#`*>s#xr>-p~Az4p&%y?gIx zzwtw>^F=ua8J2?Urt)%phPlGg2B;fyEwQ-^aATV#QxHx33JCWhoHeJoCUus->xZ; z^Z>J?P?0@`?s->D8tqJWW^a9v-wlKV2isw_XR}ozZ?L|2b7yS@|A`dOidQyFTbW~@ zmZDzJR^C1}tg0KT6+${WfrZ~c`eIhV?XK+)2LNlnbM6Pjaf5QCp6|B)R zSg!sd6$x%&*{&e*k5UeT+=B$8=sw%07X7UnMJyPzyU}amM8jnmURn+~&nvXnyvDLhUC!sa^#&AdZZQgo*fiB?k z%gnaLOYv()iRr+5Z;g%?@J`SiVY$Iv5}7VI5kwy2DPOl+?rrE*;#E4EJ6d|OG>K7` z8pAO>>!tcd>gxv7S;LvzR3f1BSwU3v^Mauwm%l2~BzB~gKFS=)7KX*C*138!Rw)P4|V9H?c_&G4V zfMs?mTsuu7A`|*StZmrD8rh=RZ9)!7vT(Fe$r&V6BMZsCDsqk;D^Jb8YQjl7QpGA+ zwtD&whq$<;nJD5QdVUilc~+@NZu3##4N`UkWqK4umOe0J{_vrWZoIfjxI z;)m4MRGA%ZHd3NPZ+(jZANm8_clk+Bz@bzFN>G^P7o}6s*$_71?2~6Gw@*$23;tRu z?vF6TXG1H4MG2`NgF-K@$wZ0A!}~KhMwu%{Qr{u95ft%KSAMAv?IG$O^7w6FfEl5rH!q z2)9r;$`fmS3R_7`Z7*f0U2;reX32Gk)P`{hXV||+u3U(d7{#Q-lCOK8!#lnR+hS5n zpp5JRk6c<+a%g)qhb2=1TA(;=QV9`~a#9F8o(c~R*(tIy!yXvmUDXZ4ou zqmpX`Rh+&}^Yjt}=x!1Z5;-RV6MycL5ZBt~{`e445EYOyxYfRg--78YRTHt-RCmesXprbzEk(K7 z9v*+C(d*MK8zrM6lC@efIMp|gspv-A$)~wUo>e#IRH5egMYQijP~R#ilQt~G9y4WF zkfGE+&oEYGuw^LXk{zBVAAU$mcb!bobj~qvJwJ6Lf_tpgwp>Nh2yXD2^jpZbGM2|R z-wY)l47R0o3var(kMLQWB56N2ash&Qr2FBo*!EK|dZRIRiZMbdQ?UkO8s&ogRZIg# zswBoHs?cLWSZBOBf*@OfGu*0TplV0L4s85F2FC%k9mmKds!XI~$QR66+rd;TeaVb% zxDi3n!o$Z&3X|L09%pIj{Q}`t(ue}>7SD#Oa~4Vk(L?dBaWQ=PDBQ`#<+e!LUOt8b zlRpO{Vx(nFl1nQYUGUUmn_g1)>$%Q^K`TOPW50*ptJ3n|>A zCqq9_)pghKA))RAIsuyt3SPaS^lOt1CNe%rpfSCyO0dIps{No^?Y>Ot!|fN_EWyI2 zhy?LOffO!=OnKT@YDVl}-SKb{roG9aZtJs2b5`Y`fD&BtoNQ~Sy@2c|&kH1zvW*Ff zI&z5>A5N;w)Sh|I-9m16sOSV#d8@<|R2avBBtQGFIINpTJ)(d%%0~5dE)!EGOdl6GL-I!zhFyQi zNntH`IG9mJjGo;D@X>@x%$beL>YipNQjmvk z`k#O2#{IX_|No!M{y(o{ME}TT|0eX~T^V;RI3g6+X_esBGuJJi+5C{9KF*%RLif}j zW&G{l@2Mdy2~OL32E4~8YFp?edw;A;=H~gBvDv89;j}QtijTRZ9)MXUYcp;Y+(L9S zujPZB>?>&>x1ErOS|BpNuiEnvr|g)D1&tUW@a2~F7bfs^!F#VaI6#KN(S#3_nO7L+v+kRpmShdpjzsK0%sX}mx>Ck63m59f2IGE` z>r&e9z%k9V9)8D?lfNXx=|=t6Lung+^<;GyhPVSBru^uTw?XOxktEtOD%F!TUyFK< z631P_HKJ|DgbLX_zuh-+Mh_FQ9GYb+KG0ZNu+4uiH1+!QadKdI-evUT7{S$v-F=}! z%1Pi-EO?)R$N`mLo^!(2?2--qN@61~MY1l(v0t zD2R0A26S$2eKDokoO~P3b~elH`+b(7MqTf-FBaeR7t)09lMHXmwD~FsGhS2Pl9u>K zV-eHkRIExrayl60CI@#OJn!!t3z+J(yHGcJP{KQZf|;W~EOKKUquN-4?kuUA+Pqz@ z=;xDi=I}eA3X*Ead^ODn@BPN_;n#z6X`DVs8IZBYaTA%4;?sy^d4SEF|`UwUV(n1`8>M9#Xkd_BIdi%H3CQAL$t-eK$4hF-;j z;q9sXJKKp&=lhtY+o>a?uiA)x3cJq9O~_5uQ-ZY@<|#|CwAbhD@zq%TqUG*}q{(Yq zK^a2TYKXr!OLGfBM&0(QPf9Ijq%jM-ZDuKUNDa)l5$4JW?846J7`! 
zR$v6aPBm?aSs{CYg1#N)^Tc9Pf&CMR&&o-)PwM)4F)P1olQ;4#scB)~6~q_?6h#at-f- z8D8f~zvMlgs#pnG?){3_d3cDmy6rGrHSF~;^N}S@Lo3-D+K}*f%`iA;(p3xr?wD1K zmqSV=E@>KGmK!B-1$?Tz zU8VIN#Ut->99GtBrq_U-!5LJ;ATY7iU%=p- z4i7Y$iF#I)IfO{02~p+bD8Ni^FST)EX5wl@JGLX=<&~pTW7FPAfy7oV2U0bEGFW3| z>;qyPG)pjouZ;6Gjk|8g-~ALID`$X6I!<;aSUl1xJ_Ifri|ftMaBIBV@Vd}2-RF>0 z(2xEu5>O~BWsnEyb?wXEf28oT*i4Au@MX0nbDW#H>xAnd0GWz2*%M0@(*!>nASZ1#D6YWY6(|_>l9oJm_lb*40AnaN>nr_q0hB|%WBLCwoA7Dd6p%ucZx4{Tb)2W75 zo|vQOy;P_}VrasJ)NFNaTVBG=yD`R_fx97LG?@;oGTXbbc`oI^KsIdxGKz7T^Rm{n zvMVO0c#bPK@{8;-nyEyP!(mlD#tD#FpCmNIZoV-XJ9=EIeeV9a{P{Y7K5F>BK9vPz zFohZ?CPJpEQzZ%$C6<;87LJl5c4{`tWYT{9G3ICR1u177p;h=CQYNRPEKr|1F_kIp Fe*lF0>74)o literal 0 HcmV?d00001 diff --git a/linux/sysroot_scripts/libxcomposite1-symbols b/linux/sysroot_scripts/libxcomposite1-symbols new file mode 100644 index 000000000000..aba31fa2a8c3 --- /dev/null +++ b/linux/sysroot_scripts/libxcomposite1-symbols @@ -0,0 +1,15 @@ +libXcomposite.so.1 libxcomposite1 #MINVER# + XCompositeCreateRegionFromBorderClip@Base 1:0.4.4-1 + XCompositeExtensionInfo@Base 1:0.4.4-1 + XCompositeExtensionName@Base 1:0.4.4-1 + XCompositeFindDisplay@Base 1:0.4.4-1 + XCompositeGetOverlayWindow@Base 1:0.4.4-1 + XCompositeNameWindowPixmap@Base 1:0.4.4-1 + XCompositeQueryExtension@Base 1:0.4.4-1 + XCompositeQueryVersion@Base 1:0.4.4-1 + XCompositeRedirectSubwindows@Base 1:0.4.4-1 + XCompositeRedirectWindow@Base 1:0.4.4-1 + XCompositeReleaseOverlayWindow@Base 1:0.4.4-1 + XCompositeUnredirectSubwindows@Base 1:0.4.4-1 + XCompositeUnredirectWindow@Base 1:0.4.4-1 + XCompositeVersion@Base 1:0.4.4-1 diff --git a/linux/sysroot_scripts/merge-package-lists.py b/linux/sysroot_scripts/merge-package-lists.py new file mode 100755 index 000000000000..e2a5a630b927 --- /dev/null +++ b/linux/sysroot_scripts/merge-package-lists.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python3 +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Merge package entries from different package lists. +""" + +# This is used for replacing packages in eg. bullseye with those in bookworm. +# The updated packages are ABI compatible, but include security patches, so we +# should use those instead in our sysroots. + +import sys + +if len(sys.argv) != 2: + exit(1) + +packages = {} + +def AddPackagesFromFile(file): + global packages + lines = file.readlines() + if len(lines) % 3 != 0: + exit(1) + for i in range(0, len(lines), 3): + packages[lines[i]] = (lines[i + 1], lines[i + 2]) + +AddPackagesFromFile(open(sys.argv[1], 'r')) +AddPackagesFromFile(sys.stdin) + +output_file = open(sys.argv[1], 'w') + +for (package, (filename, sha256)) in packages.items(): + output_file.write(package + filename + sha256) diff --git a/linux/sysroot_scripts/reversion_glibc.py b/linux/sysroot_scripts/reversion_glibc.py new file mode 100755 index 000000000000..8651386ccb9d --- /dev/null +++ b/linux/sysroot_scripts/reversion_glibc.py @@ -0,0 +1,124 @@ +#!/usr/bin/env python3 +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Rewrite incompatible default symbols in glibc. 
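+
+A symbol whose default version is newer than MAX_ALLOWED_GLIBC_VERSION is
+re-pointed at the newest version at or below that limit (when one exists),
+so the rewritten binary no longer requires unsupported glibc versions.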
+"""
+
+import re
+import subprocess
+import sys
+
+# This constant comes from the oldest glibc version in
+# //chrome/installer/linux/debian/dist_package_versions.json and
+# //chrome/installer/linux/rpm/dist_package_provides.json
+MAX_ALLOWED_GLIBC_VERSION = [2, 26]
+
+VERSION_PATTERN = re.compile(r'GLIBC_([0-9\.]+)')
+SECTION_PATTERN = re.compile(r'^ *\[ *[0-9]+\] +(\S+) +\S+ + ([0-9a-f]+) .*$')
+
+# Some otherwise disallowed symbols are referenced in the linux-chromeos build.
+# To continue supporting it, allow these symbols to remain enabled.
+SYMBOL_ALLOWLIST = {
+    'fts64_close',
+    'fts64_open',
+    'fts64_read',
+    'memfd_create',
+}
+
+# The two dictionaries below map from symbol name to
+# (symbol version, symbol index).
+#
+# The default version for a given symbol (which may be unsupported).
+default_version = {}
+# The max supported symbol version for a given symbol.
+supported_version = {}
+
+# The file name of the binary we're going to rewrite.
+BIN_FILE = sys.argv[1]
+
+# Populate |default_version| and |supported_version| with data from readelf.
+stdout = subprocess.check_output(['readelf', '--dyn-syms', '--wide', BIN_FILE])
+for line in stdout.decode("utf-8").split('\n'):
+    cols = re.split(r'\s+', line)
+    # Skip the preamble.
+    if len(cols) < 9:
+        continue
+
+    index = cols[1].rstrip(':')
+    # Skip the header.
+    if not index.isdigit():
+        continue
+
+    index = int(index)
+    name = cols[8].split('@')
+    # Ignore unversioned symbols.
+    if len(name) < 2:
+        continue
+
+    base_name = name[0]
+    version = name[-1]
+    # The default version will have '@@' in the name.
+    is_default = len(name) > 2
+
+    if version.startswith('XCRYPT_'):
+        # Prefer GLIBC_* versioned symbols over XCRYPT_* ones. Set the
+        # version to something > MAX_ALLOWED_GLIBC_VERSION so this symbol
+        # will not be picked.
+        version = [float('inf')]
+    else:
+        match = re.match(VERSION_PATTERN, version)
+        # Ignore symbols versioned with GLIBC_PRIVATE.
+        if not match:
+            continue
+        version = [int(part) for part in match.group(1).split('.')]
+
+    if version < MAX_ALLOWED_GLIBC_VERSION:
+        old_supported_version = supported_version.get(base_name, ([-1], -1))
+        supported_version[base_name] = max((version, index),
+                                           old_supported_version)
+    if is_default:
+        default_version[base_name] = (version, index)
+
+# Get the offset into the binary of the .gnu.version section from readelf.
+stdout = subprocess.check_output(['readelf', '--sections', '--wide', BIN_FILE])
+for line in stdout.decode("utf-8").split('\n'):
+    if match := SECTION_PATTERN.match(line):
+        section_name, address = match.groups()
+        if section_name == '.gnu.version':
+            gnu_version_addr = int(address, base=16)
+            break
+else:
+    print('No .gnu.version section found', file=sys.stderr)
+    sys.exit(1)
+
+# Rewrite the binary.
+bin_data = bytearray(open(BIN_FILE, 'rb').read())
+for name, (version, index) in default_version.items():
+    # No need to rewrite the default if it's already an allowed version.
+    if version <= MAX_ALLOWED_GLIBC_VERSION:
+        continue
+
+    if name in SYMBOL_ALLOWLIST:
+        continue
+    elif name in supported_version:
+        _, supported_index = supported_version[name]
+    else:
+        supported_index = -1
+
+    # The .gnu.version section is divided into 16-bit chunks that give the
+    # symbol versions. The 16th bit is a flag that's false for the default
+    # version. The data is stored in little-endian so we need to add 1 to
+    # get the address of the byte we want to flip.
+    #
+    # Disable the unsupported symbol.
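+    # As a worked example (with hypothetical numbers): if gnu_version_addr
+    # were 0x1000 and index were 5, the symbol's 16-bit entry would span
+    # bytes 0x100a-0x100b, and the flag byte flipped below would sit at
+    # gnu_version_addr + 2*5 + 1 = 0x100b.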
+    old_default = gnu_version_addr + 2 * index + 1
+    assert (bin_data[old_default] & 0x80) == 0
+    bin_data[old_default] ^= 0x80
+
+    # If we found a supported version, enable that as default.
+    if supported_index != -1:
+        new_default = gnu_version_addr + 2 * supported_index + 1
+        assert (bin_data[new_default] & 0x80) == 0x80
+        bin_data[new_default] ^= 0x80
+
+open(BIN_FILE, 'wb').write(bin_data)
diff --git a/linux/sysroot_scripts/sysroot-creator-bullseye.sh b/linux/sysroot_scripts/sysroot-creator-bullseye.sh
new file mode 100755
index 000000000000..3f40e809580d
--- /dev/null
+++ b/linux/sysroot_scripts/sysroot-creator-bullseye.sh
@@ -0,0 +1,505 @@
+#!/bin/bash
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+DISTRO=debian
+DIST=bullseye
+
+# This number is appended to the sysroot key to cause full rebuilds. It
+# should be incremented when removing packages or patching existing packages.
+# It should not be incremented when adding packages.
+SYSROOT_RELEASE=1
+
+ARCHIVE_TIMESTAMP=20230329T085712Z
+ARCHIVE_URL="https://snapshot.debian.org/archive/debian/$ARCHIVE_TIMESTAMP/"
+APT_SOURCES_LIST=(
+  # Debian 12 (Bookworm) is needed for GTK4. It should be kept before bullseye
+  # so that bullseye (processed later) takes precedence.
+  "${ARCHIVE_URL} bookworm main"
+  "${ARCHIVE_URL} bookworm-updates main"
+
+  # Debian 9 (Stretch) is needed for gnome-keyring. It should be kept before
+  # bullseye so that bullseye (processed later) takes precedence.
+  "${ARCHIVE_URL} stretch main"
+  "${ARCHIVE_URL} stretch-updates main"
+
+  # This mimics a sources.list from bullseye.
+  "${ARCHIVE_URL} bullseye main contrib non-free"
+  "${ARCHIVE_URL} bullseye-updates main contrib non-free"
+  "${ARCHIVE_URL} bullseye-backports main contrib non-free"
+)
+
+# gpg keyring file generated using generate_keyring.sh
+KEYRING_FILE="${SCRIPT_DIR}/keyring.gpg"
+
+HAS_ARCH_AMD64=1
+HAS_ARCH_I386=1
+HAS_ARCH_ARM=1
+HAS_ARCH_ARM64=1
+HAS_ARCH_ARMEL=1
+HAS_ARCH_MIPS=1
+HAS_ARCH_MIPS64EL=1
+
+# Sysroot packages: these are the packages needed to build chrome.
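+# (Each name below is looked up in the APT_SOURCES_LIST archives above by the
+# GeneratePackageList* helpers in sysroot-creator.sh, which record the
+# matching .deb URL and sha256 for download.)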
+DEBIAN_PACKAGES="\ + comerr-dev + krb5-multidev + libasound2 + libasound2-dev + libasyncns0 + libatk-bridge2.0-0 + libatk-bridge2.0-dev + libatk1.0-0 + libatk1.0-dev + libatomic1 + libatspi2.0-0 + libatspi2.0-dev + libattr1 + libaudit1 + libavahi-client3 + libavahi-common3 + libb2-1 + libblkid-dev + libblkid1 + libbluetooth-dev + libbluetooth3 + libbrotli-dev + libbrotli1 + libbsd0 + libc6 + libc6-dev + libcairo-gobject2 + libcairo-script-interpreter2 + libcairo2 + libcairo2-dev + libcap-dev + libcap-ng0 + libcap2 + libcloudproviders0 + libcolord2 + libcom-err2 + libcrypt-dev + libcrypt1 + libcups2 + libcups2-dev + libcupsimage2 + libcupsimage2-dev + libcurl3-gnutls + libcurl4-gnutls-dev + libdatrie-dev + libdatrie1 + libdb5.3 + libdbus-1-3 + libdbus-1-dev + libdbus-glib-1-2 + libdbusmenu-glib-dev + libdbusmenu-glib4 + libdbusmenu-gtk3-4 + libdbusmenu-gtk4 + libdeflate-dev + libdeflate0 + libdouble-conversion3 + libdrm-amdgpu1 + libdrm-dev + libdrm-nouveau2 + libdrm-radeon1 + libdrm2 + libegl-dev + libegl1 + libegl1-mesa + libegl1-mesa-dev + libelf-dev + libelf1 + libepoxy-dev + libepoxy0 + libevdev-dev + libevdev2 + libevent-2.1-7 + libexpat1 + libexpat1-dev + libffi-dev + libffi7 + libflac-dev + libflac8 + libfontconfig-dev + libfontconfig1 + libfreetype-dev + libfreetype6 + libfribidi-dev + libfribidi0 + libgbm-dev + libgbm1 + libgcc-10-dev + libgcc-s1 + libgcrypt20 + libgcrypt20-dev + libgdk-pixbuf-2.0-0 + libgdk-pixbuf-2.0-dev + libgl-dev + libgl1 + libgl1-mesa-dev + libgl1-mesa-glx + libglapi-mesa + libgles-dev + libgles1 + libgles2 + libglib2.0-0 + libglib2.0-dev + libglvnd-dev + libglvnd0 + libglx-dev + libglx0 + libgmp10 + libgnome-keyring-dev + libgnome-keyring0 + libgnutls-dane0 + libgnutls-openssl27 + libgnutls28-dev + libgnutls30 + libgnutlsxx28 + libgomp1 + libgpg-error-dev + libgpg-error0 + libgraphene-1.0-0 + libgraphene-1.0-dev + libgraphite2-3 + libgraphite2-dev + libgssapi-krb5-2 + libgssrpc4 + libgtk-3-0 + libgtk-3-dev + libgtk-4-1 + libgtk-4-dev + libgtk2.0-0 + libgudev-1.0-0 + libharfbuzz-dev + libharfbuzz-gobject0 + libharfbuzz-icu0 + libharfbuzz0b + libhogweed6 + libice6 + libicu-le-hb0 + libicu67 + libidl-2-0 + libidn11 + libidn2-0 + libinput-dev + libinput10 + libjbig-dev + libjbig0 + libjpeg62-turbo + libjpeg62-turbo-dev + libjson-glib-1.0-0 + libjsoncpp-dev + libjsoncpp24 + libk5crypto3 + libkadm5clnt-mit12 + libkadm5srv-mit12 + libkdb5-10 + libkeyutils1 + libkrb5-3 + libkrb5-dev + libkrb5support0 + liblcms2-2 + libldap-2.4-2 + liblerc4 + libltdl7 + liblz4-1 + liblzma5 + liblzo2-2 + libmd0 + libmd4c0 + libminizip-dev + libminizip1 + libmount-dev + libmount1 + libmtdev1 + libncurses-dev + libncurses6 + libncursesw6 + libnettle8 + libnghttp2-14 + libnsl2 + libnspr4 + libnspr4-dev + libnss-db + libnss3 + libnss3-dev + libogg-dev + libogg0 + libopengl0 + libopus-dev + libopus0 + libp11-kit0 + libpam0g + libpam0g-dev + libpango-1.0-0 + libpango1.0-dev + libpangocairo-1.0-0 + libpangoft2-1.0-0 + libpangox-1.0-0 + libpangoxft-1.0-0 + libpci-dev + libpci3 + libpciaccess0 + libpcre16-3 + libpcre2-16-0 + libpcre2-32-0 + libpcre2-8-0 + libpcre2-dev + libpcre2-posix2 + libpcre3 + libpcre3-dev + libpcre32-3 + libpcrecpp0v5 + libpipewire-0.3-0 + libpipewire-0.3-dev + libpixman-1-0 + libpixman-1-dev + libpng-dev + libpng16-16 + libproxy1v5 + libpsl5 + libpthread-stubs0-dev + libpulse-dev + libpulse-mainloop-glib0 + libpulse0 + libqt5concurrent5 + libqt5core5a + libqt5dbus5 + libqt5gui5 + libqt5network5 + libqt5printsupport5 + libqt5sql5 + libqt5test5 + libqt5widgets5 + 
libqt5xml5 + libqt6concurrent6 + libqt6core6 + libqt6dbus6 + libqt6gui6 + libqt6network6 + libqt6opengl6 + libqt6openglwidgets6 + libqt6printsupport6 + libqt6sql6 + libqt6test6 + libqt6widgets6 + libqt6xml6 + libre2-9 + libre2-dev + librest-0.7-0 + librtmp1 + libsasl2-2 + libselinux1 + libselinux1-dev + libsepol1 + libsepol1-dev + libsm6 + libsnappy-dev + libsnappy1v5 + libsndfile1 + libsoup-gnome2.4-1 + libsoup2.4-1 + libspa-0.2-dev + libspeechd-dev + libspeechd2 + libsqlite3-0 + libssh2-1 + libssl-dev + libssl1.1 + libstdc++-10-dev + libstdc++6 + libsystemd-dev + libsystemd0 + libtasn1-6 + libthai-dev + libthai0 + libtiff-dev + libtiff5 + libtiff6 + libtiffxx5 + libtinfo6 + libtirpc3 + libts0 + libudev-dev + libudev1 + libunbound8 + libunistring2 + libutempter-dev + libutempter0 + libuuid1 + libva-dev + libva-drm2 + libva-glx2 + libva-wayland2 + libva-x11-2 + libva2 + libvorbis0a + libvorbisenc2 + libvulkan-dev + libvulkan1 + libwacom2 + libwayland-bin + libwayland-client0 + libwayland-cursor0 + libwayland-dev + libwayland-egl-backend-dev + libwayland-egl1 + libwayland-egl1-mesa + libwayland-server0 + libwebp-dev + libwebp6 + libwebp7 + libwebpdemux2 + libwebpmux3 + libwrap0 + libx11-6 + libx11-dev + libx11-xcb-dev + libx11-xcb1 + libxau-dev + libxau6 + libxcb-dri2-0 + libxcb-dri2-0-dev + libxcb-dri3-0 + libxcb-dri3-dev + libxcb-glx0 + libxcb-glx0-dev + libxcb-icccm4 + libxcb-image0 + libxcb-image0-dev + libxcb-keysyms1 + libxcb-present-dev + libxcb-present0 + libxcb-randr0 + libxcb-randr0-dev + libxcb-render-util0 + libxcb-render-util0-dev + libxcb-render0 + libxcb-render0-dev + libxcb-shape0 + libxcb-shape0-dev + libxcb-shm0 + libxcb-shm0-dev + libxcb-sync-dev + libxcb-sync1 + libxcb-util-dev + libxcb-util1 + libxcb-xfixes0 + libxcb-xfixes0-dev + libxcb-xinerama0 + libxcb-xinput0 + libxcb-xkb1 + libxcb1 + libxcb1-dev + libxcomposite-dev + libxcomposite1 + libxcursor-dev + libxcursor1 + libxdamage-dev + libxdamage1 + libxdmcp-dev + libxdmcp6 + libxext-dev + libxext6 + libxfixes-dev + libxfixes3 + libxft-dev + libxft2 + libxi-dev + libxi6 + libxinerama-dev + libxinerama1 + libxkbcommon-dev + libxkbcommon-x11-0 + libxkbcommon0 + libxml2 + libxml2-dev + libxrandr-dev + libxrandr2 + libxrender-dev + libxrender1 + libxshmfence-dev + libxshmfence1 + libxslt1-dev + libxslt1.1 + libxss-dev + libxss1 + libxt-dev + libxt6 + libxtst-dev + libxtst6 + libxxf86vm-dev + libxxf86vm1 + libzstd1 + linux-libc-dev + mesa-common-dev + qt6-base-dev + qt6-base-dev-tools + qtbase5-dev + qtbase5-dev-tools + shared-mime-info + uuid-dev + wayland-protocols + x11proto-dev + zlib1g + zlib1g-dev +" + +DEBIAN_PACKAGES_AMD64=" + libtsan0 + liblsan0 +" + +DEBIAN_PACKAGES_X86=" + libasan6 + libdrm-intel1 + libitm1 + libquadmath0 + libubsan1 + valgrind +" + +DEBIAN_PACKAGES_ARM=" + libasan6 + libdrm-etnaviv1 + libdrm-exynos1 + libdrm-freedreno1 + libdrm-omap1 + libdrm-tegra0 + libubsan1 + valgrind +" + +DEBIAN_PACKAGES_ARM64=" + libasan6 + libdrm-etnaviv1 + libdrm-freedreno1 + libdrm-tegra0 + libgmp10 + libitm1 + liblsan0 + libthai0 + libtsan0 + libubsan1 + valgrind +" + +DEBIAN_PACKAGES_ARMEL=" + libasan6 + libdrm-exynos1 + libdrm-freedreno1 + libdrm-omap1 + libdrm-tegra0 + libubsan1 +" + +DEBIAN_PACKAGES_MIPS64EL=" + valgrind +" + +. 
"${SCRIPT_DIR}/sysroot-creator.sh" diff --git a/linux/sysroot_scripts/sysroot-creator.sh b/linux/sysroot_scripts/sysroot-creator.sh new file mode 100644 index 000000000000..63d5baa1f90e --- /dev/null +++ b/linux/sysroot_scripts/sysroot-creator.sh @@ -0,0 +1,963 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# This script should not be run directly but sourced by the other +# scripts (e.g. sysroot-creator-bullseye.sh). Its up to the parent scripts +# to define certain environment variables: e.g. +# DISTRO=debian +# DIST=bullseye +# # Similar in syntax to /etc/apt/sources.list +# APT_SOURCES_LIST=( "http://ftp.us.debian.org/debian/ bullseye main" ) +# KEYRING_FILE=debian-archive-bullseye-stable.gpg +# DEBIAN_PACKAGES="gcc libz libssl" + +#@ This script builds Debian/Ubuntu sysroot images for building Google Chrome. +#@ +#@ Generally this script is invoked as: +#@ sysroot-creator-.sh * +#@ Available modes are shown below. +#@ +#@ List of modes: + +###################################################################### +# Config +###################################################################### + +set -o nounset +set -o errexit + +SCRIPT_DIR=$(cd $(dirname $0) && pwd) + +if [ -z "${DIST:-}" ]; then + echo "error: DIST not defined" + exit 1 +fi + +if [ -z "${KEYRING_FILE:-}" ]; then + echo "error: KEYRING_FILE not defined" + exit 1 +fi + +if [ -z "${DEBIAN_PACKAGES:-}" ]; then + echo "error: DEBIAN_PACKAGES not defined" + exit 1 +fi + +readonly HAS_ARCH_AMD64=${HAS_ARCH_AMD64:=0} +readonly HAS_ARCH_I386=${HAS_ARCH_I386:=0} +readonly HAS_ARCH_ARM=${HAS_ARCH_ARM:=0} +readonly HAS_ARCH_ARM64=${HAS_ARCH_ARM64:=0} +readonly HAS_ARCH_ARMEL=${HAS_ARCH_ARMEL:=0} +readonly HAS_ARCH_MIPS=${HAS_ARCH_MIPS:=0} +readonly HAS_ARCH_MIPS64EL=${HAS_ARCH_MIPS64EL:=0} + +readonly REQUIRED_TOOLS="curl xzcat" + +###################################################################### +# Package Config +###################################################################### + +readonly PACKAGES_EXT=xz +readonly RELEASE_FILE="Release" +readonly RELEASE_FILE_GPG="Release.gpg" + +readonly DEBIAN_DEP_LIST_AMD64="generated_package_lists/${DIST}.amd64" +readonly DEBIAN_DEP_LIST_I386="generated_package_lists/${DIST}.i386" +readonly DEBIAN_DEP_LIST_ARM="generated_package_lists/${DIST}.arm" +readonly DEBIAN_DEP_LIST_ARM64="generated_package_lists/${DIST}.arm64" +readonly DEBIAN_DEP_LIST_ARMEL="generated_package_lists/${DIST}.armel" +readonly DEBIAN_DEP_LIST_MIPS="generated_package_lists/${DIST}.mipsel" +readonly DEBIAN_DEP_LIST_MIPS64EL="generated_package_lists/${DIST}.mips64el" + + +###################################################################### +# Helper +###################################################################### + +Banner() { + echo "######################################################################" + echo $* + echo "######################################################################" +} + + +SubBanner() { + echo "----------------------------------------------------------------------" + echo $* + echo "----------------------------------------------------------------------" +} + + +Usage() { + egrep "^#@" "${BASH_SOURCE[0]}" | cut --bytes=3- +} + + +DownloadOrCopyNonUniqueFilename() { + # Use this function instead of DownloadOrCopy when the url uniquely + # identifies the file, but the filename (excluding the directory) + # does not. 
+ local url="$1" + local dest="$2" + + local hash="$(echo "$url" | sha256sum | cut -d' ' -f1)" + + DownloadOrCopy "${url}" "${dest}.${hash}" + # cp the file to prevent having to redownload it, but mv it to the + # final location so that it's atomic. + cp "${dest}.${hash}" "${dest}.$$" + mv "${dest}.$$" "${dest}" +} + +DownloadOrCopy() { + if [ -f "$2" ] ; then + echo "$2 already in place" + return + fi + + HTTP=0 + echo "$1" | grep -Eqs '^https?://' && HTTP=1 + if [ "$HTTP" = "1" ]; then + SubBanner "downloading from $1 -> $2" + # Appending the "$$" shell pid is necessary here to prevent concurrent + # instances of sysroot-creator.sh from trying to write to the same file. + local temp_file="${2}.partial.$$" + # curl --retry doesn't retry when the page gives a 4XX error, so we need to + # manually rerun. + for i in {1..10}; do + # --create-dirs is added in case there are slashes in the filename, as can + # happen with the "debian/security" release class. + local http_code=$(curl -L "$1" --create-dirs -o "${temp_file}" \ + -w "%{http_code}") + if [ ${http_code} -eq 200 ]; then + break + fi + echo "Bad HTTP code ${http_code} when downloading $1" + rm -f "${temp_file}" + sleep $i + done + if [ ! -f "${temp_file}" ]; then + exit 1 + fi + mv "${temp_file}" $2 + else + SubBanner "copying from $1" + cp "$1" "$2" + fi +} + + +SetEnvironmentVariables() { + case $1 in + *Amd64) + ARCH=AMD64 + ;; + *I386) + ARCH=I386 + ;; + *Mips64el) + ARCH=MIPS64EL + ;; + *Mips) + ARCH=MIPS + ;; + *ARM) + ARCH=ARM + ;; + *ARM64) + ARCH=ARM64 + ;; + *ARMEL) + ARCH=ARMEL + ;; + *) + echo "ERROR: Unable to determine architecture based on: $1" + exit 1 + ;; + esac + ARCH_LOWER=$(echo $ARCH | tr '[:upper:]' '[:lower:]') +} + + +# some sanity checks to make sure this script is run from the right place +# with the right tools +SanityCheck() { + Banner "Sanity Checks" + + local chrome_dir=$(cd "${SCRIPT_DIR}/../../.." && pwd) + BUILD_DIR="${chrome_dir}/out/sysroot-build/${DIST}" + mkdir -p ${BUILD_DIR} + echo "Using build directory: ${BUILD_DIR}" + + for tool in ${REQUIRED_TOOLS} ; do + if ! which ${tool} > /dev/null ; then + echo "Required binary $tool not found." + echo "Exiting." + exit 1 + fi + done + + # This is where the staging sysroot is. + INSTALL_ROOT="${BUILD_DIR}/${DIST}_${ARCH_LOWER}_staging" + TARBALL="${BUILD_DIR}/${DISTRO}_${DIST}_${ARCH_LOWER}_sysroot.tar.xz" + + if ! mkdir -p "${INSTALL_ROOT}" ; then + echo "ERROR: ${INSTALL_ROOT} can't be created." + exit 1 + fi +} + + +ChangeDirectory() { + # Change directory to where this script is. + cd ${SCRIPT_DIR} +} + + +ClearInstallDir() { + Banner "Clearing dirs in ${INSTALL_ROOT}" + rm -rf ${INSTALL_ROOT}/* +} + + +CreateTarBall() { + Banner "Creating tarball ${TARBALL}" + tar -I "xz -9 -T0" -cf ${TARBALL} -C ${INSTALL_ROOT} . 
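+  # (xz -9 -T0: maximum compression, with one xz thread per available core.)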
+}
+
+ExtractPackageXz() {
+  local src_file="$1"
+  local dst_file="$2"
+  local repo="$3"
+  xzcat "${src_file}" | egrep '^(Package:|Filename:|SHA256:) ' |
+    sed "s|Filename: |Filename: ${repo}|" > "${dst_file}"
+}
+
+GeneratePackageListDistRepo() {
+  local arch="$1"
+  local repo="$2"
+  local dist="$3"
+  local repo_name="$4"
+
+  local tmp_package_list="${BUILD_DIR}/Packages.${dist}_${repo_name}_${arch}"
+  local repo_basedir="${repo}/dists/${dist}"
+  local package_list="${BUILD_DIR}/Packages.${dist}_${repo_name}_${arch}.${PACKAGES_EXT}"
+  local package_file_arch="${repo_name}/binary-${arch}/Packages.${PACKAGES_EXT}"
+  local package_list_arch="${repo_basedir}/${package_file_arch}"
+
+  DownloadOrCopyNonUniqueFilename "${package_list_arch}" "${package_list}"
+  VerifyPackageListing "${package_file_arch}" "${package_list}" ${repo} ${dist}
+  ExtractPackageXz "${package_list}" "${tmp_package_list}" ${repo}
+  cat "${tmp_package_list}" | ./merge-package-lists.py "${list_base}"
+}
+
+GeneratePackageListDist() {
+  local arch="$1"
+  set -- $2
+  local repo="$1"
+  local dist="$2"
+  shift 2
+  while (( "$#" )); do
+    GeneratePackageListDistRepo "$arch" "$repo" "$dist" "$1"
+    shift
+  done
+}
+
+GeneratePackageListCommon() {
+  local output_file="$1"
+  local arch="$2"
+  local packages="$3"
+
+  local list_base="${BUILD_DIR}/Packages.${DIST}_${arch}"
+  > "${list_base}" # Create (or truncate) a zero-length file.
+  printf '%s\n' "${APT_SOURCES_LIST[@]}" | while read source; do
+    GeneratePackageListDist "${arch}" "${source}"
+  done
+
+  GeneratePackageList "${list_base}" "${output_file}" "${packages}"
+}
+
+GeneratePackageListAmd64() {
+  GeneratePackageListCommon "$1" amd64 "${DEBIAN_PACKAGES}
+    ${DEBIAN_PACKAGES_X86:=} ${DEBIAN_PACKAGES_AMD64:=}"
+}
+
+GeneratePackageListI386() {
+  GeneratePackageListCommon "$1" i386 "${DEBIAN_PACKAGES}
+    ${DEBIAN_PACKAGES_X86:=}"
+}
+
+GeneratePackageListARM() {
+  GeneratePackageListCommon "$1" armhf "${DEBIAN_PACKAGES}
+    ${DEBIAN_PACKAGES_ARM:=}"
+}
+
+GeneratePackageListARM64() {
+  GeneratePackageListCommon "$1" arm64 "${DEBIAN_PACKAGES}
+    ${DEBIAN_PACKAGES_ARM64:=}"
+}
+
+GeneratePackageListARMEL() {
+  GeneratePackageListCommon "$1" armel "${DEBIAN_PACKAGES}
+    ${DEBIAN_PACKAGES_ARMEL:=}"
+}
+
+GeneratePackageListMips() {
+  GeneratePackageListCommon "$1" mipsel "${DEBIAN_PACKAGES}"
+}
+
+GeneratePackageListMips64el() {
+  GeneratePackageListCommon "$1" mips64el "${DEBIAN_PACKAGES}
+    ${DEBIAN_PACKAGES_MIPS64EL:=}"
+}
+
+StripChecksumsFromPackageList() {
+  local package_file="$1"
+  sed -i 's/ [a-f0-9]\{64\}$//' "$package_file"
+}
+
+######################################################################
+#
+######################################################################
+
+HacksAndPatchesCommon() {
+  local arch=$1
+  local os=$2
+  Banner "Misc Hacks & Patches"
+
+  # Remove an unnecessary dependency on qtchooser.
+  rm "${INSTALL_ROOT}/usr/lib/${arch}-${os}/qt-default/qtchooser/default.conf"
+
+  # libxcomposite1 is missing a symbols file.
+  cp "${SCRIPT_DIR}/libxcomposite1-symbols" \
+    "${INSTALL_ROOT}/debian/libxcomposite1/DEBIAN/symbols"
+
+  # __GLIBC_MINOR__ is used as a feature test macro. Replace it with the
+  # earliest supported version of glibc (2.26, obtained from the oldest glibc
+  # version in //chrome/installer/linux/debian/dist_package_versions.json and
+  # //chrome/installer/linux/rpm/dist_package_provides.json).
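+  # For example, a features.h line such as
+  #   #define __GLIBC_MINOR__ 31
+  # is rewritten by the sed command below to
+  #   #define __GLIBC_MINOR__ 26 // 31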
+ local usr_include="${INSTALL_ROOT}/usr/include" + local features_h="${usr_include}/features.h" + sed -i 's|\(#define\s\+__GLIBC_MINOR__\)|\1 26 //|' "${features_h}" + + # fcntl64() was introduced in glibc 2.28. Make sure to use fcntl() instead. + local fcntl_h="${INSTALL_ROOT}/usr/include/fcntl.h" + sed -i '{N; s/#ifndef __USE_FILE_OFFSET64\(\nextern int fcntl\)/#if 1\1/}' \ + "${fcntl_h}" + + # Do not use pthread_cond_clockwait as it was introduced in glibc 2.30. + local cppconfig_h="${usr_include}/${arch}-${os}/c++/10/bits/c++config.h" + sed -i 's|\(#define\s\+_GLIBCXX_USE_PTHREAD_COND_CLOCKWAIT\)|// \1|' \ + "${cppconfig_h}" + + # This is for chrome's ./build/linux/pkg-config-wrapper + # which overwrites PKG_CONFIG_LIBDIR internally + SubBanner "Move pkgconfig scripts" + mkdir -p ${INSTALL_ROOT}/usr/lib/pkgconfig + mv ${INSTALL_ROOT}/usr/lib/${arch}-${os}/pkgconfig/* \ + ${INSTALL_ROOT}/usr/lib/pkgconfig +} + + +ReversionGlibc() { + local arch=$1 + local os=$2 + + # Avoid requiring unsupported glibc versions. + "${SCRIPT_DIR}/reversion_glibc.py" \ + "${INSTALL_ROOT}/lib/${arch}-${os}/libc.so.6" + "${SCRIPT_DIR}/reversion_glibc.py" \ + "${INSTALL_ROOT}/lib/${arch}-${os}/libm.so.6" + "${SCRIPT_DIR}/reversion_glibc.py" \ + "${INSTALL_ROOT}/lib/${arch}-${os}/libcrypt.so.1" +} + + +HacksAndPatchesAmd64() { + HacksAndPatchesCommon x86_64 linux-gnu + ReversionGlibc x86_64 linux-gnu +} + + +HacksAndPatchesI386() { + HacksAndPatchesCommon i386 linux-gnu + ReversionGlibc i386 linux-gnu +} + + +HacksAndPatchesARM() { + HacksAndPatchesCommon arm linux-gnueabihf + ReversionGlibc arm linux-gnueabihf +} + +HacksAndPatchesARM64() { + HacksAndPatchesCommon aarch64 linux-gnu + ReversionGlibc aarch64 linux-gnu +} + +HacksAndPatchesARMEL() { + HacksAndPatchesCommon arm linux-gnueabi + ReversionGlibc arm linux-gnueabi +} + +HacksAndPatchesMips() { + HacksAndPatchesCommon mipsel linux-gnu + ReversionGlibc mipsel linux-gnu +} + + +HacksAndPatchesMips64el() { + HacksAndPatchesCommon mips64el linux-gnuabi64 + ReversionGlibc mips64el linux-gnuabi64 +} + + +InstallIntoSysroot() { + Banner "Install Libs And Headers Into Jail" + + mkdir -p ${BUILD_DIR}/debian-packages + # The /debian directory is an implementation detail that's used to cd into + # when running dpkg-shlibdeps. + mkdir -p ${INSTALL_ROOT}/debian + # An empty control file is necessary to run dpkg-shlibdeps. + touch ${INSTALL_ROOT}/debian/control + while (( "$#" )); do + local file="$1" + local package="${BUILD_DIR}/debian-packages/${file##*/}" + shift + local sha256sum="$1" + shift + if [ "${#sha256sum}" -ne "64" ]; then + echo "Bad sha256sum from package list" + exit 1 + fi + + Banner "Installing $(basename ${file})" + DownloadOrCopy ${file} ${package} + if [ ! -s "${package}" ] ; then + echo + echo "ERROR: bad package ${package}" + exit 1 + fi + echo "${sha256sum} ${package}" | sha256sum --quiet -c + + SubBanner "Extracting to ${INSTALL_ROOT}" + dpkg-deb -x ${package} ${INSTALL_ROOT} + + base_package=$(dpkg-deb --field ${package} Package) + mkdir -p ${INSTALL_ROOT}/debian/${base_package}/DEBIAN + dpkg-deb -e ${package} ${INSTALL_ROOT}/debian/${base_package}/DEBIAN + done + + # Prune /usr/share, leaving only pkgconfig, wayland, and wayland-protocols. 
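+  # (Everything else under /usr/share is documentation, locale data, and
+  # similar files that the compiler and linker do not read; pruning it,
+  # presumably, keeps the sysroot tarball small.)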
+ ls -d ${INSTALL_ROOT}/usr/share/* | \ + grep -v "/\(pkgconfig\|wayland\|wayland-protocols\)$" | xargs rm -r +} + + +CleanupJailSymlinks() { + Banner "Jail symlink cleanup" + + SAVEDPWD=$(pwd) + cd ${INSTALL_ROOT} + local libdirs="lib usr/lib" + if [ "${ARCH}" != "MIPS" ]; then + libdirs="${libdirs} lib64" + fi + + find $libdirs -type l -printf '%p %l\n' | while read link target; do + # skip links with non-absolute paths + echo "${target}" | grep -qs ^/ || continue + echo "${link}: ${target}" + # Relativize the symlink. + prefix=$(echo "${link}" | sed -e 's/[^/]//g' | sed -e 's|/|../|g') + ln -snfv "${prefix}${target}" "${link}" + done + + failed=0 + while read link target; do + # Make sure we catch new bad links. + if [ ! -r "${link}" ]; then + echo "ERROR: FOUND BAD LINK ${link}" + ls -l ${link} + failed=1 + fi + done < <(find $libdirs -type l -printf '%p %l\n') + if [ $failed -eq 1 ]; then + exit 1 + fi + cd "$SAVEDPWD" +} + + +VerifyLibraryDepsCommon() { + local arch=$1 + local os=$2 + local find_dirs=( + "${INSTALL_ROOT}/lib/" + "${INSTALL_ROOT}/lib/${arch}-${os}/" + "${INSTALL_ROOT}/usr/lib/${arch}-${os}/" + ) + local needed_libs="$( + find ${find_dirs[*]} -name "*\.so*" -type f -exec file {} \; | \ + grep ': ELF' | sed 's/^\(.*\): .*$/\1/' | xargs readelf -d | \ + grep NEEDED | sort | uniq | sed 's/^.*Shared library: \[\(.*\)\]$/\1/g')" + local all_libs="$(find ${find_dirs[*]} -printf '%f\n')" + # Ignore missing libdbus-1.so.0 + all_libs+="$(echo -e '\nlibdbus-1.so.0')" + local missing_libs="$(grep -vFxf <(echo "${all_libs}") \ + <(echo "${needed_libs}"))" + if [ ! -z "${missing_libs}" ]; then + echo "Missing libraries:" + echo "${missing_libs}" + exit 1 + fi +} + + +VerifyLibraryDepsAmd64() { + VerifyLibraryDepsCommon x86_64 linux-gnu +} + + +VerifyLibraryDepsI386() { + VerifyLibraryDepsCommon i386 linux-gnu +} + + +VerifyLibraryDepsARM() { + VerifyLibraryDepsCommon arm linux-gnueabihf +} + + +VerifyLibraryDepsARM64() { + VerifyLibraryDepsCommon aarch64 linux-gnu +} + +VerifyLibraryDepsARMEL() { + VerifyLibraryDepsCommon arm linux-gnueabi +} + +VerifyLibraryDepsMips() { + VerifyLibraryDepsCommon mipsel linux-gnu +} + + +VerifyLibraryDepsMips64el() { + VerifyLibraryDepsCommon mips64el linux-gnuabi64 +} + + +#@ +#@ BuildSysrootAmd64 +#@ +#@ Build everything and package it +BuildSysrootAmd64() { + if [ "$HAS_ARCH_AMD64" = "0" ]; then + return + fi + ClearInstallDir + local package_file="${DEBIAN_DEP_LIST_AMD64}" + GeneratePackageListAmd64 "$package_file" + local files_and_sha256sums="$(cat ${package_file})" + StripChecksumsFromPackageList "$package_file" + InstallIntoSysroot ${files_and_sha256sums} + HacksAndPatchesAmd64 + CleanupJailSymlinks + VerifyLibraryDepsAmd64 + CreateTarBall +} + +#@ +#@ BuildSysrootI386 +#@ +#@ Build everything and package it +BuildSysrootI386() { + if [ "$HAS_ARCH_I386" = "0" ]; then + return + fi + ClearInstallDir + local package_file="${DEBIAN_DEP_LIST_I386}" + GeneratePackageListI386 "$package_file" + local files_and_sha256sums="$(cat ${package_file})" + StripChecksumsFromPackageList "$package_file" + InstallIntoSysroot ${files_and_sha256sums} + HacksAndPatchesI386 + CleanupJailSymlinks + VerifyLibraryDepsI386 + CreateTarBall +} + +#@ +#@ BuildSysrootARM +#@ +#@ Build everything and package it +BuildSysrootARM() { + if [ "$HAS_ARCH_ARM" = "0" ]; then + return + fi + ClearInstallDir + local package_file="${DEBIAN_DEP_LIST_ARM}" + GeneratePackageListARM "$package_file" + local files_and_sha256sums="$(cat ${package_file})" + StripChecksumsFromPackageList 
"$package_file" + InstallIntoSysroot ${files_and_sha256sums} + HacksAndPatchesARM + CleanupJailSymlinks + VerifyLibraryDepsARM + CreateTarBall +} + +#@ +#@ BuildSysrootARM64 +#@ +#@ Build everything and package it +BuildSysrootARM64() { + if [ "$HAS_ARCH_ARM64" = "0" ]; then + return + fi + ClearInstallDir + local package_file="${DEBIAN_DEP_LIST_ARM64}" + GeneratePackageListARM64 "$package_file" + local files_and_sha256sums="$(cat ${package_file})" + StripChecksumsFromPackageList "$package_file" + InstallIntoSysroot ${files_and_sha256sums} + HacksAndPatchesARM64 + CleanupJailSymlinks + VerifyLibraryDepsARM64 + CreateTarBall +} + +#@ +#@ BuildSysrootARMEL +#@ +#@ Build everything and package it +BuildSysrootARMEL() { + if [ "$HAS_ARCH_ARMEL" = "0" ]; then + return + fi + ClearInstallDir + local package_file="${DEBIAN_DEP_LIST_ARMEL}" + GeneratePackageListARMEL "$package_file" + local files_and_sha256sums="$(cat ${package_file})" + StripChecksumsFromPackageList "$package_file" + InstallIntoSysroot ${files_and_sha256sums} + HacksAndPatchesARMEL + CleanupJailSymlinks + VerifyLibraryDepsARMEL + CreateTarBall +} + +#@ +#@ BuildSysrootMips +#@ +#@ Build everything and package it +BuildSysrootMips() { + if [ "$HAS_ARCH_MIPS" = "0" ]; then + return + fi + ClearInstallDir + local package_file="${DEBIAN_DEP_LIST_MIPS}" + GeneratePackageListMips "$package_file" + local files_and_sha256sums="$(cat ${package_file})" + StripChecksumsFromPackageList "$package_file" + InstallIntoSysroot ${files_and_sha256sums} + HacksAndPatchesMips + CleanupJailSymlinks + VerifyLibraryDepsMips + CreateTarBall +} + +#@ +#@ BuildSysrootMips64el +#@ +#@ Build everything and package it +BuildSysrootMips64el() { + if [ "$HAS_ARCH_MIPS64EL" = "0" ]; then + return + fi + ClearInstallDir + local package_file="${DEBIAN_DEP_LIST_MIPS64EL}" + GeneratePackageListMips64el "$package_file" + local files_and_sha256sums="$(cat ${package_file})" + StripChecksumsFromPackageList "$package_file" + InstallIntoSysroot ${files_and_sha256sums} + HacksAndPatchesMips64el + CleanupJailSymlinks + VerifyLibraryDepsMips64el + CreateTarBall +} + +#@ +#@ BuildSysrootAll +#@ +#@ Build sysroot images for all architectures +BuildSysrootAll() { + RunCommand BuildSysrootAmd64 + RunCommand BuildSysrootI386 + RunCommand BuildSysrootARM + RunCommand BuildSysrootARM64 + RunCommand BuildSysrootARMEL + RunCommand BuildSysrootMips + RunCommand BuildSysrootMips64el +} + +UploadSysroot() { + local sha=$(sha1sum "${TARBALL}" | awk '{print $1;}') + set -x + gsutil.py cp -a public-read "${TARBALL}" \ + "gs://chrome-linux-sysroot/toolchain/$sha/" + set +x +} + +#@ +#@ UploadSysrootAmd64 +#@ +UploadSysrootAmd64() { + if [ "$HAS_ARCH_AMD64" = "0" ]; then + return + fi + UploadSysroot "$@" +} + +#@ +#@ UploadSysrootI386 +#@ +UploadSysrootI386() { + if [ "$HAS_ARCH_I386" = "0" ]; then + return + fi + UploadSysroot "$@" +} + +#@ +#@ UploadSysrootARM +#@ +UploadSysrootARM() { + if [ "$HAS_ARCH_ARM" = "0" ]; then + return + fi + UploadSysroot "$@" +} + +#@ +#@ UploadSysrootARM64 +#@ +UploadSysrootARM64() { + if [ "$HAS_ARCH_ARM64" = "0" ]; then + return + fi + UploadSysroot "$@" +} + +#@ +#@ UploadSysrootARMEL +#@ +UploadSysrootARMEL() { + if [ "$HAS_ARCH_ARMEL" = "0" ]; then + return + fi + UploadSysroot "$@" +} + +#@ +#@ UploadSysrootMips +#@ +UploadSysrootMips() { + if [ "$HAS_ARCH_MIPS" = "0" ]; then + return + fi + UploadSysroot "$@" +} + +#@ +#@ UploadSysrootMips64el +#@ +UploadSysrootMips64el() { + if [ "$HAS_ARCH_MIPS64EL" = "0" ]; then + return + fi + UploadSysroot "$@" 
+}
+
+#@
+#@ UploadSysrootAll
+#@
+#@ Upload sysroot images for all architectures
+UploadSysrootAll() {
+  RunCommand UploadSysrootAmd64 "$@"
+  RunCommand UploadSysrootI386 "$@"
+  RunCommand UploadSysrootARM "$@"
+  RunCommand UploadSysrootARM64 "$@"
+  RunCommand UploadSysrootARMEL "$@"
+  RunCommand UploadSysrootMips "$@"
+  RunCommand UploadSysrootMips64el "$@"
+}
+
+#
+# CheckForDebianGPGKeyring
+#
+# Make sure the Debian GPG keys exist. Otherwise print a helpful message.
+#
+CheckForDebianGPGKeyring() {
+  if [ ! -e "$KEYRING_FILE" ]; then
+    echo "KEYRING_FILE not found: ${KEYRING_FILE}"
+    echo "Debian GPG keys missing. Install the debian-archive-keyring package."
+    exit 1
+  fi
+}
+
+#
+# VerifyPackageListing
+#
+# Verifies that the downloaded Packages.xz file has the right checksums.
+#
+VerifyPackageListing() {
+  local file_path="$1"
+  local output_file="$2"
+  local repo="$3"
+  local dist="$4"
+
+  local repo_basedir="${repo}/dists/${dist}"
+  local release_list="${repo_basedir}/${RELEASE_FILE}"
+  local release_list_gpg="${repo_basedir}/${RELEASE_FILE_GPG}"
+
+  local release_file="${BUILD_DIR}/${dist}-${RELEASE_FILE}"
+  local release_file_gpg="${BUILD_DIR}/${dist}-${RELEASE_FILE_GPG}"
+
+  CheckForDebianGPGKeyring
+
+  DownloadOrCopyNonUniqueFilename ${release_list} ${release_file}
+  DownloadOrCopyNonUniqueFilename ${release_list_gpg} ${release_file_gpg}
+  echo "Verifying: ${release_file} with ${release_file_gpg}"
+  set -x
+  gpgv --keyring "${KEYRING_FILE}" "${release_file_gpg}" "${release_file}"
+  set +x
+
+  echo "Verifying: ${output_file}"
+  local sha256sum=$(grep -E "${file_path}\$|:\$" "${release_file}" | \
+    grep "SHA256:" -A 1 | xargs echo | awk '{print $2;}')
+
+  if [ "${#sha256sum}" -ne "64" ]; then
+    echo "Bad sha256sum from ${release_list}"
+    exit 1
+  fi
+
+  echo "${sha256sum} ${output_file}" | sha256sum --quiet -c
+}
+
+#
+# GeneratePackageList
+#
+# Looks up package names in ${BUILD_DIR}/Packages and writes a list of URLs
+# to the output file.
+#
+GeneratePackageList() {
+  local input_file="$1"
+  local output_file="$2"
+  echo "Updating: ${output_file} from ${input_file}"
+  /bin/rm -f "${output_file}"
+  shift
+  shift
+  local failed=0
+  # No quotes around $@: rely on word splitting to iterate over the
+  # whitespace-separated package names passed as a single argument.
+  for pkg in $@ ; do
+    local pkg_full=$(grep -A 1 " ${pkg}\$" "$input_file" | \
+      egrep "pool/.*" | sed 's/.*Filename: //')
+    if [ -z "${pkg_full}" ]; then
+      echo "ERROR: missing package: $pkg"
+      local failed=1
+    else
+      local sha256sum=$(grep -A 4 " ${pkg}\$" "$input_file" | \
+        grep ^SHA256: | sed 's/^SHA256: //')
+      if [ "${#sha256sum}" -ne "64" ]; then
+        echo "Bad sha256sum from Packages"
+        local failed=1
+      fi
+      echo $pkg_full $sha256sum >> "$output_file"
+    fi
+  done
+  if [ $failed -eq 1 ]; then
+    exit 1
+  fi
+  # sort -o does an in-place sort of this file.
+  sort "$output_file" -o "$output_file"
+}
+
+#@
+#@ PrintArchitectures
+#@
+#@ Prints supported architectures.
+PrintArchitectures() {
+  if [ "$HAS_ARCH_AMD64" = "1" ]; then
+    echo Amd64
+  fi
+  if [ "$HAS_ARCH_I386" = "1" ]; then
+    echo I386
+  fi
+  if [ "$HAS_ARCH_ARM" = "1" ]; then
+    echo ARM
+  fi
+  if [ "$HAS_ARCH_ARM64" = "1" ]; then
+    echo ARM64
+  fi
+  if [ "$HAS_ARCH_ARMEL" = "1" ]; then
+    echo ARMEL
+  fi
+  if [ "$HAS_ARCH_MIPS" = "1" ]; then
+    echo Mips
+  fi
+  if [ "$HAS_ARCH_MIPS64EL" = "1" ]; then
+    echo Mips64el
+  fi
+}
+
+#@
+#@ PrintDistro
+#@
+#@ Prints the distro, e.g. ubuntu
+PrintDistro() {
+  echo ${DISTRO}
+}
+
+#@
+#@ PrintRelease
+#@
+#@ Prints the distro release, e.g. bullseye
+PrintRelease() {
+  echo ${DIST}
+}
+
+#@
+#@ PrintKey
+#@
+#@ Prints the sysroot key identifier.
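+#@ (e.g. "20230329T085712Z-1"; the value is
+#@ "${ARCHIVE_TIMESTAMP}-${SYSROOT_RELEASE}" and matches the "Key"
+#@ entries in sysroots.json).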
+PrintKey() { + echo "${ARCHIVE_TIMESTAMP}-${SYSROOT_RELEASE}" +} + +RunCommand() { + SetEnvironmentVariables "$1" + SanityCheck + "$@" +} + +if [ $# -eq 0 ] ; then + echo "ERROR: you must specify a mode on the commandline" + echo + Usage + exit 1 +elif [ "$(type -t $1)" != "function" ]; then + echo "ERROR: unknown function '$1'." >&2 + echo "For help, try:" + echo " $0 help" + exit 1 +else + ChangeDirectory + if echo $1 | grep -qs --regexp='\(^Print\)\|\(All$\)'; then + "$@" + else + RunCommand "$@" + fi +fi diff --git a/linux/sysroot_scripts/sysroots.json b/linux/sysroot_scripts/sysroots.json new file mode 100644 index 000000000000..02004260f7b8 --- /dev/null +++ b/linux/sysroot_scripts/sysroots.json @@ -0,0 +1,44 @@ +{ + "bullseye_amd64": { + "Key": "20230329T085712Z-1", + "Sha1Sum": "f5f68713249b52b35db9e08f67184cac392369ab", + "SysrootDir": "debian_bullseye_amd64-sysroot", + "Tarball": "debian_bullseye_amd64_sysroot.tar.xz" + }, + "bullseye_arm": { + "Key": "20230329T085712Z-1", + "Sha1Sum": "f80db01d7b3b973ca2aeeed000aa43bd0c082f15", + "SysrootDir": "debian_bullseye_arm-sysroot", + "Tarball": "debian_bullseye_arm_sysroot.tar.xz" + }, + "bullseye_arm64": { + "Key": "20230329T085712Z-1", + "Sha1Sum": "80fc74e431f37f590d0c85f16a9d8709088929e8", + "SysrootDir": "debian_bullseye_arm64-sysroot", + "Tarball": "debian_bullseye_arm64_sysroot.tar.xz" + }, + "bullseye_armel": { + "Key": "20230329T085712Z-1", + "Sha1Sum": "37801ea4e948feabd69c74390c4b80e932b63de0", + "SysrootDir": "debian_bullseye_armel-sysroot", + "Tarball": "debian_bullseye_armel_sysroot.tar.xz" + }, + "bullseye_i386": { + "Key": "20230329T085712Z-1", + "Sha1Sum": "b6c18d06d79c0abb870a126a3ae5f8086e355e5f", + "SysrootDir": "debian_bullseye_i386-sysroot", + "Tarball": "debian_bullseye_i386_sysroot.tar.xz" + }, + "bullseye_mips": { + "Key": "20230329T085712Z-1", + "Sha1Sum": "fed66c435eeb0bd71e9394a0fb0c4e078d90ea50", + "SysrootDir": "debian_bullseye_mips-sysroot", + "Tarball": "debian_bullseye_mips_sysroot.tar.xz" + }, + "bullseye_mips64el": { + "Key": "20230329T085712Z-1", + "Sha1Sum": "37e23cd7512b3c4d0dacbc5d253f3a496c38f5fb", + "SysrootDir": "debian_bullseye_mips64el-sysroot", + "Tarball": "debian_bullseye_mips64el_sysroot.tar.xz" + } +} diff --git a/linux/sysroot_scripts/update-archive-timestamp.sh b/linux/sysroot_scripts/update-archive-timestamp.sh new file mode 100755 index 000000000000..ff5adf16915a --- /dev/null +++ b/linux/sysroot_scripts/update-archive-timestamp.sh @@ -0,0 +1,18 @@ +#!/bin/bash +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This script updates sysroot-creator-*.sh with the timestamp of the latest +# snapshot from snapshot.debian.org. + +SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ARCHIVE_URL="http://snapshot.debian.org/archive/debian" + +# Use 9999-01-01 as the date so that we get a redirect to the page with the +# latest timestamp. 
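+# The response embeds URLs of the form "${ARCHIVE_URL}/<timestamp>/pool/"
+# (e.g. ".../20230329T085712Z/pool/"); the sed expression below extracts the
+# timestamp component from the first such URL.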
+TIMESTAMP=$(curl -s "${ARCHIVE_URL}/99990101T000000Z/pool/" | \
+  sed -n "s|.*${ARCHIVE_URL}/\([[:digit:]TZ]\+\)/pool/.*|\1|p" | head -n 1)
+
+sed -i "s/ARCHIVE_TIMESTAMP=.*$/ARCHIVE_TIMESTAMP=${TIMESTAMP}/" \
+  "${SCRIPT_DIR}"/sysroot-creator-*.sh
diff --git a/linux/unbundle/README b/linux/unbundle/README
new file mode 100644
index 000000000000..b6b6321b3848
--- /dev/null
+++ b/linux/unbundle/README
@@ -0,0 +1,57 @@
+This directory contains files that make it possible for Linux
+distributions to build Chromium using system libraries and exclude the
+source code for Chromium's bundled copies of system libraries in a
+consistent manner. Nothing here is used in normal developer builds.
+
+
+For more info on the Linux distros' philosophy on bundling system
+libraries and why this exists, please read the following:
+
+ - https://fedoraproject.org/wiki/Packaging:No_Bundled_Libraries
+ - https://wiki.gentoo.org/wiki/Why_not_bundle_dependencies
+ - http://www.debian.org/doc/debian-policy/ch-source.html#s-embeddedfiles
+
+For more Chromium-specific context, please read
+http://spot.livejournal.com/312320.html .
+
+Additional resources which might provide even more context:
+
+ - http://events.linuxfoundation.org/sites/events/files/slides/LinuxCon%202014%20Slides_0.pdf
+ - https://lwn.net/Articles/619158/
+
+
+This directory is provided in the source tree so one can follow the
+above guidelines without having to download additional tools or worry
+about having the right version of the tool. It is a compromise solution
+which takes into account Chromium developers who want to avoid the
+perceived burden of more conditional code in build files, and the
+expectations of the Open Source community, where using system-provided
+libraries is the norm.
+
+Usage:
+
+1. remove_bundled_libraries.py
+
+   For example: remove_bundled_libraries.py third_party/zlib
+
+   The script scans sources looking for third_party directories.
+   Everything that is not explicitly preserved is removed (except for
+   GYP/GN build files), and the script fails if any directory passed on
+   the command line does not exist (to ensure the list is kept up to date).
+
+   This is intended to be used on source code extracted from a tarball,
+   not on a git repository.
+
+   NOTE: by default this will not remove anything (for safety). Pass
+   the --do-remove flag to actually remove files.
+
+2. replace_gn_files.py --system-libraries lib...
+
+   This swaps out a normal library GN build file that is intended for
+   use with a bundled library for a build file that is set up to use
+   the system library. While some build files have use_system_libfoo
+   build flags, using unbundled build files has the advantage that Linux
+   distros can build Chromium without having to specify many additional
+   build flags.
+
+   For example: replace_gn_files.py --system-libraries libxml
diff --git a/linux/unbundle/absl_algorithm.gn b/linux/unbundle/absl_algorithm.gn
new file mode 100644
index 000000000000..cc41c6861728
--- /dev/null
+++ b/linux/unbundle/absl_algorithm.gn
@@ -0,0 +1,22 @@
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+pkg_config("system_absl_algorithm_container") {
+  packages = [ "absl_algorithm_container" ]
+}
+
+shim_headers("container_shim") {
+  root_path = "."
+ prefix = "absl/algorithm/" + headers = [ "container.h" ] +} + +source_set("container") { + deps = [ ":container_shim" ] + public_configs = [ ":system_absl_algorithm_container" ] +} + +source_set("algorithm_test") { +} +source_set("container_test") { +} diff --git a/linux/unbundle/absl_base.gn b/linux/unbundle/absl_base.gn new file mode 100644 index 000000000000..d83e9786a315 --- /dev/null +++ b/linux/unbundle/absl_base.gn @@ -0,0 +1,67 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_base") { + packages = [ "absl_base" ] +} + +pkg_config("system_absl_config") { + packages = [ "absl_config" ] +} + +pkg_config("system_absl_core_headers") { + packages = [ "absl_core_headers" ] +} + +shim_headers("base_shim") { + root_path = "." + prefix = "absl/base/" + headers = [ + "call_once.h", + "casts.h", + ] +} + +source_set("base") { + deps = [ ":base_shim" ] + public_configs = [ ":system_absl_base" ] +} + +shim_headers("config_shim") { + root_path = "." + prefix = "absl/base/" + headers = [ + "config.h", + "options.h", + "policy_checks.h", + ] +} + +source_set("config") { + deps = [ ":config_shim" ] + public_configs = [ ":system_absl_config" ] +} + +shim_headers("core_headers_shim") { + root_path = "." + prefix = "absl/base/" + headers = [ + "attributes.h", + "const_init.h", + "macros.h", + "optimization.h", + "port.h", + "thread_annotations.h", + ] +} + +source_set("core_headers") { + deps = [ ":core_headers_shim" ] + public_configs = [ ":system_absl_core_headers" ] +} + +source_set("config_test") { +} + +source_set("prefetch_test") { +} diff --git a/linux/unbundle/absl_cleanup.gn b/linux/unbundle/absl_cleanup.gn new file mode 100644 index 000000000000..61455edf31ff --- /dev/null +++ b/linux/unbundle/absl_cleanup.gn @@ -0,0 +1,20 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_cleanup") { + packages = [ "absl_cleanup" ] +} + +shim_headers("cleanup_shim") { + root_path = "." + prefix = "absl/cleanup/" + headers = [ "cleanup.h" ] +} + +source_set("cleanup") { + deps = [ ":cleanup_shim" ] + public_configs = [ ":system_absl_cleanup" ] +} + +source_set("cleanup_test") { +} diff --git a/linux/unbundle/absl_container.gn b/linux/unbundle/absl_container.gn new file mode 100644 index 000000000000..4e9796a83b4a --- /dev/null +++ b/linux/unbundle/absl_container.gn @@ -0,0 +1,119 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_btree") { + packages = [ "absl_btree" ] +} + +pkg_config("system_absl_fixed_array") { + packages = [ "absl_fixed_array" ] +} + +pkg_config("system_absl_flat_hash_map") { + packages = [ "absl_flat_hash_map" ] +} + +pkg_config("system_absl_flat_hash_set") { + packages = [ "absl_flat_hash_set" ] +} + +pkg_config("system_absl_inlined_vector") { + packages = [ "absl_inlined_vector" ] +} + +pkg_config("system_absl_node_hash_map") { + packages = [ "absl_node_hash_map" ] +} + +pkg_config("system_absl_node_hash_set") { + packages = [ "absl_node_hash_set" ] +} + +shim_headers("btree_shim") { + root_path = "." + prefix = "absl/container/" + headers = [ + "btree_map.h", + "btree_set.h", + ] +} + +source_set("btree") { + deps = [ ":btree_shim" ] + public_configs = [ ":system_absl_btree" ] +} + +shim_headers("fixed_array_shim") { + root_path = "." 
+ prefix = "absl/container/" + headers = [ "fixed_array.h" ] +} + +source_set("fixed_array") { + deps = [ ":fixed_array_shim" ] + public_configs = [ ":system_absl_fixed_array" ] +} + +shim_headers("flat_hash_map_shim") { + root_path = "." + prefix = "absl/container/" + headers = [ "flat_hash_map.h" ] +} + +source_set("flat_hash_map") { + deps = [ ":flat_hash_map_shim" ] + public_configs = [ ":system_absl_flat_hash_map" ] +} + +shim_headers("flat_hash_set_shim") { + root_path = "." + prefix = "absl/container/" + headers = [ "flat_hash_set.h" ] +} + +source_set("flat_hash_set") { + deps = [ ":flat_hash_set_shim" ] + public_configs = [ ":system_absl_flat_hash_set" ] +} + +shim_headers("inlined_vector_shim") { + root_path = "." + prefix = "absl/container/" + headers = [ "inlined_vector.h" ] +} + +source_set("inlined_vector") { + deps = [ ":inlined_vector_shim" ] + public_configs = [ ":system_absl_inlined_vector" ] +} + +shim_headers("node_hash_map_shim") { + root_path = "." + prefix = "absl/container/" + headers = [ "node_hash_map.h" ] +} + +source_set("node_hash_map") { + deps = [ ":node_hash_map_shim" ] + public_configs = [ ":system_absl_node_hash_map" ] +} + +shim_headers("node_hash_set_shim") { + root_path = "." + prefix = "absl/container/" + headers = [ "node_hash_set.h" ] +} + +source_set("node_hash_set") { + deps = [ ":node_hash_set_shim" ] + public_configs = [ ":system_absl_node_hash_set" ] +} + +source_set("common_policy_traits_test") { +} +source_set("inlined_vector_test") { +} +source_set("node_slot_policy_test") { +} +source_set("sample_element_size_test") { +} diff --git a/linux/unbundle/absl_debugging.gn b/linux/unbundle/absl_debugging.gn new file mode 100644 index 000000000000..2c38e4357e16 --- /dev/null +++ b/linux/unbundle/absl_debugging.gn @@ -0,0 +1,47 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_failure_signal_handler") { + packages = [ "absl_failure_signal_handler" ] +} + +pkg_config("system_absl_stacktrace") { + packages = [ "absl_stacktrace" ] +} + +pkg_config("system_absl_symbolize") { + packages = [ "absl_symbolize" ] +} + +shim_headers("failure_signal_handler_shim") { + root_path = "." + prefix = "absl/debugging/" + headers = [ "failure_signal_handler.h" ] +} + +source_set("failure_signal_handler") { + deps = [ ":failure_signal_handler_shim" ] + public_configs = [ ":system_absl_failure_signal_handler" ] +} + +shim_headers("stacktrace_shim") { + root_path = "." + prefix = "absl/debugging/" + headers = [ "stacktrace.h" ] +} + +source_set("stacktrace") { + deps = [ ":stacktrace_shim" ] + public_configs = [ ":system_absl_stacktrace" ] +} + +shim_headers("symbolize_shim") { + root_path = "." + prefix = "absl/debugging/" + headers = [ "symbolize.h" ] +} + +source_set("symbolize") { + deps = [ ":symbolize_shim" ] + public_configs = [ ":system_absl_symbolize" ] +} diff --git a/linux/unbundle/absl_flags.gn b/linux/unbundle/absl_flags.gn new file mode 100644 index 000000000000..e420603fc860 --- /dev/null +++ b/linux/unbundle/absl_flags.gn @@ -0,0 +1,50 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_flags") { + packages = [ "absl_flags" ] +} + +pkg_config("system_absl_flags_parse") { + packages = [ "absl_flags_parse" ] +} + +pkg_config("system_absl_flags_usage") { + packages = [ "absl_flags_usage" ] +} + +shim_headers("flag_shim") { + root_path = "." 
+ prefix = "absl/flags/" + headers = [ + "declare.h", + "flag.h", + ] +} + +source_set("flag") { + deps = [ ":flag_shim" ] + public_configs = [ ":system_absl_flags" ] +} + +shim_headers("parse_shim") { + root_path = "." + prefix = "absl/flags/" + headers = [ "parse.h" ] +} + +source_set("parse") { + deps = [ ":parse_shim" ] + public_configs = [ ":system_absl_flags_parse" ] +} + +shim_headers("usage_shim") { + root_path = "." + prefix = "absl/flags/" + headers = [ "usage.h" ] +} + +source_set("usage") { + deps = [ ":usage_shim" ] + public_configs = [ ":system_absl_flags_usage" ] +} diff --git a/linux/unbundle/absl_functional.gn b/linux/unbundle/absl_functional.gn new file mode 100644 index 000000000000..1719587a9b7a --- /dev/null +++ b/linux/unbundle/absl_functional.gn @@ -0,0 +1,49 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_any_invocable") { + packages = [ "absl_any_invocable" ] +} + +shim_headers("any_invocable_shim") { + root_path = "." + prefix = "absl/functional/" + headers = [ "any_invocable.h" ] +} + +source_set("any_invocable") { + deps = [ ":any_invocable_shim" ] + public_configs = [ ":system_absl_any_invocable" ] +} + +pkg_config("system_absl_bind_front") { + packages = [ "absl_bind_front" ] +} + +shim_headers("bind_front_shim") { + root_path = "." + prefix = "absl/functional/" + headers = [ "bind_front.h" ] +} + +source_set("bind_front") { + deps = [ ":bind_front_shim" ] + public_configs = [ ":system_absl_bind_front" ] +} + +pkg_config("system_absl_function_ref") { + packages = [ "absl_function_ref" ] +} + +shim_headers("function_ref_shim") { + root_path = "." + prefix = "absl/functional/" + headers = [ "function_ref.h" ] +} + +source_set("function_ref") { + deps = [ ":function_ref_shim" ] + public_configs = [ ":system_absl_function_ref" ] +} + +source_set("any_invocable_test") {} diff --git a/linux/unbundle/absl_hash.gn b/linux/unbundle/absl_hash.gn new file mode 100644 index 000000000000..cb07851c8b16 --- /dev/null +++ b/linux/unbundle/absl_hash.gn @@ -0,0 +1,22 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_hash") { + packages = [ "absl_hash" ] +} + +shim_headers("hash_shim") { + root_path = "." 
+ prefix = "absl/hash/" + headers = [ "hash.h" ] +} + +source_set("hash") { + deps = [ ":hash_shim" ] + public_configs = [ ":system_absl_hash" ] +} + +source_set("hash_test") { +} +source_set("low_level_hash_test") { +} diff --git a/linux/unbundle/absl_log.gn b/linux/unbundle/absl_log.gn new file mode 100644 index 000000000000..85a09a068348 --- /dev/null +++ b/linux/unbundle/absl_log.gn @@ -0,0 +1,13 @@ +source_set("basic_log_test") {} +source_set("check_test") {} +source_set("die_if_null_test") {} +source_set("flags_test") {} +source_set("globals_test") {} +source_set("log_entry_test") {} +source_set("log_format_test") {} +source_set("log_macro_hygiene_test") {} +source_set("log_modifier_methods_test") {} +source_set("log_sink_test") {} +source_set("log_streamer_test") {} +source_set("scoped_mock_log_test") {} +source_set("stripping_test") {} diff --git a/linux/unbundle/absl_log_internal.gn b/linux/unbundle/absl_log_internal.gn new file mode 100644 index 000000000000..f58c7f861389 --- /dev/null +++ b/linux/unbundle/absl_log_internal.gn @@ -0,0 +1 @@ +source_set("stderr_log_sink_test") {} diff --git a/linux/unbundle/absl_memory.gn b/linux/unbundle/absl_memory.gn new file mode 100644 index 000000000000..5d6abe87f98f --- /dev/null +++ b/linux/unbundle/absl_memory.gn @@ -0,0 +1,20 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_memory") { + packages = [ "absl_memory" ] +} + +shim_headers("memory_shim") { + root_path = "." + prefix = "absl/memory/" + headers = [ "memory.h" ] +} + +source_set("memory") { + deps = [ ":memory_shim" ] + public_configs = [ ":system_absl_memory" ] +} + +source_set("memory_test") { +} diff --git a/linux/unbundle/absl_meta.gn b/linux/unbundle/absl_meta.gn new file mode 100644 index 000000000000..7f79a06ccb0f --- /dev/null +++ b/linux/unbundle/absl_meta.gn @@ -0,0 +1,20 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_type_traits") { + packages = [ "absl_type_traits" ] +} + +shim_headers("type_traits_shim") { + root_path = "." + prefix = "absl/meta/" + headers = [ "type_traits.h" ] +} + +source_set("type_traits") { + deps = [ ":type_traits_shim" ] + public_configs = [ ":system_absl_type_traits" ] +} + +source_set("type_traits_test") { +} diff --git a/linux/unbundle/absl_numeric.gn b/linux/unbundle/absl_numeric.gn new file mode 100644 index 000000000000..c3688f9f2ae5 --- /dev/null +++ b/linux/unbundle/absl_numeric.gn @@ -0,0 +1,32 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_bits") { + packages = [ "absl_bits" ] +} + +pkg_config("system_absl_int128") { + packages = [ "absl_int128" ] +} + +shim_headers("bits_shim") { + root_path = "." + prefix = "absl/numeric/" + headers = [ "bits.h" ] +} + +source_set("bits") { + deps = [ ":bits_shim" ] + public_configs = [ ":system_absl_bits" ] +} + +shim_headers("int128_shim") { + root_path = "." 
+ prefix = "absl/numeric/" + headers = [ "int128.h" ] +} + +source_set("int128") { + deps = [ ":int128_shim" ] + public_configs = [ ":system_absl_int128" ] +} diff --git a/linux/unbundle/absl_random.gn b/linux/unbundle/absl_random.gn new file mode 100644 index 000000000000..e52c9fcd961b --- /dev/null +++ b/linux/unbundle/absl_random.gn @@ -0,0 +1,17 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_random_random") { + packages = [ "absl_random_random" ] +} + +shim_headers("random_shim") { + root_path = "." + prefix = "absl/random/" + headers = [ "random.h" ] +} + +source_set("random") { + deps = [ ":random_shim" ] + public_configs = [ ":system_absl_random_random" ] +} diff --git a/linux/unbundle/absl_status.gn b/linux/unbundle/absl_status.gn new file mode 100644 index 000000000000..b7f40b0bf09e --- /dev/null +++ b/linux/unbundle/absl_status.gn @@ -0,0 +1,38 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_status") { + packages = [ "absl_status" ] +} + +pkg_config("system_absl_statusor") { + packages = [ "absl_statusor" ] +} + +shim_headers("status_shim") { + root_path = "." + prefix = "absl/status/" + headers = [ + "status.h", + "status_payload_printer.h", + ] +} + +source_set("status") { + deps = [ ":status_shim" ] + public_configs = [ ":system_absl_status" ] +} + +shim_headers("statusor_shim") { + root_path = "." + prefix = "absl/status/" + headers = [ "statusor.h" ] +} + +source_set("statusor") { + deps = [ ":statusor_shim" ] + public_configs = [ ":system_absl_statusor" ] +} + +source_set("statusor_test") { +} diff --git a/linux/unbundle/absl_strings.gn b/linux/unbundle/absl_strings.gn new file mode 100644 index 000000000000..f06a19bc11ca --- /dev/null +++ b/linux/unbundle/absl_strings.gn @@ -0,0 +1,93 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_cord") { + packages = [ "absl_cord" ] +} + +pkg_config("system_absl_strings") { + packages = [ "absl_strings" ] +} + +pkg_config("system_absl_str_format") { + packages = [ "absl_str_format" ] +} + +shim_headers("cord_shim") { + root_path = "." + prefix = "absl/strings/" + headers = [ "cord.h" ] +} + +source_set("cord") { + deps = [ ":cord_shim" ] + public_configs = [ ":system_absl_cord" ] +} + +shim_headers("strings_shim") { + root_path = "." + prefix = "absl/strings/" + headers = [ + "ascii.h", + "charconv.h", + "escaping.h", + "match.h", + "numbers.h", + "str_cat.h", + "str_join.h", + "str_replace.h", + "str_split.h", + "string_view.h", + "strip.h", + "substitute.h", + ] +} + +source_set("strings") { + deps = [ ":strings_shim" ] + public_configs = [ ":system_absl_strings" ] +} + +shim_headers("str_format_shim") { + root_path = "." 
+ prefix = "absl/strings/" + headers = [ "str_format.h" ] +} + +source_set("str_format") { + deps = [ ":str_format_shim" ] + public_configs = [ ":system_absl_str_format" ] +} + +source_set("ascii_test") { +} +source_set("cord_buffer_test") { +} +source_set("cord_data_edge_test") { +} +source_set("cord_rep_btree_navigator_test") { +} +source_set("cord_rep_btree_reader_test") { +} +source_set("cord_rep_btree_test") { +} +source_set("cord_rep_crc_test") { +} +source_set("cordz_functions_test") { +} +source_set("cordz_info_statistics_test") { +} +source_set("cordz_info_test") { +} +source_set("cordz_test") { +} +source_set("cordz_update_scope_test") { +} +source_set("cordz_update_tracker_test") { +} +source_set("match_test") { +} +source_set("str_replace_test") { +} +source_set("string_view_test") { +} diff --git a/linux/unbundle/absl_synchronization.gn b/linux/unbundle/absl_synchronization.gn new file mode 100644 index 000000000000..60bcf94213fd --- /dev/null +++ b/linux/unbundle/absl_synchronization.gn @@ -0,0 +1,22 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_synchronization") { + packages = [ "absl_synchronization" ] +} + +shim_headers("synchronization_shim") { + root_path = "." + prefix = "absl/synchronization/" + headers = [ + "barrier.h", + "blocking_counter.h", + "mutex.h", + "notification.h", + ] +} + +source_set("synchronization") { + deps = [ ":synchronization_shim" ] + public_configs = [ ":system_absl_synchronization" ] +} diff --git a/linux/unbundle/absl_time.gn b/linux/unbundle/absl_time.gn new file mode 100644 index 000000000000..df5cd20f4995 --- /dev/null +++ b/linux/unbundle/absl_time.gn @@ -0,0 +1,21 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_time") { + packages = [ "absl_time" ] +} + +shim_headers("time_shim") { + root_path = "." + prefix = "absl/time/" + headers = [ + "civil_time.h", + "clock.h", + "time.h", + ] +} + +source_set("time") { + deps = [ ":time_shim" ] + public_configs = [ ":system_absl_time" ] +} diff --git a/linux/unbundle/absl_types.gn b/linux/unbundle/absl_types.gn new file mode 100644 index 000000000000..4bb77f1b631c --- /dev/null +++ b/linux/unbundle/absl_types.gn @@ -0,0 +1,97 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_any") { + packages = [ "absl_any" ] +} + +pkg_config("system_absl_bad_any_cast") { + packages = [ "absl_bad_any_cast" ] +} + +pkg_config("system_absl_bad_optional_access") { + packages = [ "absl_bad_optional_access" ] +} + +pkg_config("system_absl_optional") { + packages = [ "absl_optional" ] +} + +pkg_config("system_absl_span") { + packages = [ "absl_span" ] +} + +pkg_config("system_absl_variant") { + packages = [ "absl_variant" ] +} + +shim_headers("any_shim") { + root_path = "." + prefix = "absl/types/" + headers = [ "any.h" ] +} + +source_set("any") { + deps = [ ":any_shim" ] + public_configs = [ ":system_absl_any" ] +} + +shim_headers("bad_any_cast_shim") { + root_path = "." + prefix = "absl/types/" + headers = [ "bad_any_cast.h" ] +} + +source_set("bad_any_cast") { + deps = [ ":bad_any_cast_shim" ] + public_configs = [ ":system_absl_bad_any_cast" ] +} + +shim_headers("bad_optional_access_shim") { + root_path = "." 
+ prefix = "absl/types/" + headers = [ "bad_optional_access.h" ] +} + +source_set("bad_optional_access") { + deps = [ ":bad_optional_access_shim" ] + public_configs = [ ":system_absl_bad_optional_access" ] +} + +shim_headers("optional_shim") { + root_path = "." + prefix = "absl/types/" + headers = [ "optional.h" ] +} + +source_set("optional") { + deps = [ ":optional_shim" ] + public_configs = [ ":system_absl_optional" ] +} + +shim_headers("span_shim") { + root_path = "." + prefix = "absl/types/" + headers = [ "span.h" ] +} + +source_set("span") { + deps = [ ":span_shim" ] + public_configs = [ ":system_absl_span" ] +} + +shim_headers("variant_shim") { + root_path = "." + prefix = "absl/types/" + headers = [ "variant.h" ] +} + +source_set("variant") { + deps = [ ":variant_shim" ] + public_configs = [ ":system_absl_variant" ] +} + +source_set("optional_test") { +} +source_set("variant_test") { +} diff --git a/linux/unbundle/absl_utility.gn b/linux/unbundle/absl_utility.gn new file mode 100644 index 000000000000..8dda6ed01afa --- /dev/null +++ b/linux/unbundle/absl_utility.gn @@ -0,0 +1,17 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_absl_utility") { + packages = [ "absl_utility" ] +} + +shim_headers("utility_shim") { + root_path = "." + prefix = "absl/utility/" + headers = [ "utility.h" ] +} + +source_set("utility") { + deps = [ ":utility_shim" ] + public_configs = [ ":system_absl_utility" ] +} diff --git a/linux/unbundle/brotli.gn b/linux/unbundle/brotli.gn new file mode 100644 index 000000000000..09f55d1ada19 --- /dev/null +++ b/linux/unbundle/brotli.gn @@ -0,0 +1,35 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_brotli_dec") { + packages = [ "libbrotlidec" ] +} + +pkg_config("system_brotli_enc") { + packages = [ "libbrotlienc" ] +} + +shim_headers("brotli_shim") { + root_path = "include" + headers = [ + "brotli/decode.h", + "brotli/encode.h", + "brotli/port.h", + "brotli/types.h", + ] +} + +source_set("dec") { + deps = [ ":brotli_shim" ] + public_configs = [ ":system_brotli_dec" ] +} + +source_set("enc") { + deps = [ ":brotli_shim" ] + public_configs = [ ":system_brotli_enc" ] +} + +copy("brotli") { + sources = [ "/usr/bin/brotli" ] + outputs = [ "$root_out_dir/brotli" ] +} diff --git a/linux/unbundle/crc32c.gn b/linux/unbundle/crc32c.gn new file mode 100644 index 000000000000..23f2292d9d49 --- /dev/null +++ b/linux/unbundle/crc32c.gn @@ -0,0 +1,11 @@ +import("//build/shim_headers.gni") + +shim_headers("crc32c_shim") { + root_path = "src/include" + headers = [ "crc32c/crc32c.h" ] +} + +source_set("crc32c") { + deps = [ ":crc32c_shim" ] + libs = [ "crc32c" ] +} diff --git a/linux/unbundle/dav1d.gn b/linux/unbundle/dav1d.gn new file mode 100644 index 000000000000..3d65158bc614 --- /dev/null +++ b/linux/unbundle/dav1d.gn @@ -0,0 +1,23 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_dav1d") { + packages = [ "dav1d" ] +} + +shim_headers("dav1d_shim") { + root_path = "libdav1d/include" + headers = [ + "dav1d/common.h", + "dav1d/data.h", + "dav1d/dav1d.h", + "dav1d/headers.h", + "dav1d/picture.h", + "dav1d/version.h", + ] +} + +source_set("dav1d") { + deps = [ ":dav1d_shim" ] + public_configs = [ ":system_dav1d" ] +} diff --git a/linux/unbundle/double-conversion.gn b/linux/unbundle/double-conversion.gn new file mode 100644 index 000000000000..8f970c5a2d5c --- /dev/null +++ b/linux/unbundle/double-conversion.gn @@ 
-0,0 +1,23 @@ +import("//build/shim_headers.gni") + +shim_headers("double_conversion_shim") { + root_path = "." + headers = [ + "double-conversion/bignum.h", + "double-conversion/cached-powers.h", + "double-conversion/diy-fp.h", + "double-conversion/double-conversion.h", + "double-conversion/double-to-string.h", + "double-conversion/fast-dtoa.h", + "double-conversion/fixed-dtoa.h", + "double-conversion/ieee.h", + "double-conversion/string-to-double.h", + "double-conversion/strtod.h", + "double-conversion/utils.h", + ] +} + +source_set("double_conversion") { + deps = [ ":double_conversion_shim" ] + libs = [ "double-conversion" ] +} diff --git a/linux/unbundle/ffmpeg.gn b/linux/unbundle/ffmpeg.gn new file mode 100644 index 000000000000..0008275dade8 --- /dev/null +++ b/linux/unbundle/ffmpeg.gn @@ -0,0 +1,38 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/buildflag_header.gni") +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_ffmpeg") { + packages = [ + "libavcodec", + "libavformat", + "libavutil", + ] +} + +buildflag_header("ffmpeg_features") { + header = "ffmpeg_features.h" + flags = [ "USE_SYSTEM_FFMPEG=true" ] +} + +shim_headers("ffmpeg_shim") { + root_path = "." + headers = [ + "libavcodec/avcodec.h", + "libavcodec/packet.h", + "libavformat/avformat.h", + "libavutil/imgutils.h", + ] +} + +source_set("ffmpeg") { + deps = [ + ":ffmpeg_features", + ":ffmpeg_shim", + ] + public_configs = [ ":system_ffmpeg" ] +} diff --git a/linux/unbundle/flac.gn b/linux/unbundle/flac.gn new file mode 100644 index 000000000000..ce9a1b3778bb --- /dev/null +++ b/linux/unbundle/flac.gn @@ -0,0 +1,30 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_flac") { + packages = [ "flac" ] +} + +shim_headers("flac_shim") { + root_path = "include" + headers = [ + "FLAC/all.h", + "FLAC/assert.h", + "FLAC/callback.h", + "FLAC/export.h", + "FLAC/format.h", + "FLAC/metadata.h", + "FLAC/ordinals.h", + "FLAC/stream_decoder.h", + "FLAC/stream_encoder.h", + ] +} + +source_set("flac") { + deps = [ ":flac_shim" ] + public_configs = [ ":system_flac" ] +} diff --git a/linux/unbundle/fontconfig.gn b/linux/unbundle/fontconfig.gn new file mode 100644 index 000000000000..87926d59fdbc --- /dev/null +++ b/linux/unbundle/fontconfig.gn @@ -0,0 +1,13 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +assert(is_linux || is_chromeos) + +config("fontconfig_config") { + libs = [ "fontconfig" ] +} + +group("fontconfig") { + public_configs = [ ":fontconfig_config" ] +} diff --git a/linux/unbundle/freetype.gn b/linux/unbundle/freetype.gn new file mode 100644 index 000000000000..73f9666833e5 --- /dev/null +++ b/linux/unbundle/freetype.gn @@ -0,0 +1,14 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +declare_args() { + # Blink needs a recent and properly build-configured FreeType version to + # support OpenType variations, color emoji and avoid security bugs. By default + # we ship and link such a version as part of Chrome. 
For distributions that
+  # prefer to keep linking to the version the system provides, FreeType must
+  # be newer than version 2.7.1 and have color bitmap support compiled in.
+  # WARNING: System FreeType configurations other than as described WILL
+  # INTRODUCE TEXT RENDERING AND SECURITY REGRESSIONS.
+  use_system_freetype = true
+}
diff --git a/linux/unbundle/harfbuzz-ng.gn b/linux/unbundle/harfbuzz-ng.gn
new file mode 100644
index 000000000000..604272d937ae
--- /dev/null
+++ b/linux/unbundle/harfbuzz-ng.gn
@@ -0,0 +1,13 @@
+# Copyright 2017 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Blink uses a cutting-edge version of Harfbuzz (version listed in
+  # third_party/harfbuzz-ng/README.chromium); most Linux distros do not contain
+  # a new enough version of the code to work correctly. However, ChromeOS
+  # chroots (i.e. real ChromeOS builds for devices) do contain a new enough
+  # version of the library, and so this variable exists so that ChromeOS can
+  # build against the system lib and keep binary sizes smaller.
+  use_system_harfbuzz = true
+}
diff --git a/linux/unbundle/icu.gn b/linux/unbundle/icu.gn
new file mode 100644
index 000000000000..366ffd5a3597
--- /dev/null
+++ b/linux/unbundle/icu.gn
@@ -0,0 +1,254 @@
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/shim_headers.gni")
+
+group("icu") {
+  public_deps = [
+    ":icui18n",
+    ":icuuc",
+  ]
+}
+
+config("icu_config") {
+  defines = [
+    "USING_SYSTEM_ICU=1",
+    "ICU_UTIL_DATA_IMPL=ICU_UTIL_DATA_STATIC",
+
+    # U_EXPORT (defined in unicode/platform.h) is used to set public visibility
+    # on classes through the U_COMMON_API and U_I18N_API macros (among others).
+    # When linking against the system ICU library, we want its symbols to have
+    # public LTO visibility. This disables CFI checks for the ICU classes and
+    # allows whole-program optimization to be applied to the rest of Chromium.
+    #
+    # Both U_COMMON_API and U_I18N_API macros would be defined to U_EXPORT only
+    # when U_COMBINED_IMPLEMENTATION is defined (see unicode/utypes.h). Because
+    # we override the default system UCHAR_TYPE (char16_t), it is not possible
+    # to use U_COMBINED_IMPLEMENTATION at this moment, meaning the U_COMMON_API
+    # and U_I18N_API macros are set to U_IMPORT which is an empty definition.
+    #
+    # Until building with UCHAR_TYPE=char16_t is supported, one way to apply
+    # public visibility (and thus public LTO visibility) to all ICU classes is
+    # to define U_IMPORT to have the same value as U_EXPORT.
For more details, + # please see: https://crbug.com/822820 + "U_IMPORT=U_EXPORT", + ] +} + +pkg_config("system_icui18n") { + packages = [ "icu-i18n" ] +} + +pkg_config("system_icuuc") { + packages = [ "icu-uc" ] +} + +source_set("icui18n") { + deps = [ ":icui18n_shim" ] + public_configs = [ + ":icu_config", + ":system_icui18n", + ] +} + +source_set("icuuc") { + deps = [ ":icuuc_shim" ] + public_configs = [ + ":icu_config", + ":system_icuuc", + ] +} + +shim_headers("icui18n_shim") { + root_path = "source/i18n" + headers = [ + # This list can easily be updated using the commands below: + # cd third_party/icu/source/i18n + # find unicode -iname '*.h' -printf ' "%p",\n' | LC_ALL=C sort -u + "unicode/alphaindex.h", + "unicode/basictz.h", + "unicode/calendar.h", + "unicode/choicfmt.h", + "unicode/coleitr.h", + "unicode/coll.h", + "unicode/compactdecimalformat.h", + "unicode/curramt.h", + "unicode/currpinf.h", + "unicode/currunit.h", + "unicode/datefmt.h", + "unicode/dcfmtsym.h", + "unicode/decimfmt.h", + "unicode/dtfmtsym.h", + "unicode/dtitvfmt.h", + "unicode/dtitvinf.h", + "unicode/dtptngen.h", + "unicode/dtrule.h", + "unicode/fieldpos.h", + "unicode/fmtable.h", + "unicode/format.h", + "unicode/fpositer.h", + "unicode/gender.h", + "unicode/gregocal.h", + "unicode/listformatter.h", + "unicode/measfmt.h", + "unicode/measunit.h", + "unicode/measure.h", + "unicode/msgfmt.h", + "unicode/numfmt.h", + "unicode/numsys.h", + "unicode/plurfmt.h", + "unicode/plurrule.h", + "unicode/rbnf.h", + "unicode/rbtz.h", + "unicode/regex.h", + "unicode/region.h", + "unicode/reldatefmt.h", + "unicode/scientificnumberformatter.h", + "unicode/search.h", + "unicode/selfmt.h", + "unicode/simpletz.h", + "unicode/smpdtfmt.h", + "unicode/sortkey.h", + "unicode/stsearch.h", + "unicode/tblcoll.h", + "unicode/timezone.h", + "unicode/tmunit.h", + "unicode/tmutamt.h", + "unicode/tmutfmt.h", + "unicode/translit.h", + "unicode/tzfmt.h", + "unicode/tznames.h", + "unicode/tzrule.h", + "unicode/tztrans.h", + "unicode/ucal.h", + "unicode/ucol.h", + "unicode/ucoleitr.h", + "unicode/ucsdet.h", + "unicode/udat.h", + "unicode/udateintervalformat.h", + "unicode/udatpg.h", + "unicode/ufieldpositer.h", + "unicode/uformattable.h", + "unicode/ugender.h", + "unicode/ulocdata.h", + "unicode/umsg.h", + "unicode/unirepl.h", + "unicode/unum.h", + "unicode/unumsys.h", + "unicode/upluralrules.h", + "unicode/uregex.h", + "unicode/uregion.h", + "unicode/ureldatefmt.h", + "unicode/usearch.h", + "unicode/uspoof.h", + "unicode/utmscale.h", + "unicode/utrans.h", + "unicode/vtzone.h", + ] +} + +shim_headers("icuuc_shim") { + root_path = "source/common" + headers = [ + # This list can easily be updated using the commands below: + # cd third_party/icu/source/common + # find unicode -iname '*.h' -printf ' "%p",\n' | LC_ALL=C sort -u + "unicode/appendable.h", + "unicode/brkiter.h", + "unicode/bytestream.h", + "unicode/bytestrie.h", + "unicode/bytestriebuilder.h", + "unicode/caniter.h", + "unicode/casemap.h", + "unicode/char16ptr.h", + "unicode/chariter.h", + "unicode/dbbi.h", + "unicode/docmain.h", + "unicode/dtintrv.h", + "unicode/edits.h", + "unicode/enumset.h", + "unicode/errorcode.h", + "unicode/filteredbrk.h", + "unicode/icudataver.h", + "unicode/icuplug.h", + "unicode/idna.h", + "unicode/localematcher.h", + "unicode/localpointer.h", + "unicode/locdspnm.h", + "unicode/locid.h", + "unicode/messagepattern.h", + "unicode/normalizer2.h", + "unicode/normlzr.h", + "unicode/parseerr.h", + "unicode/parsepos.h", + "unicode/platform.h", + "unicode/ptypes.h", + 
"unicode/putil.h", + "unicode/rbbi.h", + "unicode/rep.h", + "unicode/resbund.h", + "unicode/schriter.h", + "unicode/simpleformatter.h", + "unicode/std_string.h", + "unicode/strenum.h", + "unicode/stringpiece.h", + "unicode/stringtriebuilder.h", + "unicode/symtable.h", + "unicode/ubidi.h", + "unicode/ubiditransform.h", + "unicode/ubrk.h", + "unicode/ucasemap.h", + "unicode/ucat.h", + "unicode/uchar.h", + "unicode/ucharstrie.h", + "unicode/ucharstriebuilder.h", + "unicode/uchriter.h", + "unicode/uclean.h", + "unicode/ucnv.h", + "unicode/ucnv_cb.h", + "unicode/ucnv_err.h", + "unicode/ucnvsel.h", + "unicode/uconfig.h", + "unicode/ucurr.h", + "unicode/udata.h", + "unicode/udisplaycontext.h", + "unicode/uenum.h", + "unicode/uidna.h", + "unicode/uiter.h", + "unicode/uldnames.h", + "unicode/ulistformatter.h", + "unicode/uloc.h", + "unicode/umachine.h", + "unicode/umisc.h", + "unicode/unifilt.h", + "unicode/unifunct.h", + "unicode/unimatch.h", + "unicode/uniset.h", + "unicode/unistr.h", + "unicode/unorm.h", + "unicode/unorm2.h", + "unicode/uobject.h", + "unicode/urename.h", + "unicode/urep.h", + "unicode/ures.h", + "unicode/uscript.h", + "unicode/uset.h", + "unicode/usetiter.h", + "unicode/ushape.h", + "unicode/usprep.h", + "unicode/ustring.h", + "unicode/ustringtrie.h", + "unicode/utext.h", + "unicode/utf.h", + "unicode/utf16.h", + "unicode/utf32.h", + "unicode/utf8.h", + "unicode/utf_old.h", + "unicode/utrace.h", + "unicode/utypes.h", + "unicode/uvernum.h", + "unicode/uversion.h", + ] +} diff --git a/linux/unbundle/jsoncpp.gn b/linux/unbundle/jsoncpp.gn new file mode 100644 index 000000000000..e84a0ef27a04 --- /dev/null +++ b/linux/unbundle/jsoncpp.gn @@ -0,0 +1,32 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("jsoncpp_config") { + packages = [ "jsoncpp" ] + + # Defining JSON_DLL_BUILD applies public visibility to jsoncpp classes + # thus deactivating CFI checks for them. This avoids CFI violations in + # virtual calls to system jsoncpp library (https://crbug.com/1365218). 
+ defines = [ "JSON_DLL_BUILD" ] +} + +shim_headers("jsoncpp_shim") { + root_path = "source/include" + headers = [ + "json/allocator.h", + "json/assertions.h", + "json/config.h", + "json/forwards.h", + "json/json.h", + "json/json_features.h", + "json/reader.h", + "json/value.h", + "json/version.h", + "json/writer.h", + ] +} + +source_set("jsoncpp") { + deps = [ ":jsoncpp_shim" ] + public_configs = [ ":jsoncpp_config" ] +} diff --git a/linux/unbundle/libXNVCtrl.gn b/linux/unbundle/libXNVCtrl.gn new file mode 100644 index 000000000000..0e1265b8c430 --- /dev/null +++ b/linux/unbundle/libXNVCtrl.gn @@ -0,0 +1,19 @@ +import("//build/shim_headers.gni") + +shim_headers("libXNVCtrl_shim") { + root_path = "../../../../../third_party/libXNVCtrl" + prefix = "NVCtrl/" + headers = [ + "NVCtrl.h", + "NVCtrlLib.h", + "nv_control.h", + ] +} + +source_set("libXNVCtrl") { + deps = [ ":libXNVCtrl_shim" ] + libs = [ + "XNVCtrl", + "xcb", + ] +} diff --git a/linux/unbundle/libaom.gn b/linux/unbundle/libaom.gn new file mode 100644 index 000000000000..dab8dfaba753 --- /dev/null +++ b/linux/unbundle/libaom.gn @@ -0,0 +1,34 @@ +import("//build/buildflag_header.gni") +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") +import("//third_party/libaom/options.gni") + +buildflag_header("libaom_buildflags") { + header = "libaom_buildflags.h" + flags = [ "ENABLE_LIBAOM=$enable_libaom" ] +} + +pkg_config("system_aom") { + packages = [ "aom" ] +} + +shim_headers("aom_shim") { + root_path = "source/libaom" + headers = [ + "aom/aom.h", + "aom/aom_codec.h", + "aom/aom_decoder.h", + "aom/aom_encoder.h", + "aom/aom_external_partition.h", + "aom/aom_frame_buffer.h", + "aom/aom_image.h", + "aom/aom_integer.h", + "aom/aomcx.h", + "aom/aomdx.h", + ] +} + +source_set("libaom") { + deps = [ ":aom_shim" ] + public_configs = [ ":system_aom" ] +} diff --git a/linux/unbundle/libavif.gn b/linux/unbundle/libavif.gn new file mode 100644 index 000000000000..c79f95ba45ee --- /dev/null +++ b/linux/unbundle/libavif.gn @@ -0,0 +1,16 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_libavif") { + packages = [ "libavif" ] +} + +shim_headers("avif_shim") { + root_path = "src/include" + headers = [ "avif/avif.h" ] +} + +source_set("libavif") { + deps = [ ":avif_shim" ] + public_configs = [ ":system_libavif" ] +} diff --git a/linux/unbundle/libdrm.gn b/linux/unbundle/libdrm.gn new file mode 100644 index 000000000000..d461b8ca5780 --- /dev/null +++ b/linux/unbundle/libdrm.gn @@ -0,0 +1,20 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_libdrm") { + packages = [ "libdrm" ] +} + +shim_headers("libdrm_shim") { + root_path = "src/include" + headers = [ "drm.h" ] +} + +source_set("libdrm") { + deps = [ ":libdrm_shim" ] + public_configs = [ ":system_libdrm" ] +} diff --git a/linux/unbundle/libevent.gn b/linux/unbundle/libevent.gn new file mode 100644 index 000000000000..2eb6d0223805 --- /dev/null +++ b/linux/unbundle/libevent.gn @@ -0,0 +1,15 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/shim_headers.gni") + +shim_headers("libevent_shim") { + root_path = "." 
+ headers = [ "event.h" ] +} + +source_set("libevent") { + deps = [ ":libevent_shim" ] + libs = [ "event" ] +} diff --git a/linux/unbundle/libjpeg.gn b/linux/unbundle/libjpeg.gn new file mode 100644 index 000000000000..3236fb802c44 --- /dev/null +++ b/linux/unbundle/libjpeg.gn @@ -0,0 +1,12 @@ +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +declare_args() { + # Uses system libjpeg. If true, overrides use_libjpeg_turbo. + use_system_libjpeg = true + + # Uses libjpeg_turbo as the jpeg implementation. Has no effect if + # use_system_libjpeg is set. + use_libjpeg_turbo = true +} diff --git a/linux/unbundle/libpng.gn b/linux/unbundle/libpng.gn new file mode 100644 index 000000000000..91e0ee45da63 --- /dev/null +++ b/linux/unbundle/libpng.gn @@ -0,0 +1,23 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("libpng_config") { + packages = [ "libpng" ] +} + +shim_headers("libpng_shim") { + root_path = "." + headers = [ + "png.h", + "pngconf.h", + ] +} + +source_set("libpng") { + deps = [ ":libpng_shim" ] + public_configs = [ ":libpng_config" ] +} diff --git a/linux/unbundle/libvpx.gn b/linux/unbundle/libvpx.gn new file mode 100644 index 000000000000..8b7bb5e7c1a9 --- /dev/null +++ b/linux/unbundle/libvpx.gn @@ -0,0 +1,32 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_libvpx") { + packages = [ "vpx" ] +} + +shim_headers("libvpx_shim") { + root_path = "source/libvpx" + headers = [ + "vpx/vp8.h", + "vpx/vp8cx.h", + "vpx/vp8dx.h", + "vpx/vpx_codec.h", + "vpx/vpx_codec_impl_bottom.h", + "vpx/vpx_codec_impl_top.h", + "vpx/vpx_decoder.h", + "vpx/vpx_encoder.h", + "vpx/vpx_frame_buffer.h", + "vpx/vpx_image.h", + "vpx/vpx_integer.h", + ] +} + +source_set("libvpx") { + deps = [ ":libvpx_shim" ] + public_configs = [ ":system_libvpx" ] +} diff --git a/linux/unbundle/libwebp.gn b/linux/unbundle/libwebp.gn new file mode 100644 index 000000000000..708cc9c2bb8e --- /dev/null +++ b/linux/unbundle/libwebp.gn @@ -0,0 +1,35 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_libwebp") { + packages = [ + "libwebp", + "libwebpdemux", + "libwebpmux", + ] +} + +shim_headers("libwebp_shim") { + root_path = "src/src" + headers = [ + "webp/decode.h", + "webp/demux.h", + "webp/encode.h", + "webp/mux.h", + "webp/mux_types.h", + "webp/types.h", + ] +} + +source_set("libwebp_webp") { + deps = [ ":libwebp_shim" ] + public_configs = [ ":system_libwebp" ] +} + +group("libwebp") { + deps = [ ":libwebp_webp" ] +} diff --git a/linux/unbundle/libxml.gn b/linux/unbundle/libxml.gn new file mode 100644 index 000000000000..8567c154f828 --- /dev/null +++ b/linux/unbundle/libxml.gn @@ -0,0 +1,58 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import("//build/config/linux/pkg_config.gni") + +pkg_config("system_libxml") { + packages = [ "libxml-2.0" ] +} + +source_set("libxml") { + public_configs = [ ":system_libxml" ] +} + +# Please keep in sync with //third_party/libxml/BUILD.gn. +static_library("libxml_utils") { + # Do not expand this visibility list without first consulting with the + # Security Team. + visibility = [ + ":xml_reader", + ":xml_writer", + "//base/test:test_support", + "//services/data_decoder:lib", + "//services/data_decoder:xml_parser_fuzzer_deps", + ] + sources = [ + "chromium/libxml_utils.cc", + "chromium/libxml_utils.h", + ] + public_configs = [ ":system_libxml" ] +} + +# Please keep in sync with //third_party/libxml/BUILD.gn. +static_library("xml_reader") { + # Do not expand this visibility list without first consulting with the + # Security Team. + visibility = [ + "//base/test:test_support", + "//components/policy/core/common:unit_tests", + "//services/data_decoder:*", + ] + sources = [ + "chromium/xml_reader.cc", + "chromium/xml_reader.h", + ] + deps = [ ":libxml_utils" ] +} + +# Please keep in sync with //third_party/libxml/BUILD.gn. +static_library("xml_writer") { + # The XmlWriter is considered safe to use from any target. + visibility = [ "*" ] + sources = [ + "chromium/xml_writer.cc", + "chromium/xml_writer.h", + ] + deps = [ ":libxml_utils" ] +} diff --git a/linux/unbundle/libxslt.gn b/linux/unbundle/libxslt.gn new file mode 100644 index 000000000000..787796063a28 --- /dev/null +++ b/linux/unbundle/libxslt.gn @@ -0,0 +1,13 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/linux/pkg_config.gni") + +pkg_config("system_libxslt") { + packages = [ "libxslt" ] +} + +source_set("libxslt") { + public_configs = [ ":system_libxslt" ] +} diff --git a/linux/unbundle/libyuv.gn b/linux/unbundle/libyuv.gn new file mode 100644 index 000000000000..a3363e45eebe --- /dev/null +++ b/linux/unbundle/libyuv.gn @@ -0,0 +1,37 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_yuv") { + packages = [ "libyuv" ] +} + +shim_headers("libyuv_shim") { + root_path = "include" + headers = [ + "libyuv.h", + "libyuv/basic_types.h", + "libyuv/compare.h", + "libyuv/convert.h", + "libyuv/convert_argb.h", + "libyuv/convert_from.h", + "libyuv/convert_from_argb.h", + "libyuv/cpu_id.h", + "libyuv/mjpeg_decoder.h", + "libyuv/planar_functions.h", + "libyuv/rotate.h", + "libyuv/rotate_argb.h", + "libyuv/rotate_row.h", + "libyuv/row.h", + "libyuv/scale.h", + "libyuv/scale_argb.h", + "libyuv/scale_row.h", + "libyuv/scale_uv.h", + "libyuv/version.h", + "libyuv/video_common.h", + ] +} + +source_set("libyuv") { + deps = [ ":libyuv_shim" ] + public_configs = [ ":system_yuv" ] +} diff --git a/linux/unbundle/openh264.gn b/linux/unbundle/openh264.gn new file mode 100644 index 000000000000..f4abd9b5f7b7 --- /dev/null +++ b/linux/unbundle/openh264.gn @@ -0,0 +1,36 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("config") { + packages = [ "openh264" ] +} + +shim_headers("openh264_shim") { + prefix = "wels/" + root_path = "src/codec/api/wels" + headers = [ + "codec_api.h", + "codec_app_def.h", + "codec_def.h", + "codec_ver.h", + ] +} + +source_set("common") { + deps = [ ":openh264_shim" ] + public_configs = [ ":config" ] +} + +source_set("processing") { + deps = [ ":openh264_shim" ] + public_configs = [ ":config" ] +} + +source_set("encoder") { + deps = [ ":openh264_shim" ] + public_configs = [ ":config" ] +} diff --git a/linux/unbundle/opus.gn b/linux/unbundle/opus.gn new file mode 100644 index 000000000000..b1a199a53240 --- /dev/null +++ b/linux/unbundle/opus.gn @@ -0,0 +1,43 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("opus_config") { + packages = [ "opus" ] +} + +shim_headers("opus_shim") { + root_path = "src/include" + headers = [ + "opus.h", + "opus_defines.h", + "opus_multistream.h", + "opus_types.h", + ] +} + +source_set("opus") { + deps = [ ":opus_shim" ] + public_configs = [ ":opus_config" ] +} + +source_set("opus_compare") { +} + +source_set("opus_demo") { +} + +source_set("test_opus_api") { +} + +source_set("test_opus_decode") { +} + +source_set("test_opus_encode") { +} + +source_set("test_opus_padding") { +} diff --git a/linux/unbundle/re2.gn b/linux/unbundle/re2.gn new file mode 100644 index 000000000000..d8e4d79b182f --- /dev/null +++ b/linux/unbundle/re2.gn @@ -0,0 +1,27 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/c++/c++.gni") +import("//build/shim_headers.gni") + +assert(!use_custom_libcxx, + "Usage of the system libre2.so is not supported with " + + "use_custom_libcxx=true because the library's interface relies on " + + "libstdc++'s std::string and std::vector.") + +shim_headers("re2_shim") { + root_path = "src" + headers = [ + "re2/filtered_re2.h", + "re2/re2.h", + "re2/set.h", + "re2/stringpiece.h", + "re2/variadic_function.h", + ] +} + +source_set("re2") { + deps = [ ":re2_shim" ] + libs = [ "re2" ] +} diff --git a/linux/unbundle/remove_bundled_libraries.py b/linux/unbundle/remove_bundled_libraries.py new file mode 100755 index 000000000000..43050eee7d6b --- /dev/null +++ b/linux/unbundle/remove_bundled_libraries.py @@ -0,0 +1,105 @@ +#!/usr/bin/env python3 +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" +Removes bundled libraries to make sure they are not used. + +See README for more details. 
+""" + +import optparse +import os.path +import sys + + +def DoMain(argv): + my_dirname = os.path.abspath(os.path.dirname(__file__)) + source_tree_root = os.path.abspath( + os.path.join(my_dirname, '..', '..', '..')) + + if os.path.join(source_tree_root, 'build', 'linux', 'unbundle') != my_dirname: + print('Sanity check failed: please run this script from ' + 'build/linux/unbundle directory.') + return 1 + + parser = optparse.OptionParser() + parser.add_option('--do-remove', action='store_true') + + options, args = parser.parse_args(argv) + + exclusion_used = {} + for exclusion in args: + exclusion_used[exclusion] = False + + for root, dirs, files in os.walk(source_tree_root, topdown=False): + # Only look at paths which contain a "third_party" component + # (note that e.g. third_party.png doesn't count). + root_relpath = os.path.relpath(root, source_tree_root) + if 'third_party' not in root_relpath.split(os.sep): + continue + + for f in files: + path = os.path.join(root, f) + relpath = os.path.relpath(path, source_tree_root) + + excluded = False + for exclusion in args: + # Require precise exclusions. Find the right-most third_party + # in the relative path, and if there is more than one ignore + # the exclusion if it's completely contained within the part + # before right-most third_party path component. + split = relpath.rsplit(os.sep + 'third_party' + os.sep, 1) + if len(split) > 1 and split[0].startswith(exclusion): + continue + + if relpath.startswith(exclusion): + # Multiple exclusions can match the same path. Go through all of them + # and mark each one as used. + exclusion_used[exclusion] = True + excluded = True + if excluded: + continue + + # Deleting gyp files almost always leads to gyp failures. + # These files come from Chromium project, and can be replaced if needed. + if f.endswith('.gyp') or f.endswith('.gypi'): + continue + + # Same about GN files. + if f.endswith('.gn') or f.endswith('.gni'): + continue + + # Deleting .isolate files leads to gyp failures. They are usually + # not used by a distro build anyway. + # See http://www.chromium.org/developers/testing/isolated-testing + # for more info. + if f.endswith('.isolate'): + continue + + if options.do_remove: + # Delete the file - best way to ensure it's not used during build. + os.remove(path) + else: + # By default just print paths that would be removed. + print(path) + + exit_code = 0 + + # Fail if exclusion list contains stale entries - this helps keep it + # up to date. + for exclusion, used in exclusion_used.items(): + if not used: + print('%s does not exist' % exclusion) + exit_code = 1 + + if not options.do_remove: + print('To actually remove files printed above, please pass ' + '--do-remove flag.') + + return exit_code + + +if __name__ == '__main__': + sys.exit(DoMain(sys.argv[1:])) diff --git a/linux/unbundle/replace_gn_files.py b/linux/unbundle/replace_gn_files.py new file mode 100755 index 000000000000..0483cd6921f0 --- /dev/null +++ b/linux/unbundle/replace_gn_files.py @@ -0,0 +1,121 @@ +#!/usr/bin/env python3 +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" +Replaces GN files in tree with files from here that +make the build use system libraries. +""" + +import argparse +import os +import shutil +import sys + + +REPLACEMENTS = { + # Use system libabsl_2xxx. These 20 shims MUST be used together. 
+ 'absl_algorithm': 'third_party/abseil-cpp/absl/algorithm/BUILD.gn', + 'absl_base': 'third_party/abseil-cpp/absl/base/BUILD.gn', + 'absl_cleanup': 'third_party/abseil-cpp/absl/cleanup/BUILD.gn', + 'absl_container': 'third_party/abseil-cpp/absl/container/BUILD.gn', + 'absl_debugging': 'third_party/abseil-cpp/absl/debugging/BUILD.gn', + 'absl_flags': 'third_party/abseil-cpp/absl/flags/BUILD.gn', + 'absl_functional': 'third_party/abseil-cpp/absl/functional/BUILD.gn', + 'absl_hash': 'third_party/abseil-cpp/absl/hash/BUILD.gn', + 'absl_log': 'third_party/abseil-cpp/absl/log/BUILD.gn', + 'absl_log_internal': 'third_party/abseil-cpp/absl/log/internal/BUILD.gn', + 'absl_memory': 'third_party/abseil-cpp/absl/memory/BUILD.gn', + 'absl_meta': 'third_party/abseil-cpp/absl/meta/BUILD.gn', + 'absl_numeric': 'third_party/abseil-cpp/absl/numeric/BUILD.gn', + 'absl_random': 'third_party/abseil-cpp/absl/random/BUILD.gn', + 'absl_status': 'third_party/abseil-cpp/absl/status/BUILD.gn', + 'absl_strings': 'third_party/abseil-cpp/absl/strings/BUILD.gn', + 'absl_synchronization': 'third_party/abseil-cpp/absl/synchronization/BUILD.gn', + 'absl_time': 'third_party/abseil-cpp/absl/time/BUILD.gn', + 'absl_types': 'third_party/abseil-cpp/absl/types/BUILD.gn', + 'absl_utility': 'third_party/abseil-cpp/absl/utility/BUILD.gn', + # + 'brotli': 'third_party/brotli/BUILD.gn', + 'crc32c': 'third_party/crc32c/BUILD.gn', + 'dav1d': 'third_party/dav1d/BUILD.gn', + 'double-conversion': 'base/third_party/double_conversion/BUILD.gn', + 'ffmpeg': 'third_party/ffmpeg/BUILD.gn', + 'flac': 'third_party/flac/BUILD.gn', + 'fontconfig': 'third_party/fontconfig/BUILD.gn', + 'freetype': 'build/config/freetype/freetype.gni', + 'harfbuzz-ng': 'third_party/harfbuzz-ng/harfbuzz.gni', + 'icu': 'third_party/icu/BUILD.gn', + 'jsoncpp' : 'third_party/jsoncpp/BUILD.gn', + 'libaom' : 'third_party/libaom/BUILD.gn', + 'libavif' : 'third_party/libavif/BUILD.gn', + 'libdrm': 'third_party/libdrm/BUILD.gn', + 'libevent': 'third_party/libevent/BUILD.gn', + 'libjpeg': 'third_party/libjpeg.gni', + 'libpng': 'third_party/libpng/BUILD.gn', + 'libvpx': 'third_party/libvpx/BUILD.gn', + 'libwebp': 'third_party/libwebp/BUILD.gn', + 'libxml': 'third_party/libxml/BUILD.gn', + 'libXNVCtrl' : 'third_party/angle/src/third_party/libXNVCtrl/BUILD.gn', + 'libxslt': 'third_party/libxslt/BUILD.gn', + 'libyuv' : 'third_party/libyuv/BUILD.gn', + 'openh264': 'third_party/openh264/BUILD.gn', + 'opus': 'third_party/opus/BUILD.gn', + 're2': 'third_party/re2/BUILD.gn', + 'snappy': 'third_party/snappy/BUILD.gn', + # Use system libSPIRV-Tools in Swiftshader. These two shims MUST be used together. + 'swiftshader-SPIRV-Headers' : 'third_party/swiftshader/third_party/SPIRV-Headers/BUILD.gn', + 'swiftshader-SPIRV-Tools' : 'third_party/swiftshader/third_party/SPIRV-Tools/BUILD.gn', + # Use system libSPIRV-Tools inside ANGLE. 
These two shims MUST be used together + # and can only be used if WebGPU is not compiled (use_dawn=false) + 'vulkan-SPIRV-Headers' : 'third_party/vulkan-deps/spirv-headers/src/BUILD.gn', + 'vulkan-SPIRV-Tools' : 'third_party/vulkan-deps/spirv-tools/src/BUILD.gn', + # + 'woff2': 'third_party/woff2/BUILD.gn', + 'zlib': 'third_party/zlib/BUILD.gn', +} + + +def DoMain(argv): + my_dirname = os.path.dirname(__file__) + source_tree_root = os.path.abspath( + os.path.join(my_dirname, '..', '..', '..')) + + parser = argparse.ArgumentParser() + parser.add_argument('--system-libraries', nargs='*', default=[]) + parser.add_argument('--undo', action='store_true') + + args = parser.parse_args(argv) + + handled_libraries = set() + for lib, path in REPLACEMENTS.items(): + if lib not in args.system_libraries: + continue + handled_libraries.add(lib) + + if args.undo: + # Restore original file, and also remove the backup. + # This is meant to restore the source tree to its original state. + os.rename(os.path.join(source_tree_root, path + '.orig'), + os.path.join(source_tree_root, path)) + else: + # Create a backup copy for --undo. + shutil.copyfile(os.path.join(source_tree_root, path), + os.path.join(source_tree_root, path + '.orig')) + + # Copy the GN file from directory of this script to target path. + shutil.copyfile(os.path.join(my_dirname, '%s.gn' % lib), + os.path.join(source_tree_root, path)) + + unhandled_libraries = set(args.system_libraries) - handled_libraries + if unhandled_libraries: + print('Unrecognized system libraries requested: %s' % ', '.join( + sorted(unhandled_libraries)), file=sys.stderr) + return 1 + + return 0 + + +if __name__ == '__main__': + sys.exit(DoMain(sys.argv[1:])) diff --git a/linux/unbundle/snappy.gn b/linux/unbundle/snappy.gn new file mode 100644 index 000000000000..dea0b5543d8d --- /dev/null +++ b/linux/unbundle/snappy.gn @@ -0,0 +1,20 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import("//build/shim_headers.gni") + +shim_headers("snappy_shim") { + root_path = "src" + headers = [ + "snappy-c.h", + "snappy-sinksource.h", + "snappy-stubs-public.h", + "snappy.h", + ] +} + +source_set("snappy") { + deps = [ ":snappy_shim" ] + libs = [ "snappy" ] +} diff --git a/linux/unbundle/swiftshader-SPIRV-Headers.gn b/linux/unbundle/swiftshader-SPIRV-Headers.gn new file mode 100644 index 000000000000..24f79de1e46c --- /dev/null +++ b/linux/unbundle/swiftshader-SPIRV-Headers.gn @@ -0,0 +1,17 @@ +import("//build/shim_headers.gni") + +shim_headers("SPIRV-Headers_shim") { + root_path = "../../../../third_party/SPIRV-Headers/include" + headers = [ + "spirv/unified1/GLSL.std.450.h", + "spirv/unified1/NonSemanticClspvReflection.h", + "spirv/unified1/NonSemanticDebugPrintf.h", + "spirv/unified1/OpenCL.std.h", + "spirv/unified1/spirv.h", + "spirv/unified1/spirv.hpp", + ] +} + +source_set("spv_headers") { + deps = [ ":SPIRV-Headers_shim" ] +} diff --git a/linux/unbundle/swiftshader-SPIRV-Tools.gn b/linux/unbundle/swiftshader-SPIRV-Tools.gn new file mode 100644 index 000000000000..eb9d9224ec01 --- /dev/null +++ b/linux/unbundle/swiftshader-SPIRV-Tools.gn @@ -0,0 +1,32 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_SPIRV-Tools") { + packages = [ "SPIRV-Tools" ] +} + +shim_headers("SPIRV-Tools_shim") { + root_path = "../../../../third_party/SPIRV-Tools/include" + headers = [ + "spirv-tools/instrument.hpp", + "spirv-tools/libspirv.h", + "spirv-tools/libspirv.hpp", + "spirv-tools/linker.hpp", + "spirv-tools/optimizer.hpp", + ] +} + +source_set("spvtools_headers") { + deps = [ ":SPIRV-Tools_shim" ] + public_configs = [ ":system_SPIRV-Tools" ] +} + +source_set("spvtools_opt") { + deps = [ ":SPIRV-Tools_shim" ] + public_configs = [ ":system_SPIRV-Tools" ] +} + +source_set("spvtools_val") { + deps = [ ":SPIRV-Tools_shim" ] + public_configs = [ ":system_SPIRV-Tools" ] +} diff --git a/linux/unbundle/vulkan-SPIRV-Headers.gn b/linux/unbundle/vulkan-SPIRV-Headers.gn new file mode 100644 index 000000000000..eb2495ce2712 --- /dev/null +++ b/linux/unbundle/vulkan-SPIRV-Headers.gn @@ -0,0 +1,19 @@ +# This shim can only be used if you build Chromium without DAWN + +import("//build/shim_headers.gni") + +shim_headers("vulkan-SPIRV-Headers_shim") { + root_path = "include" + headers = [ + "spirv/unified1/GLSL.std.450.h", + "spirv/unified1/NonSemanticClspvReflection.h", + "spirv/unified1/NonSemanticDebugPrintf.h", + "spirv/unified1/OpenCL.std.h", + "spirv/unified1/spirv.h", + "spirv/unified1/spirv.hpp", + ] +} + +source_set("spv_headers") { + deps = [ ":vulkan-SPIRV-Headers_shim" ] +} diff --git a/linux/unbundle/vulkan-SPIRV-Tools.gn b/linux/unbundle/vulkan-SPIRV-Tools.gn new file mode 100644 index 000000000000..a65c64c6193a --- /dev/null +++ b/linux/unbundle/vulkan-SPIRV-Tools.gn @@ -0,0 +1,69 @@ +# This shim can only be used if you build Chromium without DAWN + +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("spvtools_internal_config") { + packages = [ "SPIRV-Tools" ] +} + +shim_headers("vulkan-SPIRV-Tools_shim") { + root_path = "include" + headers = [ + "spirv-tools/instrument.hpp", + "spirv-tools/libspirv.h", + "spirv-tools/libspirv.hpp", + "spirv-tools/linker.hpp", + "spirv-tools/optimizer.hpp", + ] +} + +source_set("SPIRV-Tools") { + deps = [ ":vulkan-SPIRV-Tools_shim" ] + public_configs = [ ":spvtools_internal_config" ] +} + +source_set("spvtools") { + deps = [ ":vulkan-SPIRV-Tools_shim" ] + 
public_configs = [ ":spvtools_internal_config" ] +} + +source_set("spvtools_core_enums_unified1") { + deps = [ ":vulkan-SPIRV-Tools_shim" ] + public_configs = [ ":spvtools_internal_config" ] +} + +source_set("spvtools_core_tables_unified1") { + deps = [ ":vulkan-SPIRV-Tools_shim" ] + public_configs = [ ":spvtools_internal_config" ] +} + +source_set("spvtools_headers") { + deps = [ ":vulkan-SPIRV-Tools_shim" ] + public_configs = [ ":spvtools_internal_config" ] +} + +source_set("spvtools_language_header_cldebuginfo100") { + deps = [ ":vulkan-SPIRV-Tools_shim" ] + public_configs = [ ":spvtools_internal_config" ] +} + +source_set("spvtools_language_header_debuginfo") { + deps = [ ":vulkan-SPIRV-Tools_shim" ] + public_configs = [ ":spvtools_internal_config" ] +} + +source_set("spvtools_language_header_vkdebuginfo100") { + deps = [ ":vulkan-SPIRV-Tools_shim" ] + public_configs = [ ":spvtools_internal_config" ] +} + +source_set("spvtools_opt") { + deps = [ ":vulkan-SPIRV-Tools_shim" ] + public_configs = [ ":spvtools_internal_config" ] +} + +source_set("spvtools_val") { + deps = [ ":vulkan-SPIRV-Tools_shim" ] + public_configs = [ ":spvtools_internal_config" ] +} diff --git a/linux/unbundle/woff2.gn b/linux/unbundle/woff2.gn new file mode 100644 index 000000000000..e7bae10fdc1d --- /dev/null +++ b/linux/unbundle/woff2.gn @@ -0,0 +1,20 @@ +import("//build/config/linux/pkg_config.gni") +import("//build/shim_headers.gni") + +pkg_config("system_woff2") { + packages = [ "libwoff2dec" ] +} + +shim_headers("woff2_shim") { + root_path = "include" + headers = [ + "woff2/decode.h", + "woff2/encode.h", + "woff2/output.h", + ] +} + +source_set("woff2_dec") { + deps = [ ":woff2_shim" ] + public_configs = [ ":system_woff2" ] +} diff --git a/linux/unbundle/zlib.gn b/linux/unbundle/zlib.gn new file mode 100644 index 000000000000..2019a4064a84 --- /dev/null +++ b/linux/unbundle/zlib.gn @@ -0,0 +1,64 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/shim_headers.gni") + +shim_headers("zlib_shim") { + root_path = "." + headers = [ "zlib.h" ] +} + +config("system_zlib") { + defines = [ "USE_SYSTEM_ZLIB=1" ] +} + +config("zlib_config") { + configs = [ ":system_zlib" ] +} + +source_set("zlib") { + deps = [ ":zlib_shim" ] + libs = [ "z" ] + public_configs = [ ":system_zlib" ] +} + +shim_headers("minizip_shim") { + root_path = "contrib" + headers = [ + "minizip/crypt.h", + "minizip/ioapi.h", + "minizip/iowin32.h", + "minizip/mztools.h", + "minizip/unzip.h", + "minizip/zip.h", + ] +} + +source_set("minizip") { + deps = [ ":minizip_shim" ] + libs = [ "minizip" ] +} + +static_library("zip") { + sources = [ + "google/zip.cc", + "google/zip.h", + "google/zip_internal.cc", + "google/zip_internal.h", + "google/zip_reader.cc", + "google/zip_reader.h", + ] + deps = [ + ":minizip", + "//base", + ] +} + +static_library("compression_utils") { + sources = [ + "google/compression_utils.cc", + "google/compression_utils.h", + ] + deps = [ ":zlib" ] +} diff --git a/locale_tool.py b/locale_tool.py new file mode 100755 index 000000000000..c9fd395b4868 --- /dev/null +++ b/locale_tool.py @@ -0,0 +1,1511 @@ +#!/usr/bin/env vpython3 +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Helper script used to manage locale-related files in Chromium. 
+
+This script is used to check, and potentially fix, many locale-related files
+in your Chromium workspace, such as:
+
+  - GRIT input files (.grd) and the corresponding translations (.xtb).
+
+  - BUILD.gn files listing the Android localized string resource .xml files
+    generated by GRIT for all supported Chrome locales. These correspond to
+    <output> elements that use the type="android" attribute.
+
+The --scan-dir <dir> option can be used to check all files under a specific
+directory, and the --fix-inplace option can be used to try fixing any file
+that doesn't pass the check.
+
+This can be very handy to avoid tedious and repetitive work when adding new
+translations / locales to the Chrome code base, since this script can update
+said input files for you.
+
+Important note: checks and fixes may fail on some input files. For example,
+remoting/resources/remoting_strings.grd contains an in-line <!-- comment -->
+element inside its <outputs> section that breaks the script. The check will
+fail, and so will trying to fix it, but at least the file will not be
+modified.
+"""
+
+
+import argparse
+import json
+import os
+import re
+import shutil
+import subprocess
+import sys
+import unittest
+
+# Assume this script is under build/
+_SCRIPT_DIR = os.path.dirname(__file__)
+_SCRIPT_NAME = os.path.join(_SCRIPT_DIR, os.path.basename(__file__))
+_TOP_SRC_DIR = os.path.join(_SCRIPT_DIR, '..')
+
+# Need to import android/gyp/util/resource_utils.py here.
+sys.path.insert(0, os.path.join(_SCRIPT_DIR, 'android/gyp'))
+
+from util import build_utils
+from util import resource_utils
+
+
+# This locale is the default and doesn't have translations.
+_DEFAULT_LOCALE = 'en-US'
+
+# Misc terminal codes to provide human friendly progress output.
+_CONSOLE_CODE_MOVE_CURSOR_TO_COLUMN_0 = '\x1b[0G'
+_CONSOLE_CODE_ERASE_LINE = '\x1b[K'
+_CONSOLE_START_LINE = (
+    _CONSOLE_CODE_MOVE_CURSOR_TO_COLUMN_0 + _CONSOLE_CODE_ERASE_LINE)
+
+##########################################################################
+##########################################################################
+#####
+#####     G E N E R I C   H E L P E R   F U N C T I O N S
+#####
+##########################################################################
+##########################################################################
+
+def _FixChromiumLangAttribute(lang):
+  """Map XML "lang" attribute values to Chromium locale names."""
+  _CHROMIUM_LANG_FIXES = {
+      'en': 'en-US',  # For now, Chromium doesn't have an 'en' locale.
+      'iw': 'he',  # 'iw' is the obsolete form of ISO 639-1 for Hebrew.
+      'no': 'nb',  # 'no' is used by the Translation Console for Norwegian (nb).
+  }
+  return _CHROMIUM_LANG_FIXES.get(lang, lang)
+
+
+def _FixTranslationConsoleLocaleName(locale):
+  _FIXES = {
+      'nb': 'no',  # Norwegian.
+      'he': 'iw',  # Hebrew.
+  }
+  return _FIXES.get(locale, locale)
+
+
+def _CompareLocaleLists(list_a, list_expected, list_name):
+  """Compare two lists of locale names. Print errors if they differ.
+
+  Args:
+    list_a: First list of locales.
+    list_expected: Second list of locales, as expected.
+    list_name: Name of list printed in error messages.
+  Returns:
+    On success, return False. On error, print error messages and return True.
+  """
+  errors = []
+  missing_locales = sorted(set(list_a) - set(list_expected))
+  if missing_locales:
+    errors.append('Missing locales: %s' % missing_locales)
+
+  extra_locales = sorted(set(list_expected) - set(list_a))
+  if extra_locales:
+    errors.append('Unexpected locales: %s' % extra_locales)
+
+  if errors:
+    print('Errors in %s definition:' % list_name)
+    for error in errors:
+      print('  %s\n' % error)
+    return True
+
+  return False
+
+
+def _BuildIntervalList(input_list, predicate):
+  """Find ranges of contiguous list items that pass a given predicate.
+
+  Args:
+    input_list: An input list of items of any type.
+    predicate: A function that takes a list item and returns True if it
+      passes a given test.
+  Returns:
+    A list of (start_pos, end_pos) tuples, where all items in
+    [start_pos, end_pos) pass the predicate.
+  """
+  result = []
+  size = len(input_list)
+  start = 0
+  while True:
+    # Find first item in list that passes the predicate.
+    while start < size and not predicate(input_list[start]):
+      start += 1
+
+    if start >= size:
+      return result
+
+    # Find first item in the rest of the list that does not pass the
+    # predicate.
+    end = start + 1
+    while end < size and predicate(input_list[end]):
+      end += 1
+
+    result.append((start, end))
+    start = end + 1
+
+
+def _SortListSubRange(input_list, start, end, key_func):
+  """Sort an input list's sub-range according to a specific key function.
+
+  Args:
+    input_list: An input list.
+    start: Sub-range starting position in list.
+    end: Sub-range limit position in list.
+    key_func: A function that extracts a sort key from a line.
+  Returns:
+    A copy of |input_list|, with all items in [|start|, |end|) sorted
+    according to |key_func|.
+  """
+  result = input_list[:start]
+  inputs = []
+  for pos in range(start, end):
+    line = input_list[pos]
+    key = key_func(line)
+    inputs.append((key, line))
+
+  for _, line in sorted(inputs):
+    result.append(line)
+
+  result += input_list[end:]
+  return result
+
+
+def _SortElementsRanges(lines, element_predicate, element_key):
+  """Sort all elements of a given type in a list of lines by a given key.
+
+  Args:
+    lines: input lines.
+    element_predicate: predicate function to select elements to sort.
+    element_key: lambda returning a comparison key for each element that
+      passes the predicate.
+  Returns:
+    A new list of input lines, with lines [start..end) sorted.
+  """
+  intervals = _BuildIntervalList(lines, element_predicate)
+  for start, end in intervals:
+    lines = _SortListSubRange(lines, start, end, element_key)
+
+  return lines
+
+
+def _ProcessFile(input_file, locales, check_func, fix_func):
+  """Process a given input file, potentially fixing it.
+
+  Args:
+    input_file: Input file path.
+    locales: List of Chrome locales to consider / expect.
+    check_func: A function called to check the input file with
+      (input_file, input_lines, locales) arguments. It must return a list
+      of error messages, or None on success.
+    fix_func: None, or a function called to fix the input file with
+      (input_file, input_lines, locales) arguments. It must return the new
+      list of lines for the input file, and may raise an Exception in case
+      of error.
+  Returns:
+    True at the moment.
+  """
+  print('%sProcessing %s...' % (_CONSOLE_START_LINE, input_file), end=' ')
+  sys.stdout.flush()
+  with open(input_file) as f:
+    input_lines = f.readlines()
+  errors = check_func(input_file, input_lines, locales)
+  if errors:
+    print('\n%s%s' % (_CONSOLE_START_LINE, '\n'.join(errors)))
+    if fix_func:
+      try:
+        input_lines = fix_func(input_file, input_lines, locales)
+        output = ''.join(input_lines)
+        with open(input_file, 'wt') as f:
+          f.write(output)
+        print('Fixed %s.' % input_file)
+      except Exception as e:  # pylint: disable=broad-except
+        print('Skipped %s: %s' % (input_file, e))
+
+  return True
+
+
+def _ScanDirectoriesForFiles(scan_dirs, file_predicate):
+  """Scan directories for files that match a given predicate.
+
+  Args:
+    scan_dirs: A list of top-level directories to start the scan in.
+    file_predicate: lambda function which is passed the file's base name
+      and returns True if its full path, relative to |scan_dirs|, should be
+      passed in the result.
+  Returns:
+    A list of file full paths.
+ """ + result = [] + for src_dir in scan_dirs: + for root, _, files in os.walk(src_dir): + result.extend(os.path.join(root, f) for f in files if file_predicate(f)) + return result + + +def _WriteFile(file_path, file_data): + """Write |file_data| to |file_path|.""" + with open(file_path, 'w') as f: + f.write(file_data) + + +def _FindGnExecutable(): + """Locate the real GN executable used by this Chromium checkout. + + This is needed because the depot_tools 'gn' wrapper script will look + for .gclient and other things we really don't need here. + + Returns: + Path of real host GN executable from current Chromium src/ checkout. + """ + # Simply scan buildtools/*/gn and return the first one found so we don't + # have to guess the platform-specific sub-directory name (e.g. 'linux64' + # for 64-bit Linux machines). + buildtools_dir = os.path.join(_TOP_SRC_DIR, 'buildtools') + for subdir in os.listdir(buildtools_dir): + subdir_path = os.path.join(buildtools_dir, subdir) + if not os.path.isdir(subdir_path): + continue + gn_path = os.path.join(subdir_path, 'gn') + if os.path.exists(gn_path): + return gn_path + return None + + +def _PrettyPrintListAsLines(input_list, available_width, trailing_comma=False): + result = [] + input_str = ', '.join(input_list) + while len(input_str) > available_width: + pos = input_str.rfind(',', 0, available_width) + result.append(input_str[:pos + 1]) + input_str = input_str[pos + 1:].lstrip() + if trailing_comma and input_str: + input_str += ',' + result.append(input_str) + return result + + +class _PrettyPrintListAsLinesTest(unittest.TestCase): + + def test_empty_list(self): + self.assertListEqual([''], _PrettyPrintListAsLines([], 10)) + + def test_wrapping(self): + input_list = ['foo', 'bar', 'zoo', 'tool'] + self.assertListEqual( + _PrettyPrintListAsLines(input_list, 8), + ['foo,', 'bar,', 'zoo,', 'tool']) + self.assertListEqual( + _PrettyPrintListAsLines(input_list, 12), ['foo, bar,', 'zoo, tool']) + self.assertListEqual( + _PrettyPrintListAsLines(input_list, 79), ['foo, bar, zoo, tool']) + + def test_trailing_comma(self): + input_list = ['foo', 'bar', 'zoo', 'tool'] + self.assertListEqual( + _PrettyPrintListAsLines(input_list, 8, trailing_comma=True), + ['foo,', 'bar,', 'zoo,', 'tool,']) + self.assertListEqual( + _PrettyPrintListAsLines(input_list, 12, trailing_comma=True), + ['foo, bar,', 'zoo, tool,']) + self.assertListEqual( + _PrettyPrintListAsLines(input_list, 79, trailing_comma=True), + ['foo, bar, zoo, tool,']) + + +########################################################################## +########################################################################## +##### +##### L O C A L E S L I S T S +##### +########################################################################## +########################################################################## + +# Various list of locales that will be extracted from build/config/locales.gni +# Do not use these directly, use ChromeLocales(), and IosUnsupportedLocales() +# instead to access these lists. 
+_INTERNAL_CHROME_LOCALES = []
+_INTERNAL_IOS_UNSUPPORTED_LOCALES = []
+
+
+def ChromeLocales():
+  """Return the list of all locales supported by Chrome."""
+  if not _INTERNAL_CHROME_LOCALES:
+    _ExtractAllChromeLocalesLists()
+  return _INTERNAL_CHROME_LOCALES
+
+
+def IosUnsupportedLocales():
+  """Return the list of locales that are unsupported on iOS."""
+  if not _INTERNAL_IOS_UNSUPPORTED_LOCALES:
+    _ExtractAllChromeLocalesLists()
+  return _INTERNAL_IOS_UNSUPPORTED_LOCALES
+
+
+def _PrepareTinyGnWorkspace(work_dir, out_subdir_name='out'):
+  """Populate an empty directory with a tiny set of working GN config files.
+
+  This allows us to run 'gn gen --root <work_dir>' as fast as possible
+  to generate files containing the locales list. This takes about 300ms on
+  a decent machine, instead of more than 5 seconds when running the
+  equivalent commands from a real Chromium workspace, which requires
+  regenerating more than 23k targets.
+
+  Args:
+    work_dir: target working directory.
+    out_subdir_name: Name of output sub-directory.
+  Returns:
+    Full path of output directory created inside |work_dir|.
+  """
+  # Create top-level .gn file that must point to the BUILDCONFIG.gn.
+  _WriteFile(os.path.join(work_dir, '.gn'),
+             'buildconfig = "//BUILDCONFIG.gn"\n')
+  # Create BUILDCONFIG.gn which must set a default toolchain. Also add
+  # all variables that may be used in locales.gni in a declare_args() block.
+  _WriteFile(
+      os.path.join(work_dir, 'BUILDCONFIG.gn'),
+      r'''set_default_toolchain("toolchain")
+declare_args () {
+  is_ios = false
+  is_android = true
+}
+''')
+
+  # Create fake toolchain required by BUILDCONFIG.gn.
+  os.mkdir(os.path.join(work_dir, 'toolchain'))
+  _WriteFile(os.path.join(work_dir, 'toolchain', 'BUILD.gn'),
+             r'''toolchain("toolchain") {
+  tool("stamp") {
+    command = "touch {{output}}"  # Required by action()
+  }
+}
+''')
+
+  # Create top-level BUILD.gn. GN requires at least one target to build, so
+  # do that with a fake action which will never be invoked. Also write the
+  # locales to misc files in the output directory.
+  _WriteFile(
+      os.path.join(work_dir, 'BUILD.gn'), r'''import("//locales.gni")
+
+action("create_foo") {  # Fake action to avoid GN complaints.
+  script = "//build/create_foo.py"
+  inputs = []
+  outputs = [ "$target_out_dir/$target_name" ]
+}
+
+# Write the locales lists to files in the output directory.
+_filename = root_build_dir + "/foo"
+write_file(_filename + ".locales", locales, "json")
+write_file(_filename + ".ios_unsupported_locales",
+           ios_unsupported_locales,
+           "json")
+''')
+
+  # Copy build/config/locales.gni to the workspace, as required by BUILD.gn.
+  shutil.copyfile(os.path.join(_TOP_SRC_DIR, 'build', 'config', 'locales.gni'),
+                  os.path.join(work_dir, 'locales.gni'))
+
+  # Create output directory.
+  out_path = os.path.join(work_dir, out_subdir_name)
+  os.mkdir(out_path)
+
+  # And ... we're good.
+  return out_path
+
+
+# Set this global variable to the path of a given temporary directory
+# before calling _ExtractAllChromeLocalesLists() if you want to debug
+# the locales list extraction process.
+_DEBUG_LOCALES_WORK_DIR = None
+
+
+def _ReadJsonList(file_path):
+  """Read a JSON file that must contain a list, and return it."""
+  with open(file_path) as f:
+    data = json.load(f)
+  assert isinstance(data, list), "JSON file %s is not a list!" % file_path
+  # json.load() already returns text strings under Python 3, so the items
+  # can be used directly as locale names.
+  return data
+
+
+def _ExtractAllChromeLocalesLists():
+  with build_utils.TempDir() as tmp_path:
+    if _DEBUG_LOCALES_WORK_DIR:
+      tmp_path = _DEBUG_LOCALES_WORK_DIR
+      build_utils.DeleteDirectory(tmp_path)
+      build_utils.MakeDirectory(tmp_path)
+
+    out_path = _PrepareTinyGnWorkspace(tmp_path, 'out')
+
+    # NOTE: The file suffixes used here should be kept in sync with
+    # build/config/locales.gni
+    gn_executable = _FindGnExecutable()
+    try:
+      subprocess.check_output(
+          [gn_executable, 'gen', out_path, '--root=' + tmp_path])
+    except subprocess.CalledProcessError as e:
+      print(e.output)
+      raise e
+
+    global _INTERNAL_CHROME_LOCALES
+    _INTERNAL_CHROME_LOCALES = _ReadJsonList(
+        os.path.join(out_path, 'foo.locales'))
+
+    global _INTERNAL_IOS_UNSUPPORTED_LOCALES
+    _INTERNAL_IOS_UNSUPPORTED_LOCALES = _ReadJsonList(
+        os.path.join(out_path, 'foo.ios_unsupported_locales'))
+
+
+##########################################################################
+##########################################################################
+#####
+#####     G R D   H E L P E R   F U N C T I O N S
+#####
+##########################################################################
+##########################################################################
+
+# Technical note:
+#
+# Even though .grd files are XML, an xml parser library is not used in order
+# to preserve the original file's structure after modification. ElementTree
+# tends to re-order attributes in each element when re-writing an XML
+# document tree, which is undesirable here.
+#
+# Thus simple line-based regular expression matching is used instead.
+#
+
+# Misc regular expressions used to match elements and their attributes.
+_RE_OUTPUT_ELEMENT = re.compile(r'<output (.*)/>')
+_RE_TRANSLATION_ELEMENT = re.compile(r'<file [^>]*path="[^"]*\.xtb".*/>')
+_RE_FILENAME_ATTRIBUTE = re.compile(r'filename="([^"]*)"')
+_RE_LANG_ATTRIBUTE = re.compile(r'lang="([^"]*)"')
+_RE_PATH_ATTRIBUTE = re.compile(r'path="([^"]*)"')
+_RE_TYPE_ANDROID_ATTRIBUTE = re.compile(r'type="android"')
+
+
+def _IsGritInputFile(input_file):
+  """Returns True iff this is a GRIT input file."""
+  return input_file.endswith('.grd')
+
+
+def _GetXmlLangAttribute(xml_line):
+  """Extract the lang attribute value from an XML input line."""
+  m = _RE_LANG_ATTRIBUTE.search(xml_line)
+  if not m:
+    return None
+  return m.group(1)
+
+
+class _GetXmlLangAttributeTest(unittest.TestCase):
+  TEST_DATA = {
+      '': None,
+      'foo': None,
+      'lang=foo': None,
+      'lang="foo"': 'foo',
+      '<file lang="foo bar">': 'foo bar',
+      '<output lang="fr-CA" />': 'fr-CA',
+  }
+
+  def test_GetXmlLangAttribute(self):
+    for test_line, expected in self.TEST_DATA.items():
+      self.assertEqual(_GetXmlLangAttribute(test_line), expected)
+
+
+def _SortGrdElementsRanges(grd_lines, element_predicate):
+  """Sort all .grd elements of a given type by their lang attribute."""
+  return _SortElementsRanges(grd_lines, element_predicate,
+                             _GetXmlLangAttribute)
+
+
+def _CheckGrdElementRangeLang(grd_lines, start, end, wanted_locales):
+  """Check the element 'lang' attributes in a specific .grd lines range.
+
+  This really checks the following:
+    - Each element has a correct 'lang' attribute.
+    - There are no duplicated lines for the same 'lang' attribute.
+    - That there are no extra locales that Chromium doesn't want.
+    - That no wanted locale is missing.
+
+  Args:
+    grd_lines: Input .grd lines.
+    start: Sub-range start position in input line list.
+    end: Sub-range limit position in input line list.
+    wanted_locales: Set of wanted Chromium locale names.
+  Returns:
+    List of error message strings for this input. Empty on success.
+  """
+  errors = []
+  locales = set()
+  for pos in range(start, end):
+    line = grd_lines[pos]
+    lang = _GetXmlLangAttribute(line)
+    if not lang:
+      errors.append('%d: Missing "lang" attribute in <output> element' %
+                    (pos + 1))
+      continue
+    cr_locale = _FixChromiumLangAttribute(lang)
+    if cr_locale in locales:
+      errors.append(
+          '%d: Redefinition of <output> for "%s" locale' % (pos + 1, lang))
+    locales.add(cr_locale)
+
+  extra_locales = locales.difference(wanted_locales)
+  if extra_locales:
+    errors.append('%d-%d: Extra locales found: %s' % (start + 1, end + 1,
+                                                      sorted(extra_locales)))
+
+  missing_locales = wanted_locales.difference(locales)
+  if missing_locales:
+    errors.append('%d-%d: Missing locales: %s' % (start + 1, end + 1,
+                                                  sorted(missing_locales)))
+
+  return errors
+
+
+##########################################################################
+##########################################################################
+#####
+#####     G R D   A N D R O I D   O U T P U T S
+#####
+##########################################################################
+##########################################################################
+
+def _IsGrdAndroidOutputLine(line):
+  """Returns True iff this is an Android-specific <output> line."""
+  m = _RE_OUTPUT_ELEMENT.search(line)
+  if m:
+    return 'type="android"' in m.group(1)
+  return False
+
+assert _IsGrdAndroidOutputLine('  <output type="android" lang="fr" />')
+
+# Many of the functions below have unused arguments due to genericity.
+# pylint: disable=unused-argument
+
+def _CheckGrdElementRangeAndroidOutputFilename(grd_lines, start, end,
+                                               wanted_locales):
+  """Check all <output> elements in a specific input .grd lines range.
+
+  This really checks the following:
+    - Filenames exist for each listed locale.
+    - Filenames are well-formed.
+
+  Args:
+    grd_lines: Input .grd lines.
+    start: Sub-range start position in input line list.
+    end: Sub-range limit position in input line list.
+    wanted_locales: Set of wanted Chromium locale names.
+  Returns:
+    List of error message strings for this input. Empty on success.
+  """
+  errors = []
+  for pos in range(start, end):
+    line = grd_lines[pos]
+    lang = _GetXmlLangAttribute(line)
+    if not lang:
+      continue
+    cr_locale = _FixChromiumLangAttribute(lang)
+
+    m = _RE_FILENAME_ATTRIBUTE.search(line)
+    if not m:
+      errors.append('%d: Missing filename attribute in <output> element' %
+                    (pos + 1))
+    else:
+      filename = m.group(1)
+      if not filename.endswith('.xml'):
+        errors.append(
+            '%d: Filename should end with ".xml": %s' % (pos + 1, filename))
+
+      dirname = os.path.basename(os.path.dirname(filename))
+      prefix = ('values-%s' % resource_utils.ToAndroidLocaleName(cr_locale)
+                if cr_locale != _DEFAULT_LOCALE else 'values')
+      if dirname != prefix:
+        errors.append(
+            '%d: Directory name should be %s: %s' % (pos + 1, prefix,
+                                                     filename))
+
+  return errors
+
+
+def _CheckGrdAndroidOutputElements(grd_file, grd_lines, wanted_locales):
+  """Check all <output> elements related to Android.
+
+  Args:
+    grd_file: Input .grd file path.
+    grd_lines: List of input .grd lines.
+    wanted_locales: set of wanted Chromium locale names.
+  Returns:
+    List of error message strings. Empty on success.
+ """ + intervals = _BuildIntervalList(grd_lines, _IsGrdAndroidOutputLine) + errors = [] + for start, end in intervals: + errors += _CheckGrdElementRangeLang(grd_lines, start, end, wanted_locales) + errors += _CheckGrdElementRangeAndroidOutputFilename(grd_lines, start, end, + wanted_locales) + return errors + + +def _AddMissingLocalesInGrdAndroidOutputs(grd_file, grd_lines, wanted_locales): + """Fix an input .grd line by adding missing Android outputs. + + Args: + grd_file: Input .grd file path. + grd_lines: Input .grd line list. + wanted_locales: set of Chromium locale names. + Returns: + A new list of .grd lines, containing new elements when needed + for locales from |wanted_locales| that were not part of the input. + """ + intervals = _BuildIntervalList(grd_lines, _IsGrdAndroidOutputLine) + for start, end in reversed(intervals): + locales = set() + for pos in xrange(start, end): + lang = _GetXmlLangAttribute(grd_lines[pos]) + locale = _FixChromiumLangAttribute(lang) + locales.add(locale) + + missing_locales = wanted_locales.difference(locales) + if not missing_locales: + continue + + src_locale = 'bg' + src_lang_attribute = 'lang="%s"' % src_locale + src_line = None + for pos in xrange(start, end): + if src_lang_attribute in grd_lines[pos]: + src_line = grd_lines[pos] + break + + if not src_line: + raise Exception( + 'Cannot find element with "%s" lang attribute' % src_locale) + + line_count = end - 1 + for locale in missing_locales: + android_locale = resource_utils.ToAndroidLocaleName(locale) + dst_line = src_line.replace( + 'lang="%s"' % src_locale, 'lang="%s"' % locale).replace( + 'values-%s/' % src_locale, 'values-%s/' % android_locale) + grd_lines.insert(line_count, dst_line) + line_count += 1 + + # Sort the new elements. + return _SortGrdElementsRanges(grd_lines, _IsGrdAndroidOutputLine) + + +########################################################################## +########################################################################## +##### +##### G R D T R A N S L A T I O N S +##### +########################################################################## +########################################################################## + + +def _IsTranslationGrdOutputLine(line): + """Returns True iff this is an output .xtb element.""" + m = _RE_TRANSLATION_ELEMENT.search(line) + return m is not None + + +class _IsTranslationGrdOutputLineTest(unittest.TestCase): + + def test_GrdTranslationOutputLines(self): + _VALID_INPUT_LINES = [ + '', + '', + '', + '', + ' ', + ] + _INVALID_INPUT_LINES = [''] + + for line in _VALID_INPUT_LINES: + self.assertTrue( + _IsTranslationGrdOutputLine(line), + '_IsTranslationGrdOutputLine() returned False for [%s]' % line) + + for line in _INVALID_INPUT_LINES: + self.assertFalse( + _IsTranslationGrdOutputLine(line), + '_IsTranslationGrdOutputLine() returned True for [%s]' % line) + + +def _CheckGrdTranslationElementRange(grd_lines, start, end, + wanted_locales): + """Check all sub-elements in specific input .grd lines range. + + This really checks the following: + - Each item has a 'path' attribute. + - Each such path value ends up with '.xtb'. + + Args: + grd_lines: Input .grd lines. + start: Sub-range start position in input line list. + end: Sub-range limit position in input line list. + wanted_locales: Set of wanted Chromium locale names. + Returns: + List of error message strings for this input. Empty on success. 
+ """ + errors = [] + for pos in xrange(start, end): + line = grd_lines[pos] + lang = _GetXmlLangAttribute(line) + if not lang: + continue + m = _RE_PATH_ATTRIBUTE.search(line) + if not m: + errors.append('%d: Missing path attribute in element' % pos + + 1) + else: + filename = m.group(1) + if not filename.endswith('.xtb'): + errors.append( + '%d: Path should end with ".xtb": %s' % (pos + 1, filename)) + + return errors + + +def _CheckGrdTranslations(grd_file, grd_lines, wanted_locales): + """Check all elements that correspond to an .xtb output file. + + Args: + grd_file: Input .grd file path. + grd_lines: List of input .grd lines. + wanted_locales: set of wanted Chromium locale names. + Returns: + List of error message strings. Empty on success. + """ + wanted_locales = wanted_locales - set([_DEFAULT_LOCALE]) + intervals = _BuildIntervalList(grd_lines, _IsTranslationGrdOutputLine) + errors = [] + for start, end in intervals: + errors += _CheckGrdElementRangeLang(grd_lines, start, end, wanted_locales) + errors += _CheckGrdTranslationElementRange(grd_lines, start, end, + wanted_locales) + return errors + + +# Regular expression used to replace the lang attribute inside .xtb files. +_RE_TRANSLATIONBUNDLE = re.compile('') + + +def _CreateFakeXtbFileFrom(src_xtb_path, dst_xtb_path, dst_locale): + """Create a fake .xtb file. + + Args: + src_xtb_path: Path to source .xtb file to copy from. + dst_xtb_path: Path to destination .xtb file to write to. + dst_locale: Destination locale, the lang attribute in the source file + will be substituted with this value before its lines are written + to the destination file. + """ + with open(src_xtb_path) as f: + src_xtb_lines = f.readlines() + + def replace_xtb_lang_attribute(line): + m = _RE_TRANSLATIONBUNDLE.search(line) + if not m: + return line + return line[:m.start(1)] + dst_locale + line[m.end(1):] + + dst_xtb_lines = [replace_xtb_lang_attribute(line) for line in src_xtb_lines] + with build_utils.AtomicOutput(dst_xtb_path) as tmp: + tmp.writelines(dst_xtb_lines) + + +def _AddMissingLocalesInGrdTranslations(grd_file, grd_lines, wanted_locales): + """Fix an input .grd line by adding missing Android outputs. + + This also creates fake .xtb files from the one provided for 'en-GB'. + + Args: + grd_file: Input .grd file path. + grd_lines: Input .grd line list. + wanted_locales: set of Chromium locale names. + Returns: + A new list of .grd lines, containing new elements when needed + for locales from |wanted_locales| that were not part of the input. 
+ """ + wanted_locales = wanted_locales - set([_DEFAULT_LOCALE]) + intervals = _BuildIntervalList(grd_lines, _IsTranslationGrdOutputLine) + for start, end in reversed(intervals): + locales = set() + for pos in xrange(start, end): + lang = _GetXmlLangAttribute(grd_lines[pos]) + locale = _FixChromiumLangAttribute(lang) + locales.add(locale) + + missing_locales = wanted_locales.difference(locales) + if not missing_locales: + continue + + src_locale = 'en-GB' + src_lang_attribute = 'lang="%s"' % src_locale + src_line = None + for pos in xrange(start, end): + if src_lang_attribute in grd_lines[pos]: + src_line = grd_lines[pos] + break + + if not src_line: + raise Exception( + 'Cannot find element with "%s" lang attribute' % src_locale) + + src_path = os.path.join( + os.path.dirname(grd_file), + _RE_PATH_ATTRIBUTE.search(src_line).group(1)) + + line_count = end - 1 + for locale in missing_locales: + dst_line = src_line.replace( + 'lang="%s"' % src_locale, 'lang="%s"' % locale).replace( + '_%s.xtb' % src_locale, '_%s.xtb' % locale) + grd_lines.insert(line_count, dst_line) + line_count += 1 + + dst_path = src_path.replace('_%s.xtb' % src_locale, '_%s.xtb' % locale) + _CreateFakeXtbFileFrom(src_path, dst_path, locale) + + + # Sort the new elements. + return _SortGrdElementsRanges(grd_lines, _IsTranslationGrdOutputLine) + + +########################################################################## +########################################################################## +##### +##### G N A N D R O I D O U T P U T S +##### +########################################################################## +########################################################################## + +_RE_GN_VALUES_LIST_LINE = re.compile( + r'^\s*".*values(\-([A-Za-z0-9-]+))?/.*\.xml",\s*$') + +def _IsBuildGnInputFile(input_file): + """Returns True iff this is a BUILD.gn file.""" + return os.path.basename(input_file) == 'BUILD.gn' + + +def _GetAndroidGnOutputLocale(line): + """Check a GN list, and return its Android locale if it is an output .xml""" + m = _RE_GN_VALUES_LIST_LINE.match(line) + if not m: + return None + + if m.group(1): # First group is optional and contains group 2. + return m.group(2) + + return resource_utils.ToAndroidLocaleName(_DEFAULT_LOCALE) + + +def _IsAndroidGnOutputLine(line): + """Returns True iff this is an Android-specific localized .xml output.""" + return _GetAndroidGnOutputLocale(line) != None + + +def _CheckGnOutputsRangeForLocalizedStrings(gn_lines, start, end): + """Check that a range of GN lines corresponds to localized strings. + + Special case: Some BUILD.gn files list several non-localized .xml files + that should be ignored by this function, e.g. in + components/cronet/android/BUILD.gn, the following appears: + + inputs = [ + ... + "sample/res/layout/activity_main.xml", + "sample/res/layout/dialog_url.xml", + "sample/res/values/dimens.xml", + "sample/res/values/strings.xml", + ... + ] + + These are non-localized strings, and should be ignored. This function is + used to detect them quickly. 
+ """ + for pos in xrange(start, end): + if not 'values/' in gn_lines[pos]: + return True + return False + + +def _CheckGnOutputsRange(gn_lines, start, end, wanted_locales): + if not _CheckGnOutputsRangeForLocalizedStrings(gn_lines, start, end): + return [] + + errors = [] + locales = set() + for pos in xrange(start, end): + line = gn_lines[pos] + android_locale = _GetAndroidGnOutputLocale(line) + assert android_locale != None + cr_locale = resource_utils.ToChromiumLocaleName(android_locale) + if cr_locale in locales: + errors.append('%s: Redefinition of output for "%s" locale' % + (pos + 1, android_locale)) + locales.add(cr_locale) + + extra_locales = locales.difference(wanted_locales) + if extra_locales: + errors.append('%d-%d: Extra locales: %s' % (start + 1, end + 1, + sorted(extra_locales))) + + missing_locales = wanted_locales.difference(locales) + if missing_locales: + errors.append('%d-%d: Missing locales: %s' % (start + 1, end + 1, + sorted(missing_locales))) + + return errors + + +def _CheckGnAndroidOutputs(gn_file, gn_lines, wanted_locales): + intervals = _BuildIntervalList(gn_lines, _IsAndroidGnOutputLine) + errors = [] + for start, end in intervals: + errors += _CheckGnOutputsRange(gn_lines, start, end, wanted_locales) + return errors + + +def _AddMissingLocalesInGnAndroidOutputs(gn_file, gn_lines, wanted_locales): + intervals = _BuildIntervalList(gn_lines, _IsAndroidGnOutputLine) + # NOTE: Since this may insert new lines to each interval, process the + # list in reverse order to maintain valid (start,end) positions during + # the iteration. + for start, end in reversed(intervals): + if not _CheckGnOutputsRangeForLocalizedStrings(gn_lines, start, end): + continue + + locales = set() + for pos in xrange(start, end): + lang = _GetAndroidGnOutputLocale(gn_lines[pos]) + locale = resource_utils.ToChromiumLocaleName(lang) + locales.add(locale) + + missing_locales = wanted_locales.difference(locales) + if not missing_locales: + continue + + src_locale = 'bg' + src_values = 'values-%s/' % resource_utils.ToAndroidLocaleName(src_locale) + src_line = None + for pos in xrange(start, end): + if src_values in gn_lines[pos]: + src_line = gn_lines[pos] + break + + if not src_line: + raise Exception( + 'Cannot find output list item with "%s" locale' % src_locale) + + line_count = end - 1 + for locale in missing_locales: + if locale == _DEFAULT_LOCALE: + dst_line = src_line.replace('values-%s/' % src_locale, 'values/') + else: + dst_line = src_line.replace( + 'values-%s/' % src_locale, + 'values-%s/' % resource_utils.ToAndroidLocaleName(locale)) + gn_lines.insert(line_count, dst_line) + line_count += 1 + + gn_lines = _SortListSubRange( + gn_lines, start, line_count, + lambda line: _RE_GN_VALUES_LIST_LINE.match(line).group(1)) + + return gn_lines + + +########################################################################## +########################################################################## +##### +##### T R A N S L A T I O N E X P E C T A T I O N S +##### +########################################################################## +########################################################################## + +_EXPECTATIONS_FILENAME = 'translation_expectations.pyl' + +# Technical note: the format of translation_expectations.pyl +# is a 'Python literal', which defines a python dictionary, so should +# be easy to parse. However, when modifying it, care should be taken +# to respect the line comments and the order of keys within the text +# file. 
+
+
+def _ReadPythonLiteralFile(pyl_path):
+  """Read a .pyl file into a Python data structure."""
+  with open(pyl_path) as f:
+    pyl_content = f.read()
+  # Evaluate as a Python data structure, use an empty global
+  # and local dictionary.
+  return eval(pyl_content, dict(), dict())
+
+
+def _UpdateLocalesInExpectationLines(pyl_lines,
+                                     wanted_locales,
+                                     available_width=79):
+  """Update the locales list(s) found in an expectations file.
+
+  Args:
+    pyl_lines: Iterable of input lines from the file.
+    wanted_locales: Set or list of new locale names.
+    available_width: Optional, number of character columns used
+      to word-wrap the new list items.
+  Returns:
+    New list of updated lines.
+  """
+  locales_list = ['"%s"' % loc for loc in sorted(wanted_locales)]
+  result = []
+  line_count = len(pyl_lines)
+  line_num = 0
+  DICT_START = '"languages": ['
+  while line_num < line_count:
+    line = pyl_lines[line_num]
+    line_num += 1
+    result.append(line)
+    # Look for the start of the "languages" list.
+    pos = line.find(DICT_START)
+    if pos < 0:
+      continue
+
+    start_margin = pos
+    start_line = line_num
+    # Skip over all lines from the list.
+    while (line_num < line_count and
+           not pyl_lines[line_num].rstrip().endswith('],')):
+      line_num += 1
+      continue
+
+    if line_num == line_count:
+      raise Exception('%d: Missing list termination!' % start_line)
+
+    # Format the new list according to the new margin.
+    locale_width = available_width - (start_margin + 2)
+    locale_lines = _PrettyPrintListAsLines(
+        locales_list, locale_width, trailing_comma=True)
+    for locale_line in locale_lines:
+      result.append(' ' * (start_margin + 2) + locale_line)
+    result.append(' ' * start_margin + '],')
+    line_num += 1
+
+  return result
+
+
+class _UpdateLocalesInExpectationLinesTest(unittest.TestCase):
+
+  def test_simple(self):
+    self.maxDiff = 1000
+    input_text = r'''
+# This comment should be preserved
+# 23456789012345678901234567890123456789
+{
+  "android_grd": {
+    "languages": [
+      "aa", "bb", "cc", "dd", "ee",
+      "ff", "gg", "hh", "ii", "jj",
+      "kk"],
+  },
+  # Example with bad indentation in input.
+  "another_grd": {
+      "languages": [
+        "aa", "bb", "cc", "dd", "ee", "ff", "gg", "hh", "ii", "jj", "kk",
+      ],
+  },
+}
+'''
+    expected_text = r'''
+# This comment should be preserved
+# 23456789012345678901234567890123456789
+{
+  "android_grd": {
+    "languages": [
+      "A2", "AA", "BB", "CC", "DD",
+      "E2", "EE", "FF", "GG", "HH",
+      "I2", "II", "JJ", "KK",
+    ],
+  },
+  # Example with bad indentation in input.
+  "another_grd": {
+      "languages": [
+        "A2", "AA", "BB", "CC", "DD",
+        "E2", "EE", "FF", "GG", "HH",
+        "I2", "II", "JJ", "KK",
+      ],
+  },
+}
+'''
+    input_lines = input_text.splitlines()
+    test_locales = ([
+        'AA', 'BB', 'CC', 'DD', 'EE', 'FF', 'GG', 'HH', 'II', 'JJ', 'KK', 'A2',
+        'E2', 'I2'
+    ])
+    expected_lines = expected_text.splitlines()
+    self.assertListEqual(
+        _UpdateLocalesInExpectationLines(input_lines, test_locales, 40),
+        expected_lines)
+
+  def test_missing_list_termination(self):
+    input_lines = r'''
+  "languages": [
+    "aa", "bb", "cc", "dd"
+'''.splitlines()
+    with self.assertRaises(Exception) as cm:
+      _UpdateLocalesInExpectationLines(input_lines, ['a', 'b'], 40)
+
+    self.assertEqual(str(cm.exception), '2: Missing list termination!')
+
+
+def _UpdateLocalesInExpectationFile(pyl_path, wanted_locales):
+  """Update all locales listed in a given expectations file.
+
+  Args:
+    pyl_path: Path to .pyl file to update.
+    wanted_locales: List of locales that need to be written to
+      the file.
+ """ + tc_locales = { + _FixTranslationConsoleLocaleName(locale) + for locale in set(wanted_locales) - set([_DEFAULT_LOCALE]) + } + + with open(pyl_path) as f: + input_lines = [l.rstrip() for l in f.readlines()] + + updated_lines = _UpdateLocalesInExpectationLines(input_lines, tc_locales) + with build_utils.AtomicOutput(pyl_path) as f: + f.writelines('\n'.join(updated_lines) + '\n') + + +########################################################################## +########################################################################## +##### +##### C H E C K E V E R Y T H I N G +##### +########################################################################## +########################################################################## + +# pylint: enable=unused-argument + + +def _IsAllInputFile(input_file): + return _IsGritInputFile(input_file) or _IsBuildGnInputFile(input_file) + + +def _CheckAllFiles(input_file, input_lines, wanted_locales): + errors = [] + if _IsGritInputFile(input_file): + errors += _CheckGrdTranslations(input_file, input_lines, wanted_locales) + errors += _CheckGrdAndroidOutputElements( + input_file, input_lines, wanted_locales) + elif _IsBuildGnInputFile(input_file): + errors += _CheckGnAndroidOutputs(input_file, input_lines, wanted_locales) + return errors + + +def _AddMissingLocalesInAllFiles(input_file, input_lines, wanted_locales): + if _IsGritInputFile(input_file): + lines = _AddMissingLocalesInGrdTranslations( + input_file, input_lines, wanted_locales) + lines = _AddMissingLocalesInGrdAndroidOutputs( + input_file, lines, wanted_locales) + elif _IsBuildGnInputFile(input_file): + lines = _AddMissingLocalesInGnAndroidOutputs( + input_file, input_lines, wanted_locales) + return lines + + +########################################################################## +########################################################################## +##### +##### C O M M A N D H A N D L I N G +##### +########################################################################## +########################################################################## + +class _Command(object): + """A base class for all commands recognized by this script. + + Usage is the following: + 1) Derived classes must re-define the following class-based fields: + - name: Command name (e.g. 'list-locales') + - description: Command short description. + - long_description: Optional. Command long description. + NOTE: As a convenience, if the first character is a newline, + it will be omitted in the help output. + + 2) Derived classes for commands that take arguments should override + RegisterExtraArgs(), which receives a corresponding argparse + sub-parser as argument. + + 3) Derived classes should implement a Run() command, which can read + the current arguments from self.args. 
+  """
+  name = None
+  description = None
+  long_description = None
+
+  def __init__(self):
+    self._parser = None
+    self.args = None
+
+  def RegisterExtraArgs(self, subparser):
+    pass
+
+  def RegisterArgs(self, parser):
+    subp = parser.add_parser(
+        self.name, help=self.description,
+        description=self.long_description or self.description,
+        formatter_class=argparse.RawDescriptionHelpFormatter)
+    self._parser = subp
+    subp.set_defaults(command=self)
+    group = subp.add_argument_group('%s arguments' % self.name)
+    self.RegisterExtraArgs(group)
+
+  def ProcessArgs(self, args):
+    self.args = args
+
+
+class _ListLocalesCommand(_Command):
+  """Implement the 'list-locales' command to list locale lists of interest."""
+  name = 'list-locales'
+  description = 'List supported Chrome locales'
+  long_description = r'''
+List locales of interest, by default this prints all locales supported by
+Chrome, but `--type=ios_unsupported` can be used for the list of locales
+unsupported on iOS.
+
+These values are extracted directly from build/config/locales.gni.
+
+Additionally, use the --as-json argument to print the list as a JSON list,
+instead of the default format (a space-separated list of locale names).
+'''
+
+  # Maps type argument to a function returning the corresponding locales list.
+  TYPE_MAP = {
+      'all': ChromeLocales,
+      'ios_unsupported': IosUnsupportedLocales,
+  }
+
+  def RegisterExtraArgs(self, group):
+    group.add_argument(
+        '--as-json',
+        action='store_true',
+        help='Output as JSON list.')
+    group.add_argument(
+        '--type',
+        choices=tuple(self.TYPE_MAP.keys()),
+        default='all',
+        help='Select type of locale list to print.')
+
+  def Run(self):
+    locale_list = self.TYPE_MAP[self.args.type]()
+    if self.args.as_json:
+      print('[%s]' % ', '.join('"%s"' % loc for loc in locale_list))
+    else:
+      print(' '.join(locale_list))
+
+
+class _CheckInputFileBaseCommand(_Command):
+  """Used as a base for other _Command subclasses that check input files.
+
+  Subclasses should also define the following class-level variables:
+
+  - select_file_func:
+    A predicate that receives a file name (not path) and returns True if it
+    should be selected for inspection. Used when scanning directories with
+    '--scan-dir <dir>'.
+
+  - check_func:
+  - fix_func:
+    Two functions passed as parameters to _ProcessFile(), see relevant
+    documentation in this function's definition.
+  """
+  select_file_func = None
+  check_func = None
+  fix_func = None
+
+  def RegisterExtraArgs(self, group):
+    group.add_argument(
+        '--scan-dir',
+        action='append',
+        help='Optional directory to scan for input files recursively.')
+    group.add_argument(
+        'input',
+        nargs='*',
+        help='Input file(s) to check.')
+    group.add_argument(
+        '--fix-inplace',
+        action='store_true',
+        help='Try to fix the files in-place too.')
+    group.add_argument(
+        '--add-locales',
+        help='Space-separated list of additional locales to use')
+
+  def Run(self):
+    args = self.args
+    input_files = []
+    if args.input:
+      input_files = args.input
+    if args.scan_dir:
+      input_files.extend(_ScanDirectoriesForFiles(
+          args.scan_dir, self.select_file_func.__func__))
+    locales = ChromeLocales()
+    if args.add_locales:
+      locales.extend(args.add_locales.split(' '))
+
+    locales = set(locales)
+
+    for input_file in input_files:
+      _ProcessFile(input_file,
+                   locales,
+                   self.check_func.__func__,
+                   self.fix_func.__func__ if args.fix_inplace else None)
+    print('%sDone.' % (_CONSOLE_START_LINE))
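
A note on the `.__func__` lookups in `Run()` above: a plain function stored as a class attribute becomes a bound method when accessed through an instance, which would silently prepend `self` to its arguments. A small self-contained demonstration (the names are illustrative, not from the script):

```python
def check(value):
  return value > 0

class Checker:
  check_func = check  # plain function stored as a class attribute

c = Checker()
print(c.check_func)              # <bound method check of <...Checker...>>
print(c.check_func.__func__(5))  # True: calls check(5) without a `self`
```
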
+
+
+class _CheckGrdAndroidOutputsCommand(_CheckInputFileBaseCommand):
+  name = 'check-grd-android-outputs'
+  description = (
+      'Check the Android resource (.xml) file outputs in GRIT input files.')
+  long_description = r'''
+Check the Android .xml file outputs in one or more input GRIT (.grd) files
+for the following conditions:
+
+  - Each item has a correct 'lang' attribute.
+  - There are no duplicated lines for the same 'lang' attribute.
+  - There are no extra locales that Chromium doesn't want.
+  - No wanted locale is missing.
+  - Filenames exist for each listed locale.
+  - Filenames are well-formed.
+'''
+  select_file_func = _IsGritInputFile
+  check_func = _CheckGrdAndroidOutputElements
+  fix_func = _AddMissingLocalesInGrdAndroidOutputs
+
+
+class _CheckGrdTranslationsCommand(_CheckInputFileBaseCommand):
+  name = 'check-grd-translations'
+  description = (
+      'Check the translation (.xtb) files output by .grd input files.')
+  long_description = r'''
+Check the translation (.xtb) file outputs in one or more input GRIT (.grd)
+files for the following conditions:
+
+  - Each item has a correct 'lang' attribute.
+  - There are no duplicated lines for the same 'lang' attribute.
+  - There are no extra locales that Chromium doesn't want.
+  - No wanted locale is missing.
+  - Each item has a 'path' attribute.
+  - Each such path value ends with '.xtb'.
+'''
+  select_file_func = _IsGritInputFile
+  check_func = _CheckGrdTranslations
+  fix_func = _AddMissingLocalesInGrdTranslations
+
+
+class _CheckGnAndroidOutputsCommand(_CheckInputFileBaseCommand):
+  name = 'check-gn-android-outputs'
+  description = 'Check the Android .xml file lists in GN build files.'
+  long_description = r'''
+Check one or more BUILD.gn files, looking for lists of Android resource .xml
+files, and checking that:
+
+  - There are no duplicated output files in the list.
+  - Each output file belongs to a wanted Chromium locale.
+  - There are no output files for unwanted Chromium locales.
+'''
+  select_file_func = _IsBuildGnInputFile
+  check_func = _CheckGnAndroidOutputs
+  fix_func = _AddMissingLocalesInGnAndroidOutputs
+
+
+class _CheckAllCommand(_CheckInputFileBaseCommand):
+  name = 'check-all'
+  description = 'Check everything.'
+  long_description = 'Equivalent to calling all other check-xxx commands.'
+  select_file_func = _IsAllInputFile
+  check_func = _CheckAllFiles
+  fix_func = _AddMissingLocalesInAllFiles
+
+
+class _UpdateExpectationsCommand(_Command):
+  name = 'update-expectations'
+  description = 'Update translation expectations file.'
+  long_description = r'''
+Update %s files to match the current list of locales supported by Chromium.
+This is especially useful to add new locales before updating any GRIT or GN
+input file with the --add-locales option.
+''' % _EXPECTATIONS_FILENAME
+
+  def RegisterExtraArgs(self, group):
+    group.add_argument(
+        '--add-locales',
+        help='Space-separated list of additional locales to use.')
+
+  def Run(self):
+    locales = ChromeLocales()
+    add_locales = self.args.add_locales
+    if add_locales:
+      locales.extend(add_locales.split(' '))
+
+    expectation_paths = [
+        'tools/gritsettings/translation_expectations.pyl',
+        'clank/tools/translation_expectations.pyl',
+    ]
+    missing_expectation_files = []
+    for path in expectation_paths:
+      file_path = os.path.join(_TOP_SRC_DIR, path)
+      if not os.path.exists(file_path):
+        missing_expectation_files.append(file_path)
+        continue
+      _UpdateLocalesInExpectationFile(file_path, locales)
+
+    if missing_expectation_files:
+      sys.stderr.write('WARNING: Missing file(s): %s\n' %
+                       (', '.join(missing_expectation_files)))
+
+
+class _UnitTestsCommand(_Command):
+  name = 'unit-tests'
+  description = 'Run internal unit-tests for this script'
+
+  def RegisterExtraArgs(self, group):
+    group.add_argument(
+        '-v', '--verbose', action='count', help='Increase test verbosity.')
+    group.add_argument('args', nargs=argparse.REMAINDER)
+
+  def Run(self):
+    argv = [_SCRIPT_NAME] + self.args.args
+    unittest.main(argv=argv, verbosity=self.args.verbose)
+
+
+# List of all commands supported by this script.
+_COMMANDS = [
+    _ListLocalesCommand,
+    _CheckGrdAndroidOutputsCommand,
+    _CheckGrdTranslationsCommand,
+    _CheckGnAndroidOutputsCommand,
+    _CheckAllCommand,
+    _UpdateExpectationsCommand,
+    _UnitTestsCommand,
+]
+
+
+def main(argv):
+  parser = argparse.ArgumentParser(
+      description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+
+  subparsers = parser.add_subparsers()
+  commands = [clazz() for clazz in _COMMANDS]
+  for command in commands:
+    command.RegisterArgs(subparsers)
+
+  if not argv:
+    argv = ['--help']
+
+  args = parser.parse_args(argv)
+  args.command.ProcessArgs(args)
+  args.command.Run()
+
+
+if __name__ == "__main__":
+  main(sys.argv[1:])
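
The command classes above all hang off argparse sub-parsers through `set_defaults(command=self)`, so `main()` can dispatch without a command table lookup. A minimal, self-contained sketch of that pattern (with an illustrative `hello` command, not one of the script's real commands):

```python
import argparse

class Command:
  name = None
  description = None

  def RegisterArgs(self, subparsers):
    subp = subparsers.add_parser(self.name, help=self.description)
    subp.set_defaults(command=self)  # remember which command matched

  def Run(self, args):
    raise NotImplementedError

class HelloCommand(Command):
  name = 'hello'
  description = 'Print a greeting'

  def Run(self, args):
    print('hello')

def main(argv):
  parser = argparse.ArgumentParser()
  subparsers = parser.add_subparsers()
  for clazz in (HelloCommand,):
    clazz().RegisterArgs(subparsers)
  args = parser.parse_args(argv)
  args.command.Run(args)  # dispatch to whichever sub-command matched

main(['hello'])  # prints 'hello'
```
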
diff --git a/mac/OWNERS b/mac/OWNERS
new file mode 100644
index 000000000000..6f3324f07c8f
--- /dev/null
+++ b/mac/OWNERS
@@ -0,0 +1 @@
+file://build/apple/OWNERS
diff --git a/mac/find_sdk.py b/mac/find_sdk.py
new file mode 100755
index 000000000000..3dcc4d5d36bf
--- /dev/null
+++ b/mac/find_sdk.py
@@ -0,0 +1,112 @@
+#!/usr/bin/env python3
+# Copyright 2012 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+r"""Prints the lowest locally available SDK version greater than or equal to a
+given minimum sdk version to standard output.
+
+If --print_sdk_path is passed, then the script will also print the SDK path.
+If --print_bin_path is passed, then the script will also print the path to the
+toolchain bin dir.
+
+Usage:
+  python find_sdk.py \
+      [--print_sdk_path] \
+      [--print_bin_path] \
+      10.6  # Ignores SDKs < 10.6
+
+Sample Output:
+/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.14.sdk
+/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/
+10.14
+"""
+
+
+import os
+import plistlib
+import re
+import subprocess
+import sys
+
+from optparse import OptionParser
+
+
+class SdkError(Exception):
+  def __init__(self, value):
+    self.value = value
+  def __str__(self):
+    return repr(self.value)
+
+
+def parse_version(version_str):
+  """'10.6' => [10, 6]"""
+  return [int(s) for s in re.findall(r'(\d+)', version_str)]
+
+
+def main():
+  parser = OptionParser()
+  parser.add_option("--print_sdk_path",
+                    action="store_true", dest="print_sdk_path", default=False,
+                    help="Additionally print the path to the SDK "
+                         "(appears first).")
+  parser.add_option("--print_bin_path",
+                    action="store_true", dest="print_bin_path", default=False,
+                    help="Additionally print the path to the toolchain "
+                         "bin dir.")
+  parser.add_option("--print_sdk_build",
+                    action="store_true", dest="print_sdk_build", default=False,
+                    help="Additionally print the build version of the SDK.")
+  options, args = parser.parse_args()
+  if len(args) != 1:
+    parser.error('Please specify a minimum SDK version')
+  min_sdk_version = args[0]
+
+
+  job = subprocess.Popen(['xcode-select', '-print-path'],
+                         stdout=subprocess.PIPE,
+                         stderr=subprocess.STDOUT)
+  out, err = job.communicate()
+  if job.returncode != 0:
+    print(out, file=sys.stderr)
+    print(err, file=sys.stderr)
+    raise Exception('Error %d running xcode-select' % job.returncode)
+  dev_dir = out.decode('UTF-8').rstrip()
+  sdk_dir = os.path.join(
+      dev_dir, 'Platforms/MacOSX.platform/Developer/SDKs')
+
+  if not os.path.isdir(sdk_dir):
+    raise SdkError('Install Xcode, launch it, accept the license ' +
+                   'agreement, and run `sudo xcode-select -s /path/to/Xcode.app` ' +
+                   'to continue.')
+  sdks = [re.findall(r'^MacOSX(\d+\.\d+)\.sdk$', s) for s in os.listdir(sdk_dir)]
+  sdks = [s[0] for s in sdks if s]  # [['10.5'], ['10.6']] => ['10.5', '10.6']
+  sdks = [s for s in sdks  # ['10.5', '10.6'] => ['10.6']
+          if parse_version(s) >= parse_version(min_sdk_version)]
+  if not sdks:
+    raise Exception('No %s+ SDK found' % min_sdk_version)
+  best_sdk = sorted(sdks, key=parse_version)[0]
+  sdk_name = 'MacOSX' + best_sdk + '.sdk'
+  sdk_path = os.path.join(sdk_dir, sdk_name)
+
+  if options.print_sdk_path:
+    print(sdk_path)
+
+  if options.print_bin_path:
+    bin_path = 'Toolchains/XcodeDefault.xctoolchain/usr/bin/'
+    print(os.path.join(dev_dir, bin_path))
+
+  if options.print_sdk_build:
+    system_version_plist = os.path.join(sdk_path,
+        'System/Library/CoreServices/SystemVersion.plist')
+    with open(system_version_plist, 'rb') as f:
+      system_version_info = plistlib.load(f)
+    if 'ProductBuildVersion' not in system_version_info:
+      raise Exception('Failed to determine ProductBuildVersion ' +
+                      'for SDK at path %s' % system_version_plist)
+    print(system_version_info['ProductBuildVersion'])
+
+  print(best_sdk)
+
+
+if __name__ == '__main__':
+  if sys.platform != 'darwin':
+    raise Exception("This script only runs on Mac")
+  sys.exit(main())
diff --git a/mac/should_use_hermetic_xcode.py b/mac/should_use_hermetic_xcode.py
new file mode 100755
index 000000000000..e4cea4a5a1a6
--- /dev/null
+++ b/mac/should_use_hermetic_xcode.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python3
+
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style
license that can be +# found in the LICENSE file. + +""" +Prints "1" if Chrome targets should be built with hermetic Xcode. +Prints "2" if Chrome targets should be built with hermetic Xcode, but the OS +version does not meet the minimum requirements of the hermetic version of Xcode. +Prints "3" if FORCE_MAC_TOOLCHAIN is set for an iOS target_os +Otherwise prints "0". + +Usage: + python should_use_hermetic_xcode.py +""" + + +import argparse +import os +import sys + +_THIS_DIR_PATH = os.path.abspath(os.path.dirname(os.path.realpath(__file__))) +_BUILD_PATH = os.path.join(_THIS_DIR_PATH, os.pardir) +sys.path.insert(0, _BUILD_PATH) + +import mac_toolchain + + +def _IsCorpMachine(): + if sys.platform == 'darwin': + return os.path.isdir('/Library/GoogleCorpSupport/') + if sys.platform.startswith('linux'): + import subprocess + try: + return subprocess.check_output(['lsb_release', + '-sc']).rstrip() == b'rodete' + except: + return False + return False + + +def main(): + parser = argparse.ArgumentParser(description='Download hermetic Xcode.') + parser.add_argument('platform') + args = parser.parse_args() + + force_toolchain = os.environ.get('FORCE_MAC_TOOLCHAIN') + if force_toolchain and args.platform == 'ios': + return "3" + allow_corp = args.platform == 'mac' and _IsCorpMachine() + if force_toolchain or allow_corp: + if not mac_toolchain.PlatformMeetsHermeticXcodeRequirements(): + return "2" + return "1" + else: + return "0" + + +if __name__ == '__main__': + print(main()) + sys.exit(0) diff --git a/mac_toolchain.py b/mac_toolchain.py new file mode 100755 index 000000000000..cd253cd7923b --- /dev/null +++ b/mac_toolchain.py @@ -0,0 +1,210 @@ +#!/usr/bin/env python3 + +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" +If should_use_hermetic_xcode.py emits "1", and the current toolchain is out of +date: + * Downloads the hermetic mac toolchain + * Requires CIPD authentication. Run `cipd auth-login`, use Google account. + * Accepts the license. + * If xcode-select and xcodebuild are not passwordless in sudoers, requires + user interaction. + * Downloads standalone binaries from [a possibly different version of Xcode]. + +The toolchain version can be overridden by setting MAC_TOOLCHAIN_REVISION with +the full revision, e.g. 9A235. +""" + +import argparse +import os +import pkg_resources +import platform +import plistlib +import shutil +import subprocess +import sys + + +def LoadPList(path): + """Loads Plist at |path| and returns it as a dictionary.""" + with open(path, 'rb') as f: + return plistlib.load(f) + + +# This contains binaries from Xcode 14.3 14E222b along with the macOS 13.3 SDK +# (13.3 22E245). To build these packages, see comments in +# build/xcode_binaries.yaml +# To update the version numbers, open Xcode's "About Xcode" for the first number +# and run `xcrun --show-sdk-build-version` for the second. +# To update the _TAG, use the output of the `cipd create` command mentioned in +# xcode_binaries.yaml. + +MAC_BINARIES_LABEL = 'infra_internal/ios/xcode/xcode_binaries/mac-amd64' +MAC_BINARIES_TAG = 'ajH0-Cuzzqtyj98qUlsgO1-lepRhXoVVNAjVXDIYHxcC' + +# The toolchain will not be downloaded if the minimum OS version is not met. 19 +# is the major version number for macOS 10.15. Xcode 14.0 14B47b only runs on +# macOS 12.4 and newer, but some bots are still running older OS versions. macOS +# 10.15.4, the OS minimum through Xcode 12.4, still seems to work. 
+MAC_MINIMUM_OS_VERSION = [19, 4] + +BASE_DIR = os.path.abspath(os.path.dirname(__file__)) +TOOLCHAIN_ROOT = os.path.join(BASE_DIR, 'mac_files') +TOOLCHAIN_BUILD_DIR = os.path.join(TOOLCHAIN_ROOT, 'Xcode.app') + +# Always integrity-check the entire SDK. Mac SDK packages are complex and often +# hit edge cases in cipd (eg https://crbug.com/1033987, +# https://crbug.com/915278), and generally when this happens it requires manual +# intervention to fix. +# Note the trailing \n! +PARANOID_MODE = '$ParanoidMode CheckIntegrity\n' + + +def PlatformMeetsHermeticXcodeRequirements(): + if sys.platform != 'darwin': + return True + needed = MAC_MINIMUM_OS_VERSION + major_version = [int(v) for v in platform.release().split('.')[:len(needed)]] + return major_version >= needed + + +def _UseHermeticToolchain(): + current_dir = os.path.dirname(os.path.realpath(__file__)) + script_path = os.path.join(current_dir, 'mac/should_use_hermetic_xcode.py') + proc = subprocess.Popen([script_path, 'mac'], stdout=subprocess.PIPE) + return '1' in proc.stdout.readline().decode() + + +def RequestCipdAuthentication(): + """Requests that the user authenticate to access Xcode CIPD packages.""" + + print('Access to Xcode CIPD package requires authentication.') + print('-----------------------------------------------------------------') + print() + print('You appear to be a Googler.') + print() + print('I\'m sorry for the hassle, but you may need to do a one-time manual') + print('authentication. Please run:') + print() + print(' cipd auth-login') + print() + print('and follow the instructions.') + print() + print('NOTE: Use your google.com credentials, not chromium.org.') + print() + print('-----------------------------------------------------------------') + print() + sys.stdout.flush() + + +def PrintError(message): + # Flush buffers to ensure correct output ordering. + sys.stdout.flush() + sys.stderr.write(message + '\n') + sys.stderr.flush() + + +def InstallXcodeBinaries(): + """Installs the Xcode binaries needed to build Chrome and accepts the license. + + This is the replacement for InstallXcode that installs a trimmed down version + of Xcode that is OS-version agnostic. + """ + # First make sure the directory exists. It will serve as the cipd root. This + # also ensures that there will be no conflicts of cipd root. + binaries_root = os.path.join(TOOLCHAIN_ROOT, 'xcode_binaries') + if not os.path.exists(binaries_root): + os.makedirs(binaries_root) + + # 'cipd ensure' is idempotent. + args = ['cipd', 'ensure', '-root', binaries_root, '-ensure-file', '-'] + + p = subprocess.Popen(args, + universal_newlines=True, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + stdout, stderr = p.communicate(input=PARANOID_MODE + MAC_BINARIES_LABEL + + ' ' + MAC_BINARIES_TAG) + if p.returncode != 0: + print(stdout) + print(stderr) + RequestCipdAuthentication() + return 1 + + if sys.platform != 'darwin': + return 0 + + # Accept the license for this version of Xcode if it's newer than the + # currently accepted version. 
+ cipd_xcode_version_plist_path = os.path.join(binaries_root, + 'Contents/version.plist') + cipd_xcode_version_plist = LoadPList(cipd_xcode_version_plist_path) + cipd_xcode_version = cipd_xcode_version_plist['CFBundleShortVersionString'] + + cipd_license_path = os.path.join(binaries_root, + 'Contents/Resources/LicenseInfo.plist') + cipd_license_plist = LoadPList(cipd_license_path) + cipd_license_version = cipd_license_plist['licenseID'] + + should_overwrite_license = True + current_license_path = '/Library/Preferences/com.apple.dt.Xcode.plist' + if os.path.exists(current_license_path): + current_license_plist = LoadPList(current_license_path) + xcode_version = current_license_plist.get( + 'IDEXcodeVersionForAgreedToGMLicense') + if (xcode_version is not None and pkg_resources.parse_version(xcode_version) + >= pkg_resources.parse_version(cipd_xcode_version)): + should_overwrite_license = False + + if not should_overwrite_license: + return 0 + + # Use puppet's sudoers script to accept the license if its available. + license_accept_script = '/usr/local/bin/xcode_accept_license.sh' + if os.path.exists(license_accept_script): + args = [ + 'sudo', license_accept_script, cipd_xcode_version, cipd_license_version + ] + subprocess.check_call(args) + return 0 + + # Otherwise manually accept the license. This will prompt for sudo. + print('Accepting new Xcode license. Requires sudo.') + sys.stdout.flush() + args = [ + 'sudo', 'defaults', 'write', current_license_path, + 'IDEXcodeVersionForAgreedToGMLicense', cipd_xcode_version + ] + subprocess.check_call(args) + args = [ + 'sudo', 'defaults', 'write', current_license_path, + 'IDELastGMLicenseAgreedTo', cipd_license_version + ] + subprocess.check_call(args) + args = ['sudo', 'plutil', '-convert', 'xml1', current_license_path] + subprocess.check_call(args) + + return 0 + + +def main(): + if not _UseHermeticToolchain(): + print('Skipping Mac toolchain installation for mac') + return 0 + + parser = argparse.ArgumentParser(description='Download hermetic Xcode.') + args = parser.parse_args() + + if not PlatformMeetsHermeticXcodeRequirements(): + print('OS version does not support toolchain.') + return 0 + + return InstallXcodeBinaries() + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/metadata.json.in b/metadata.json.in new file mode 100644 index 000000000000..3fceff256c18 --- /dev/null +++ b/metadata.json.in @@ -0,0 +1,6 @@ +{ + "content": { + "version": "@MAJOR@.@MINOR@.@BUILD@.@PATCH@" + }, + "metadata_version": 1 +} diff --git a/nocompile.gni b/nocompile.gni new file mode 100644 index 000000000000..942ad9ecef46 --- /dev/null +++ b/nocompile.gni @@ -0,0 +1,151 @@ +# Copyright 2011 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is meant to be included into an target to create a unittest that +# invokes a set of no-compile tests. A no-compile test is a test that asserts +# a particular construct will not compile. +# +# Also see: +# http://dev.chromium.org/developers/testing/no-compile-tests +# +# To use this, create a gyp target with the following form: +# +# import("//build/nocompile.gni") +# nocompile_test("my_module_nc_unittests") { +# sources = [ +# 'nc_testset_1.nc', +# 'nc_testset_2.nc', +# ] +# +# # optional extra include dirs: +# include_dirs = [ ... ] +# } +# +# The .nc files are C++ files that contain code we wish to assert will not +# compile. Each individual test case in the file should be put in its own +# #ifdef section. 
The expected output should be appended with a C++-style
+# comment that has a python list of regular expressions. This will likely
+# be greater than 80 characters. Giving a solid expected output test is
+# important so that random compile failures do not cause the test to pass.
+#
+# Example .nc file:
+#
+#   #if defined(TEST_NEEDS_SEMICOLON)  // [r"expected ',' or ';' at end of input"]
+#
+#   int a = 1
+#
+#   #elif defined(TEST_NEEDS_CAST)  // [r"invalid conversion from 'void*' to 'char*'"]
+#
+#   void* a = NULL;
+#   char* b = a;
+#
+#   #endif
+#
+# If we needed to disable TEST_NEEDS_SEMICOLON, then change the define to:
+#
+#   DISABLE_TEST_NEEDS_SEMICOLON
+#   TEST_NEEDS_CAST
+#
+# The lines above are parsed by a regexp so avoid getting creative with the
+# formatting or ifdef logic; it will likely just not work.
+#
+# Implementation notes:
+# The .nc files are actually processed by a python script which executes the
+# compiler and generates a .cc file that is empty on success, or will have a
+# series of #error lines on failure, and a set of trivially passing gunit
+# TEST() functions on success. This allows us to fail at the compile step when
+# something goes wrong, and know during the unittest run that the test was at
+# least processed when things go right.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/python.gni")
+import("//build/toolchain/toolchain.gni")
+import("//testing/test.gni")
+
+if (is_mac) {
+  import("//build/config/mac/mac_sdk.gni")
+}
+
+declare_args() {
+  # TODO(crbug.com/105388): make sure no-compile test is not flaky.
+  enable_nocompile_tests = (is_linux || is_chromeos || is_apple) && is_clang &&
+                           host_cpu == target_cpu
+}
+
+if (enable_nocompile_tests) {
+  import("//build/config/c++/c++.gni")
+  import("//build/config/sysroot.gni")
+  template("nocompile_test") {
+    nocompile_target = target_name + "_run_nocompile"
+
+    action_foreach(nocompile_target) {
+      testonly = true
+      script = "//tools/nocompile_driver.py"
+      sources = invoker.sources
+      deps = invoker.deps
+      if (defined(invoker.public_deps)) {
+        public_deps = invoker.public_deps
+      }
+
+      result_path = "$target_gen_dir/{{source_name_part}}_nc.cc"
+      depfile = "${result_path}.d"
+      outputs = [ result_path ]
+      args = [
+        rebase_path("$clang_base_path/bin/clang++", root_build_dir),
+        "4",  # number of compilers to invoke in parallel.
+        "{{source}}",
+        rebase_path(result_path, root_build_dir),
+        "--",
+        "-nostdinc++",
+        "-isystem" + rebase_path("$libcxx_prefix/include", root_build_dir),
+        "-isystem" + rebase_path("$libcxxabi_prefix/include", root_build_dir),
+        "-std=c++17",
+        "-Wall",
+        "-Werror",
+        "-Wfatal-errors",
+        "-Wthread-safety",
+        "-I" + rebase_path("//", root_build_dir),
+        "-I" + rebase_path("//third_party/abseil-cpp/", root_build_dir),
+        "-I" + rebase_path("//buildtools/third_party/libc++/", root_build_dir),
+        "-I" + rebase_path(root_gen_dir, root_build_dir),
+
+        # TODO(https://crbug.com/989932): Track build/config/compiler/BUILD.gn
+        "-Wno-implicit-int-float-conversion",
+      ]
+
+      if (is_mac && host_os != "mac") {
+        args += [
+          "--target=x86_64-apple-macos",
+          "-mmacos-version-min=$mac_deployment_target",
+        ]
+      }
+
+      # Iterate over any extra include dirs and append them to the command line.
+ if (defined(invoker.include_dirs)) { + foreach(include_dir, invoker.include_dirs) { + args += [ "-I" + rebase_path(include_dir, root_build_dir) ] + } + } + + if (sysroot != "") { + args += [ + "--sysroot", + rebase_path(sysroot, root_build_dir), + ] + } + + if (!is_nacl) { + args += [ + # TODO(crbug.com/1343975) Evaluate and possibly enable. + "-Wno-deprecated-builtins", + ] + } + } + + test(target_name) { + deps = invoker.deps + [ ":$nocompile_target" ] + sources = get_target_outputs(":$nocompile_target") + } + } +} diff --git a/noop.py b/noop.py new file mode 100644 index 000000000000..6c7477591879 --- /dev/null +++ b/noop.py @@ -0,0 +1,4 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Script that does nothing successfully.""" diff --git a/partitioned_shared_library.gni b/partitioned_shared_library.gni new file mode 100644 index 000000000000..2af4f9e93b5d --- /dev/null +++ b/partitioned_shared_library.gni @@ -0,0 +1,142 @@ +# Copyright 2019 The Chromium Authors + +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/config.gni") +import("//build/config/clang/clang.gni") +import("//build/config/compiler/compiler.gni") + +# This template creates a set of shared libraries, by linking a single +# "partitioned" shared library, then splitting it into multiple pieces. +# The intention is to facilitate code-splitting between a base library and +# additional feature-specific libraries that may be obtained and loaded at a +# later time. +# +# The combined library is an intermediate product made by leveraging the LLVM +# toolchain. Code modules may be labeled via compiler flag as belonging to a +# particular partition. At link time, any symbols reachable by only a single +# partition's entrypoints will be located in a partition-specific library +# segment. After linking, the segments are split apart using objcopy into +# separate libraries. The main library is then packaged with the application +# as usual, while feature libraries may be packaged, delivered and loaded +# separately (via an Android Dynamic Feature Module). +# +# When loading a feature library, the intended address of the library must be +# supplied to the loader, so that it can be mapped to the memory location. The +# address offsets of the feature libraries are stored in the base library and +# accessed through special symbols named according to the partitions. +# +# The template instantiates targets for the base library, as well as each +# specified partition, based on the root target name. Example: +# +# - libmonochrome (base library) +# - libmonochrome_foo (partition library for feature 'foo') +# - libmonochrome_bar (partition library for feature 'bar') +# +# Note that the feature library filenames are chosen based on the main +# library's name (eg. libmonochrome_foo.so), but the soname of the feature +# library is based on the feature name (eg. "foo"). This should generally be +# okay, with the caveat that loading the library multiple times *might* cause +# problems in Android. +# +# This template uses shared_library's default configurations. +# +# Variables: +# partitions: A list of library partition names to extract, in addition to +# the base library. 
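
To make the post-link split step concrete, here is a conceptual Python sketch of extracting the base and feature libraries from the combined artifact. `llvm-objcopy` does provide `--extract-main-partition` and `--extract-partition`, but the wiring below (paths, partition soname, helper name) is illustrative only; the real logic lives in //build/extract_partition.py, invoked by the template that follows.

```python
import subprocess

def extract_partition(objcopy, combined, output, partition=None):
  # Feature partitions are pulled out by their partition soname; omitting
  # the name extracts the main (base library) partition instead.
  cmd = [objcopy]
  if partition:
    cmd.append('--extract-partition=' + partition)
  else:
    cmd.append('--extract-main-partition')
  subprocess.check_call(cmd + [combined, output])

# Hypothetical usage for a base library plus a 'foo' feature split:
# extract_partition('llvm-objcopy', 'libmono__combined.so', 'libmono.so')
# extract_partition('llvm-objcopy', 'libmono__combined.so',
#                   'libmono_foo.so', partition='foo_partition')
```
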
+
+template("partitioned_shared_library") {
+  assert(is_clang)
+  forward_variables_from(invoker, [ "testonly" ])
+
+  _combined_library_target = "${target_name}__combined"
+
+  # Strip "lib" from target names; it will be re-added to output libraries.
+  _output_name = string_replace(target_name, "lib", "")
+
+  shared_library(_combined_library_target) {
+    forward_variables_from(invoker, "*", [ "partitions" ])
+    if (!defined(ldflags)) {
+      ldflags = []
+    }
+    ldflags += [
+      "-Wl,-soname,lib${_output_name}.so",
+      "--partitioned-library",
+    ]
+
+    # This shared library is an intermediate artifact that should not be
+    # packaged into the final build. Therefore, reset metadata.
+    metadata = {
+    }
+  }
+
+  template("partition_action") {
+    action(target_name) {
+      deps = [ ":$_combined_library_target" ]
+      script = "//build/extract_partition.py"
+      sources =
+          [ "$root_out_dir/lib.unstripped/lib${_output_name}__combined.so" ]
+      outputs = [
+        invoker.unstripped_output,
+        invoker.stripped_output,
+      ]
+      data = [ invoker.unstripped_output ]
+      metadata = {
+        shared_libraries = [ invoker.stripped_output ]
+      }
+      args = [
+        "--objcopy",
+        rebase_path("$clang_base_path/bin/llvm-objcopy", root_build_dir),
+        "--unstripped-output",
+        rebase_path(invoker.unstripped_output, root_build_dir),
+        "--stripped-output",
+        rebase_path(invoker.stripped_output, root_build_dir),
+      ]
+      if (defined(invoker.partition) && invoker.partition != "") {
+        args += [
+          "--partition",
+          "${invoker.partition}",
+        ]
+      }
+
+      if (use_debug_fission) {
+        args += [ "--split-dwarf" ]
+        outputs += [ invoker.unstripped_output + ".dwp" ]
+      }
+      args += [ rebase_path(sources[0], root_build_dir) ]
+    }
+  }
+
+  partition_action(target_name) {
+    stripped_output = "$root_out_dir/lib${_output_name}.so"
+    unstripped_output = "$root_out_dir/lib.unstripped/lib${_output_name}.so"
+  }
+
+  # Note that as of now, non-base partition libraries are placed in a
+  # subdirectory of the root output directory. This is because partition
+  # sonames are not sensitive to the filename of the base library, and as
+  # such, their corresponding file names may be generated multiple times by
+  # different base libraries. To avoid collisions, each base library target
+  # has a corresponding subdir for its extra partitions.
+  #
+  # If this proves problematic to various pieces of infrastructure, a proposed
+  # alternative is allowing the linker to rename partitions. For example,
+  # feature "foo" may be a partition. If two different base libraries both
+  # define "foo" partitions, the linker may be made to accept an extra command
+  # to rename the partition's soname to "foo1" or "foo2". Other build config
+  # can name the libraries foo1.so and foo2.so, allowing them to reside in the
+  # same directory.
+  foreach(_partition, invoker.partitions) {
+    partition_action("${target_name}_${_partition}") {
+      partition = "${_partition}_partition"
+      stripped_output = "$root_out_dir/lib${_output_name}_${partition}.so"
+      unstripped_output =
+          "$root_out_dir/lib.unstripped/lib${_output_name}_${partition}.so"
+    }
+  }
+}
+
+set_defaults("partitioned_shared_library") {
+  configs = default_shared_library_configs
+}
diff --git a/precompile.cc b/precompile.cc
new file mode 100644
index 000000000000..8ae429349cc3
--- /dev/null
+++ b/precompile.cc
@@ -0,0 +1,7 @@
+// Copyright 2011 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Precompiled header generator for Windows builds.
No include is needed
+// in this file as the PCH include is forced via the "Forced Include File"
+// flag in the projects generated by GYP.
diff --git a/precompile.h b/precompile.h
new file mode 100644
index 000000000000..d6e3dc11a31c
--- /dev/null
+++ b/precompile.h
@@ -0,0 +1,53 @@
+// Copyright 2012 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is used as a precompiled header for both C and C++ files. So
+// any C++ headers must go in the __cplusplus block below.
+
+#if defined(BUILD_PRECOMPILE_H_)
+#error You shouldn't include the precompiled header file more than once.
+#endif
+
+#define BUILD_PRECOMPILE_H_
+
+#include <errno.h>
+#include <fcntl.h>
+#include <limits.h>
+#include <math.h>
+#include <memory.h>
+#include <signal.h>
+#include <stdarg.h>
+#include <stddef.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+
+#if defined(__cplusplus)
+
+#include <algorithm>
+#include <bitset>
+#include <cmath>
+#include <cstddef>
+#include <cstdio>
+#include <cstdlib>
+#include <cstring>
+#include <deque>
+#include <fstream>
+#include <functional>
+#include <iomanip>
+#include <iosfwd>
+#include <iterator>
+#include <limits>
+#include <list>
+#include <map>
+#include <ostream>
+#include <queue>
+#include <set>
+#include <sstream>
+#include <string>
+#include <utility>
+#include <vector>
+
+#endif  // __cplusplus
diff --git a/print_python_deps.py b/print_python_deps.py
new file mode 100755
index 000000000000..07f988a87113
--- /dev/null
+++ b/print_python_deps.py
@@ -0,0 +1,186 @@
+#!/usr/bin/env vpython3
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Prints all non-system dependencies for the given module.
+
+The primary use-case for this script is to generate the list of python modules
+required for .isolate files.
+"""
+
+import argparse
+import os
+import pipes
+import sys
+
+# Don't use any helper modules, or else they will end up in the results.
+
+
+_SRC_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
+
+
+def ComputePythonDependencies():
+  """Gets the paths of imported non-system python modules.
+
+  A path is assumed to be a "system" import if it is outside of chromium's
+  src/. The paths will be relative to the current directory.
+ """ + module_paths = (m.__file__ for m in sys.modules.values() + if m and hasattr(m, '__file__') and m.__file__) + + src_paths = set() + for path in module_paths: + if path == __file__: + continue + path = os.path.abspath(path) + if not path.startswith(_SRC_ROOT): + continue + + if (path.endswith('.pyc') + or (path.endswith('c') and not os.path.splitext(path)[1])): + path = path[:-1] + src_paths.add(path) + + return src_paths + + +def quote(string): + if string.count(' ') > 0: + return '"%s"' % string + else: + return string + + +def _NormalizeCommandLine(options): + """Returns a string that when run from SRC_ROOT replicates the command.""" + args = ['build/print_python_deps.py'] + root = os.path.relpath(options.root, _SRC_ROOT) + if root != '.': + args.extend(('--root', root)) + if options.output: + args.extend(('--output', os.path.relpath(options.output, _SRC_ROOT))) + if options.gn_paths: + args.extend(('--gn-paths',)) + for allowlist in sorted(options.allowlists): + args.extend(('--allowlist', os.path.relpath(allowlist, _SRC_ROOT))) + args.append(os.path.relpath(options.module, _SRC_ROOT)) + if os.name == 'nt': + return ' '.join(quote(x) for x in args).replace('\\', '/') + else: + return ' '.join(pipes.quote(x) for x in args) + + +def _FindPythonInDirectory(directory, allow_test): + """Returns an iterable of all non-test python files in the given directory.""" + for root, _dirnames, filenames in os.walk(directory): + for filename in filenames: + if filename.endswith('.py') and (allow_test + or not filename.endswith('_test.py')): + yield os.path.join(root, filename) + + +def _ImportModuleByPath(module_path): + """Imports a module by its source file.""" + # Replace the path entry for print_python_deps.py with the one for the given + # module. + sys.path[0] = os.path.dirname(module_path) + + # https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly + module_name = os.path.splitext(os.path.basename(module_path))[0] + import importlib.util # Python 3 only, since it's unavailable in Python 2. + spec = importlib.util.spec_from_file_location(module_name, module_path) + module = importlib.util.module_from_spec(spec) + sys.modules[module_name] = module + spec.loader.exec_module(module) + + +def main(): + parser = argparse.ArgumentParser( + description='Prints all non-system dependencies for the given module.') + parser.add_argument('module', + help='The python module to analyze.') + parser.add_argument('--root', default='.', + help='Directory to make paths relative to.') + parser.add_argument('--output', + help='Write output to a file rather than stdout.') + parser.add_argument('--inplace', action='store_true', + help='Write output to a file with the same path as the ' + 'module, but with a .pydeps extension. Also sets the ' + 'root to the module\'s directory.') + parser.add_argument('--no-header', action='store_true', + help='Do not write the "# Generated by" header.') + parser.add_argument('--gn-paths', action='store_true', + help='Write paths as //foo/bar/baz.py') + parser.add_argument('--did-relaunch', action='store_true', + help=argparse.SUPPRESS) + parser.add_argument('--allowlist', + default=[], + action='append', + dest='allowlists', + help='Recursively include all non-test python files ' + 'within this directory. 
May be specified multiple times.') + options = parser.parse_args() + + if options.inplace: + if options.output: + parser.error('Cannot use --inplace and --output at the same time!') + if not options.module.endswith('.py'): + parser.error('Input module path should end with .py suffix!') + options.output = options.module + 'deps' + options.root = os.path.dirname(options.module) + + modules = [options.module] + if os.path.isdir(options.module): + modules = list(_FindPythonInDirectory(options.module, allow_test=True)) + if not modules: + parser.error('Input directory does not contain any python files!') + + is_vpython = 'vpython' in sys.executable + if not is_vpython: + # Prevent infinite relaunch if something goes awry. + assert not options.did_relaunch + # Re-launch using vpython will cause us to pick up modules specified in + # //.vpython, but does not cause it to pick up modules defined inline via + # [VPYTHON:BEGIN] ... [VPYTHON:END] comments. + # TODO(agrieve): Add support for this if the need ever arises. + os.execvp('vpython3', ['vpython3'] + sys.argv + ['--did-relaunch']) + + # Work-around for protobuf library not being loadable via importlib + # This is needed due to compile_resources.py. + import importlib._bootstrap_external + importlib._bootstrap_external._NamespacePath.sort = lambda self, **_: 0 + + paths_set = set() + try: + for module in modules: + _ImportModuleByPath(module) + paths_set.update(ComputePythonDependencies()) + except Exception: + # Output extra diagnostics when loading the script fails. + sys.stderr.write('Error running print_python_deps.py.\n') + sys.stderr.write('is_vpython={}\n'.format(is_vpython)) + sys.stderr.write('did_relanuch={}\n'.format(options.did_relaunch)) + sys.stderr.write('python={}\n'.format(sys.executable)) + raise + + for path in options.allowlists: + paths_set.update( + os.path.abspath(p) + for p in _FindPythonInDirectory(path, allow_test=False)) + + paths = [os.path.relpath(p, options.root) for p in paths_set] + + normalized_cmdline = _NormalizeCommandLine(options) + out = open(options.output, 'w', newline='') if options.output else sys.stdout + with out: + if not options.no_header: + out.write('# Generated by running:\n') + out.write('# %s\n' % normalized_cmdline) + prefix = '//' if options.gn_paths else '' + for path in sorted(paths): + out.write(prefix + path.replace('\\', '/') + '\n') + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/private_code_test/BUILD.gn b/private_code_test/BUILD.gn new file mode 100644 index 000000000000..8fcdd54077a0 --- /dev/null +++ b/private_code_test/BUILD.gn @@ -0,0 +1,47 @@ +# Copyright 2023 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/python.gni") +import("//build_overrides/build.gni") +import("private_code_test.gni") + +action("private_paths") { + script = "list_gclient_deps.py" + outputs = [ "$target_gen_dir/private_paths.txt" ] + args = [ + "--source-filter", + "chrome-internal", + "--output", + rebase_path(outputs[0], root_build_dir), + ] + inputs = [ "//../.gclient_entries" ] +} + +# --collect-inputs-only requires a source_set dep or !is_component_build. 
+if (!is_component_build) { + action("private_code_test_gclient_deps") { + script = "list_gclient_deps.py" + outputs = [ "$target_gen_dir/test_private_paths.txt" ] + args = [ + "--source-filter", + "v8.git", + "--output", + rebase_path(outputs[0], root_build_dir), + ] + inputs = [ "//../.gclient_entries" ] + } + + shared_library("private_code_test_inputs") { + deps = [ "//v8" ] + ldflags = [ "--collect-inputs-only" ] + } + + # Test that ensures the checker fails when it is supposed to. + private_code_test("private_code_failure_test") { + linker_inputs_dep = ":private_code_test_inputs" + private_paths_dep = ":private_code_test_gclient_deps" + private_paths_file = "$target_gen_dir/test_private_paths.txt" + expect_failure = true + } +} diff --git a/private_code_test/README.md b/private_code_test/README.md new file mode 100644 index 000000000000..75329b02a39a --- /dev/null +++ b/private_code_test/README.md @@ -0,0 +1,36 @@ +# Private Code Test + +This directory provides a mechanism for testing that native does not link in +object files from unwanted directories. The test finds all linker inputs, and +checks that none live inside a list of internal paths. + +Original bug: https://bugs.chromium.org/p/chromium/issues/detail?id=1266989 + +## Determining Internal Directories + +This is done by parsing the `.gclient_entries` file for all paths coming from +https://chrome-internal.googlesource.com. I chose this approach since it is +simple. + +The main alternative I found was to use `gclient flatten`. Example output: + +``` + # src -> src-internal + "src-internal": { + "url": "https://chrome-internal.googlesource.com/chrome/src-internal.git@c649c6a155fe65c3730e2d663d7d2058d33bf1f9", + "condition": 'checkout_src_internal', + }, +``` + +* Paths could be found in this way by looking for `checkout_src_internal` + within `condition`, and by looking for the comment line for `recurse_deps` + that went through an internal repo. + +## Determining Linker Inputs + +This is done by performing a custom link step with a linker that just records +inputs. This seemed like the simplest approach. + +Two alternatives: +1) Dump paths found in debug information. +2) Scan a linker map file for input paths. diff --git a/private_code_test/list_gclient_deps.py b/private_code_test/list_gclient_deps.py new file mode 100755 index 000000000000..6a34fc4abae4 --- /dev/null +++ b/private_code_test/list_gclient_deps.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python3 +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import os +import pathlib +import sys + +_REPO_ROOT = pathlib.Path(__file__).resolve().parents[3] +_ENTRIES_FILE = _REPO_ROOT / '.gclient_entries' + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--source-filter', required=True) + parser.add_argument('--output', required=True) + args = parser.parse_args() + + source_filter = args.source_filter + + # Ninja validates that the file exists since it's marked as an input. + try: + text = _ENTRIES_FILE.read_text() + result = {} + exec(text, result) + entries = result['entries'] + private_dirs = sorted(d for d, s in entries.items() + if s and source_filter in s) + except Exception as e: + # Make the test fail rather than the compile step so that failures here do + # not prevent other bot functionality. 
+    private_dirs = [
+        '# ERROR parsing .gclient_entries',
+        str(e), '', 'File was:', text
+    ]
+
+  pathlib.Path(args.output).write_text('\n'.join(private_dirs) + '\n')
+
+
+if __name__ == '__main__':
+  main()
diff --git a/private_code_test/private_code_test.gni b/private_code_test/private_code_test.gni
new file mode 100644
index 000000000000..6ce82f0328bd
--- /dev/null
+++ b/private_code_test/private_code_test.gni
@@ -0,0 +1,63 @@
+# Copyright 2023 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//testing/test.gni")
+
+template("private_code_test") {
+  isolated_script_test(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data",
+                             "data_deps",
+                           ])
+    script = "//build/private_code_test/private_code_test.py"
+    _linker_inputs_dep = invoker.linker_inputs_dep
+    _so_name = shlib_prefix + get_label_info(_linker_inputs_dep, "name")
+    if (shlib_prefix != "") {
+      _so_name = string_replace(_so_name,
+                                "${shlib_prefix}${shlib_prefix}",
+                                shlib_prefix)
+    }
+    _dir = get_label_info(_linker_inputs_dep, "root_out_dir")
+    if (is_android) {
+      _dir += "/lib.unstripped"
+    }
+    _linker_inputs_file = "$_dir/${_so_name}$shlib_extension"
+    if (defined(invoker.private_paths_dep)) {
+      _private_paths_dep = invoker.private_paths_dep
+      _private_paths_file = invoker.private_paths_file
+    } else {
+      _private_paths_dep =
+          "//build/private_code_test:private_paths($default_toolchain)"
+      _private_paths_file =
+          get_label_info(_private_paths_dep, "target_gen_dir") +
+          "/private_paths.txt"
+    }
+
+    data_deps = [
+      _linker_inputs_dep,
+      _private_paths_dep,
+    ]
+    args = [
+      "--linker-inputs",
+      "@WrappedPath(" + rebase_path(_linker_inputs_file, root_build_dir) + ")",
+      "--private-paths-file",
+      "@WrappedPath(" + rebase_path(_private_paths_file, root_build_dir) + ")",
+      "--root-out-dir",
+      rebase_path(get_label_info(_linker_inputs_dep, "root_out_dir"),
+                  root_build_dir),
+    ]
+    if (defined(invoker.allowed_violations)) {
+      foreach(_glob, invoker.allowed_violations) {
+        args += [
+          "--allow-violation",
+          _glob,
+        ]
+      }
+    }
+    if (defined(invoker.expect_failure) && invoker.expect_failure) {
+      args += [ "--expect-failure" ]
+    }
+  }
+}
diff --git a/private_code_test/private_code_test.py b/private_code_test/private_code_test.py
new file mode 100755
index 000000000000..a164741a4b11
--- /dev/null
+++ b/private_code_test/private_code_test.py
@@ -0,0 +1,135 @@
+#!/usr/bin/env python3
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Tests that no linker inputs are from private paths."""
+
+import argparse
+import fnmatch
+import os
+import pathlib
+import sys
+
+_DIR_SRC_ROOT = pathlib.Path(__file__).resolve().parents[2]
+
+
+def _print_paths(paths, limit):
+  for path in paths[:limit]:
+    print(path)
+  if len(paths) > limit:
+    print(f'... and {len(paths) - limit} more.')
+  print()
+
+
+def _apply_allowlist(found, globs):
+  ignored_paths = []
+  new_found = []
+  for path in found:
+    for pattern in globs:
+      if fnmatch.fnmatch(path, pattern):
+        ignored_paths.append(path)
+        break
+    else:
+      new_found.append(path)
+  return new_found, ignored_paths
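
The allowlist handling in `_apply_allowlist()` above boils down to glob matching with `fnmatch`; a small worked example (paths and globs invented for illustration):

```python
import fnmatch

found = ['obj/internal/secret/foo.o', 'obj/clank/bar.o']
allow_globs = ['obj/clank/*']

# Keep only the paths not matched by any allowlisted glob.
violations = [p for p in found
              if not any(fnmatch.fnmatch(p, g) for g in allow_globs)]
assert violations == ['obj/internal/secret/foo.o']
```
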
+
+
+def _find_private_paths(linker_inputs, private_paths, root_out_dir):
+  seen = set()
+  found = []
+  for linker_input in linker_inputs:
+    dirname = os.path.dirname(linker_input)
+    if dirname in seen:
+      continue
+
+    to_check = dirname
+    # Strip ../ prefix.
+    if to_check.startswith('..'):
+      to_check = os.path.relpath(to_check, _DIR_SRC_ROOT)
+    else:
+      if root_out_dir:
+        # Strip secondary toolchain subdir
+        to_check = to_check[len(root_out_dir) + 1:]
+      # Strip top-level dir (e.g. "obj", "gen").
+      parts = to_check.split(os.path.sep, 1)
+      if len(parts) == 1:
+        continue
+      to_check = parts[1]
+
+    if any(to_check.startswith(p) for p in private_paths):
+      found.append(linker_input)
+    else:
+      seen.add(dirname)
+  return found
+
+
+def _read_private_paths(path):
+  text = pathlib.Path(path).read_text()
+
+  # Check if .gclient_entries was not valid. https://crbug.com/1427829
+  if text.startswith('# ERROR: '):
+    sys.stderr.write(text)
+    sys.exit(1)
+
+  # Remove src/ prefix from paths.
+  # We care only about paths within src/ since GN cannot reference files
+  # outside of // (and what would the obj/ path for them look like?).
+  ret = [p[4:] for p in text.splitlines() if p.startswith('src/')]
+  if not ret:
+    sys.stderr.write(f'No src/ paths found in {path}\n')
+    sys.stderr.write('This test should not be run on public bots.\n')
+    sys.stderr.write('File contents:\n')
+    sys.stderr.write(text)
+    sys.exit(1)
+
+  return ret
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--linker-inputs',
+                      required=True,
+                      help='Path to file containing one linker input per '
+                      'line, relative to --root-out-dir')
+  parser.add_argument('--private-paths-file',
+                      required=True,
+                      help='Path to file containing list of paths that are '
+                      'considered private, relative to the gclient root.')
+  parser.add_argument('--root-out-dir',
+                      required=True,
+                      help='See --linker-inputs.')
+  parser.add_argument('--allow-violation',
+                      action='append',
+                      help='Globs of private paths to allow.')
+  parser.add_argument('--expect-failure',
+                      action='store_true',
+                      help='Invert exit code.')
+  args = parser.parse_args()
+
+  private_paths = _read_private_paths(args.private_paths_file)
+  linker_inputs = pathlib.Path(args.linker_inputs).read_text().splitlines()
+
+  root_out_dir = args.root_out_dir
+  if root_out_dir == '.':
+    root_out_dir = ''
+
+  found = _find_private_paths(linker_inputs, private_paths, root_out_dir)
+
+  if args.allow_violation:
+    found, ignored_paths = _apply_allowlist(found, args.allow_violation)
+    if ignored_paths:
+      print(f'Ignoring {len(ignored_paths)} allowlisted private paths:')
+      _print_paths(sorted(ignored_paths), 10)
+
+  if found:
+    limit = 10 if args.expect_failure else 1000
+    print(f'Found {len(found)} private paths being linked into public code:')
+    _print_paths(found, limit)
+  elif args.expect_failure:
+    print('Expected to find a private path, but none were found.')
+
+  sys.exit(0 if bool(found) == args.expect_failure else 1)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/protoc_java.py b/protoc_java.py
new file mode 100755
index 000000000000..8f25e3a5e6c7
--- /dev/null
+++ b/protoc_java.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python3
+# Copyright 2012 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate java source files from protobuf files.
+
+This is the action script for the proto_java_library template.
+
+It performs the following steps:
+1. Deletes all old sources (ensures deleted classes are not part of new jars).
+2. Creates source directory.
+3. Generates Java files using protoc (output into either --java-out-dir or
+   --srcjar).
+4. Creates a new stamp file.
+""" + + +import argparse +import os +import shutil +import subprocess +import sys + +import action_helpers +import zip_helpers + +sys.path.append(os.path.join(os.path.dirname(__file__), 'android', 'gyp')) +from util import build_utils + + +def _HasJavaPackage(proto_lines): + return any(line.strip().startswith('option java_package') + for line in proto_lines) + + +def _EnforceJavaPackage(proto_srcs): + for proto_path in proto_srcs: + with open(proto_path) as in_proto: + if not _HasJavaPackage(in_proto.readlines()): + raise Exception('Proto files for java must contain a "java_package" ' + 'line: {}'.format(proto_path)) + + +def main(argv): + parser = argparse.ArgumentParser() + action_helpers.add_depfile_arg(parser) + parser.add_argument('--protoc', required=True, help='Path to protoc binary.') + parser.add_argument('--plugin', help='Path to plugin executable') + parser.add_argument('--proto-path', + required=True, + help='Path to proto directory.') + parser.add_argument('--java-out-dir', + help='Path to output directory for java files.') + parser.add_argument('--srcjar', help='Path to output srcjar.') + parser.add_argument('--stamp', help='File to touch on success.') + parser.add_argument( + '--import-dir', + action='append', + default=[], + help='Extra import directory for protos, can be repeated.') + parser.add_argument('protos', nargs='+', help='proto source files') + options = parser.parse_args(argv) + + if not options.java_out_dir and not options.srcjar: + raise Exception('One of --java-out-dir or --srcjar must be specified.') + + _EnforceJavaPackage(options.protos) + + with build_utils.TempDir() as temp_dir: + protoc_args = [] + + generator = 'java' + if options.plugin: + generator = 'plugin' + protoc_args += ['--plugin', 'protoc-gen-plugin=' + options.plugin] + + protoc_args += ['--proto_path', options.proto_path] + for path in options.import_dir: + protoc_args += ['--proto_path', path] + + protoc_args += ['--' + generator + '_out=lite:' + temp_dir] + + # Generate Java files using protoc. + build_utils.CheckOutput( + [options.protoc] + protoc_args + options.protos, + # protoc generates superfluous warnings about LITE_RUNTIME deprecation + # even though we are using the new non-deprecated method. 
+        stderr_filter=lambda output: build_utils.FilterLines(
+            output, '|'.join([r'optimize_for = LITE_RUNTIME', r'java/lite\.md'])
+        ))
+
+    if options.java_out_dir:
+      build_utils.DeleteDirectory(options.java_out_dir)
+      shutil.copytree(temp_dir, options.java_out_dir)
+    else:
+      with action_helpers.atomic_output(options.srcjar) as f:
+        zip_helpers.zip_directory(f, temp_dir)
+
+  if options.depfile:
+    assert options.srcjar
+    deps = options.protos + [options.protoc]
+    action_helpers.write_depfile(options.depfile, options.srcjar, deps)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/protoc_java.pydeps b/protoc_java.pydeps
new file mode 100644
index 000000000000..467907f86a8e
--- /dev/null
+++ b/protoc_java.pydeps
@@ -0,0 +1,8 @@
+# Generated by running:
+# build/print_python_deps.py --root build --output build/protoc_java.pydeps build/protoc_java.py
+action_helpers.py
+android/gyp/util/__init__.py
+android/gyp/util/build_utils.py
+gn_helpers.py
+protoc_java.py
+zip_helpers.py
diff --git a/redirect_stdout.py b/redirect_stdout.py
new file mode 100644
index 000000000000..16494fa6cf00
--- /dev/null
+++ b/redirect_stdout.py
@@ -0,0 +1,28 @@
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import os
+import subprocess
+import sys
+
+# This script executes a command and redirects the stdout to a file. This is
+# equivalent to |command... > output_file|.
+#
+# Usage: python redirect_stdout.py output_file command...
+
+if __name__ == '__main__':
+  if len(sys.argv) < 3:
+    print("Usage: %s output_file command..." % sys.argv[0], file=sys.stderr)
+    sys.exit(1)
+
+  # This script is designed to run binaries produced by the current build. We
+  # may prefix it with "./" to avoid picking up system versions that might
+  # also be on the path.
+  path = sys.argv[2]
+  if not os.path.isabs(path):
+    path = './' + path
+
+  with open(sys.argv[1], 'w') as fp:
+    sys.exit(subprocess.check_call([path] + sys.argv[3:], stdout=fp))
diff --git a/rm.py b/rm.py
new file mode 100755
index 000000000000..11e8a6439900
--- /dev/null
+++ b/rm.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python3
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Delete a file.
+
+This module works much like the POSIX rm command.
+"""
+
+
+import argparse
+import os
+import sys
+
+
+def Main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('files', nargs='+')
+  parser.add_argument('-f', '--force', action='store_true',
+                      help="don't err on missing")
+  parser.add_argument('--stamp', required=True, help='touch this file')
+  args = parser.parse_args()
+  for f in args.files:
+    try:
+      os.remove(f)
+    except OSError:
+      if not args.force:
+        print("'%s' does not exist" % f, file=sys.stderr)
+        return 1
+
+  with open(args.stamp, 'w'):
+    os.utime(args.stamp, None)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(Main())
diff --git a/rust/BUILD.gn b/rust/BUILD.gn
new file mode 100644
index 000000000000..d7ae149c265f
--- /dev/null
+++ b/rust/BUILD.gn
@@ -0,0 +1,84 @@
+# Copyright 2021 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
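+#
+# For illustration only, a C++ target that calls into cxx-generated bindings
+# would typically pull in the C++ side of cxx by depending on ":cxx_cppdeps"
+# below (the target and file names in this sketch are hypothetical):
+#
+#   static_library("cpp_callers") {
+#     sources = [ "callers.cc" ]
+#     deps = [ "//build/rust:cxx_cppdeps" ]
+#   }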
+ +import("//build/buildflag_header.gni") +import("//build/config/rust.gni") + +if (toolchain_has_rust) { + config("edition_2021") { + rustflags = [ "--edition=2021" ] + } + + config("edition_2018") { + rustflags = [ "--edition=2018" ] + } + + config("edition_2015") { + rustflags = [ "--edition=2015" ] + } + + # The required dependencies for cxx-generated bindings, that must be included + # on the C++ side. + static_library("cxx_cppdeps") { + sources = [ + "//third_party/rust/cxx/v1/crate/include/cxx.h", + "//third_party/rust/cxx/v1/crate/src/cxx.cc", + ] + + defines = [ "RUST_CXX_NO_EXCEPTIONS" ] + + if (is_win) { + defines += [ "CXX_RS_EXPORT=__declspec(dllexport)" ] + } else { + defines += [ "CXX_RS_EXPORT=__attribute__((visibility(\"default\")))" ] + } + + # Depending on the C++ bindings side of cxx then requires also depending + # on the Rust bindings, since one calls the other. And the Rust bindings + # require the Rust standard library. + # Normally the Rust stdlib is brought in as a dependency by depending + # on any first-party Rust target. But in this case, it's conceivable + # that pure-C++ targets will not depend on any 1p Rust code so we'll add + # the Rust stdlib explicitly. + deps = [ ":cxx_rustdeps" ] + + if (use_local_std_by_default) { + deps += [ "//build/rust/std:link_local_std" ] + } else { + assert(prebuilt_libstd_supported, + "Prebuilt Rust stdlib is not available for this target") + deps += [ "//build/rust/std:link_prebuilt_std" ] + } + } + + # The required dependencies for cxx-generated bindings, that must be included + # on the Rust side. + group("cxx_rustdeps") { + public_deps = [ "//third_party/rust/cxx/v1:lib" ] + } +} + +# Enables code behind #[cfg(test)]. This should only be used for targets where +# testonly=true. +config("test") { + rustflags = [ + "--cfg", + "test", + ] +} + +# TODO(crbug.com/gn/104): GN rust_proc_macro targets are missing this +# command line flag, for the proc_macro crate which is provided by rustc for +# compiling proc-macros. +config("proc_macro_extern") { + rustflags = [ + "--extern", + "proc_macro", + ] +} + +# Forbids unsafe code in crates with this config. +config("forbid_unsafe") { + rustflags = [ "-Funsafe_code" ] +} diff --git a/rust/OWNERS b/rust/OWNERS new file mode 100644 index 000000000000..0e7aca6e184a --- /dev/null +++ b/rust/OWNERS @@ -0,0 +1,7 @@ +adetaylor@chromium.org +ajgo@chromium.org +collinbaker@chromium.org +danakj@chromium.org +lukasza@chromium.org +rsesek@chromium.org +thakis@chromium.org diff --git a/rust/analyze.gni b/rust/analyze.gni new file mode 100644 index 000000000000..36c06112d590 --- /dev/null +++ b/rust/analyze.gni @@ -0,0 +1,79 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import("//build/config/compute_inputs_for_analyze.gni") +import("//build/config/rust.gni") + +if (compute_inputs_for_analyze) { + template("analyze_rust") { + _target_name = target_name + assert(defined(invoker.crate_root)) + + action("${_target_name}_collect_sources") { + forward_variables_from(invoker, + "*", + TESTONLY_AND_VISIBILITY + [ + "inputs", + "script", + "sources", + "depfile", + "outputs", + "args", + ]) + forward_variables_from(invoker, [ "testonly" ]) + + script = "//build/rust/collect_rust_sources.py" + depfile = "${target_gen_dir}/${target_name}.verify.d" + outputs = [ depfile ] + + args = [ + "--generate-depfile", + "${rust_sysroot}/bin/rustc", + rebase_path(crate_root, root_build_dir), + rebase_path(depfile, root_build_dir), + "{{rustflags}}", + ] + } + + action(_target_name) { + forward_variables_from(invoker, [ "testonly" ]) + + # Constructs a depfile of all rust sources in the crate. + deps = [ ":${_target_name}_collect_sources" ] + + # This target is reached once during `gn gen` and then again during + # `gn analyze`. + # + # 1. When doing `gn gen`, the ':${_target_name}_collect_sources' + # target generates a depfile containing all the rust sources of + # the crate. The exec_script() below runs first, and it produces an + # empty result. + # 2. When doing `gn analyze`, the exec_script() reads the depfile that + # was written during `gn gen` and puts each Rust file in the crate + # into `inputs`. + depfile_path = [] + foreach(d, get_target_outputs(":${_target_name}_collect_sources")) { + depfile_path += [ rebase_path(d, root_build_dir) ] + } + + # Here we read the depfile from `gn gen` when doing `gn analyze`, and + # add all the rust files in the crate to `inputs`. This ensures that + # analyze considers them as affecting tests that depend on the crate. + rust_srcs = exec_script("//build/rust/collect_rust_sources.py", + [ "--read-depfile" ] + depfile_path, + "list lines") + inputs = [] + foreach(s, rust_srcs) { + inputs += [ rebase_path(s, "//", root_build_dir) ] + } + script = "//build/rust/collect_rust_sources.py" + args = [ + "--stamp", + rebase_path("${target_gen_dir}/${target_name}.verify.stamp", + root_build_dir), + ] + outputs = [ "${target_gen_dir}/${target_name}.verify.stamp" ] + } + } +} diff --git a/rust/cargo_crate.gni b/rust/cargo_crate.gni new file mode 100644 index 000000000000..0456a25cc289 --- /dev/null +++ b/rust/cargo_crate.gni @@ -0,0 +1,340 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/rust.gni") +import("//build/rust/rust_target.gni") + +# This template allows for building Cargo crates within gn. +# +# It is intended for use with pre-existing (third party) code and +# is none too efficient. (It will stall the build pipeline whilst +# it runs build scripts to work out what flags are needed). First +# party code should directly use first-class gn targets, such as +# //build/rust/rust_static_library.gni or similar. +# +# Because it's intended for third-party code, it automatically +# defaults to //build/config/compiler:no_chromium_code which +# suppresses some warnings. If you *do* use this for first party +# code, you should remove that config and add the equivalent +# //build/config/compiler:chromium_code config. 
+# +# Arguments: +# sources +# crate_root +# epoch +# deps +# aliased_deps +# features +# build_native_rust_unit_tests +# edition +# crate_name +# All just as in rust_static_library.gni +# library_configs/executable_configs +# All just as in rust_target.gni +# +# dev_deps +# Same meaning as test_deps in rust_static_library.gni, but called +# dev_deps to match Cargo.toml better. +# +# build_root (optional) +# Filename of build.rs build script. +# +# build_deps (optional) +# Build script dependencies +# +# build_sources (optional) +# List of sources for build script. Must be specified if +# build_root is specified. +# +# build_script_outputs (optional) +# List of .rs files generated by the build script, if any. +# Fine to leave undefined even if you have a build script. +# This doesn't directly correspond to any Cargo variable, +# but unfortunately is necessary for gn to build its dependency +# trees automatically. +# Many build scripts just output --cfg directives, in which case +# no source code is generated and this can remain empty. +# +# build_script_inputs (optional) +# If the build script reads any files generated by build_deps, +# as opposed to merely linking against them, add a list of such +# files here. Again, this doesn't correspond to a Cargo variable +# but is necessary for gn. +# +# crate_type "bin", "proc-macro" or "rlib" (optional) +# Whether to build an executable. The default is "rlib". +# At present others are not supported. +# +# cargo_pkg_authors +# cargo_pkg_version +# cargo_pkg_name +# cargo_pkg_description +# Strings as found within 'version' and similar fields within Cargo.toml. +# Converted to environment variables passed to rustc, in case the crate +# uses clap `crate_version!` or `crate_authors!` macros (fairly common in +# command line tool help) + +template("cargo_crate") { + _orig_target_name = target_name + + _crate_name = _orig_target_name + if (defined(invoker.crate_name)) { + _crate_name = invoker.crate_name + } + + # Executables need to have unique names. Work out a prefix. + if (defined(invoker.build_root)) { + _epochlabel = "vunknown" + if (defined(invoker.epoch)) { + _tempepoch = string_replace(invoker.epoch, ".", "_") + _epochlabel = "v${_tempepoch}" + } + + # This name includes the target name to ensure it's unique for each possible + # build target in the same BUILD.gn file. + _build_script_name = + "${_crate_name}_${target_name}_${_epochlabel}_build_script" + + # Where the OUT_DIR will point when running the build script exe, and + # compiling the crate library/binaries. This directory must include the + # target name to avoid collisions between multiple GN targets that exist + # in the same BUILD.gn. + _build_script_env_out_dir = "$target_gen_dir/$target_name" + } + + _rustenv = [] + if (defined(invoker.rustenv)) { + _rustenv = invoker.rustenv + } + if (defined(invoker.cargo_pkg_authors)) { + _rustenv += [ "CARGO_PKG_AUTHORS=${invoker.cargo_pkg_authors}" ] + } + if (defined(invoker.cargo_pkg_version)) { + _rustenv += [ "CARGO_PKG_VERSION=${invoker.cargo_pkg_version}" ] + } + if (defined(invoker.cargo_pkg_name)) { + _rustenv += [ "CARGO_PKG_NAME=${invoker.cargo_pkg_name}" ] + } + if (defined(invoker.cargo_pkg_description)) { + _rustenv += [ "CARGO_PKG_DESCRIPTION=${invoker.cargo_pkg_description}" ] + } + + # The main target, either a Rust source set or an executable. 
+ rust_target(target_name) { + forward_variables_from(invoker, + "*", + TESTONLY_AND_VISIBILITY + [ + "build_root", + "build_deps", + "build_sources", + "build_script_inputs", + "build_script_outputs", + "unit_test_target", + "target_type", + "configs", + "rustenv", + ]) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + + # Work out what we're building. + crate_type = "rlib" + if (defined(invoker.crate_type)) { + crate_type = invoker.crate_type + } + + # TODO(crbug.com/1422745): don't default to true. This requires changes to + # third_party.toml and gnrt when generating third-party build targets. + allow_unsafe = true + + if (!defined(rustflags)) { + rustflags = [] + } + rustenv = _rustenv + if (crate_type == "bin") { + target_type = "executable" + assert(!defined(invoker.epoch)) + } else if (crate_type == "proc-macro") { + target_type = "rust_proc_macro" + } else { + assert(crate_type == "rlib") + target_type = "rust_library" + } + + if (!defined(build_native_rust_unit_tests)) { + build_native_rust_unit_tests = true + } + + # The unit tests for each target, if generated, should be unique as well. + # a) It needs to be unique even if multiple build targets have the same + # `crate_name`, but different target names. + # b) It needs to be unique even if multiple build targets have the same + # `crate_name` and target name, but different epochs. + _unit_test_unique_target_name = "" + if (_crate_name != _orig_target_name) { + _unit_test_unique_target_name = "${_orig_target_name}_" + } + _unit_test_unique_epoch = "" + if (defined(invoker.epoch)) { + _epoch_str = string_replace(invoker.epoch, ".", "_") + _unit_test_unique_epoch = "v${_epoch_str}_" + } + if (defined(output_dir) && output_dir != "") { + unit_test_output_dir = output_dir + } + unit_test_target = "${_unit_test_unique_target_name}${_crate_name}_${_unit_test_unique_epoch}unittests" + + if ((!defined(output_dir) || output_dir == "") && crate_type == "rlib") { + # Cargo crate rlibs can be compiled differently for tests, and must not + # collide with the production outputs. This does *not* override the + # unit_test_output_dir, which is set above, as that target is not an rlib. + output_dir = "$target_out_dir/$_orig_target_name" + } + + if (defined(invoker.build_root)) { + # Uh-oh, we have a build script + if (!defined(deps)) { + deps = [] + } + if (!defined(sources)) { + sources = [] + } + if (defined(invoker.dev_deps)) { + test_deps = invoker.dev_deps + } + + # This... is a bit weird. We generate a file called cargo_flags.rs which + # does not actually contain Rust code, but instead some flags to add + # to the rustc command line. We need it to end in a .rs extension so that + # we can include it in the 'sources' line and thus have dependency + # calculation done correctly. data_deps won't work because targets don't + # require them to be present until runtime. + flags_file = "$_build_script_env_out_dir/cargo_flags.rs" + rustflags += [ "@" + rebase_path(flags_file, root_build_dir) ] + sources += [ flags_file ] + if (defined(invoker.build_script_outputs)) { + # Build scripts may output arbitrary files. They are usually included in + # the main Rust target using include! or include_str! and therefore the + # filename may be .rs or may be arbitrary. We want to educate ninja + # about the dependency either way. 
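+        # (For illustration, with hypothetical file names: a
+        # build_script_outputs of [ "gen.rs", "data.bin" ] adds
+        # "$_build_script_env_out_dir/gen.rs" to `sources` via the
+        # filter_include() below, and ".../data.bin" to `inputs` via the
+        # filter_exclude() that follows.)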
+ foreach(extra_source, + filter_include(invoker.build_script_outputs, [ "*.rs" ])) { + sources += [ "$_build_script_env_out_dir/$extra_source" ] + } + inputs = [] + foreach(extra_source, + filter_exclude(invoker.build_script_outputs, [ "*.rs" ])) { + inputs += [ "$_build_script_env_out_dir/$extra_source" ] + } + } + deps += [ ":${_build_script_name}_output" ] + } + } + + if (defined(invoker.build_root)) { + # Extra targets required to make build script work + action("${_build_script_name}_output") { + script = rebase_path("//build/rust/run_build_script.py") + build_script_target = + ":${_build_script_name}($host_toolchain_no_sanitizers)" + deps = [ build_script_target ] + + # The build script output is always in the name-specific output dir. It + # may be built with a different toolchain when cross-compiling (the host + # toolchain) so we must find the path relative to that. + _build_script_target_out_dir = + get_label_info(build_script_target, "target_out_dir") + _build_script_exe = + "$_build_script_target_out_dir/$_orig_target_name/$_build_script_name" + if (is_win) { + _build_script_exe = "${_build_script_exe}.exe" + } + + _flags_file = "$_build_script_env_out_dir/cargo_flags.rs" + + inputs = [ _build_script_exe ] + outputs = [ _flags_file ] + args = [ + "--build-script", + rebase_path(_build_script_exe, root_build_dir), + "--output", + rebase_path(_flags_file, root_build_dir), + "--rust-prefix", + rebase_path("${rust_sysroot}/bin"), + "--out-dir", + rebase_path(_build_script_env_out_dir, root_build_dir), + "--src-dir", + rebase_path(get_path_info(invoker.build_root, "dir"), root_build_dir), + ] + if (defined(rust_abi_target) && rust_abi_target != "") { + args += [ + "--target", + rust_abi_target, + ] + } + if (defined(invoker.features)) { + args += [ "--features" ] + args += invoker.features + } + if (defined(invoker.build_script_outputs)) { + args += [ "--generated-files" ] + args += invoker.build_script_outputs + foreach(generated_file, invoker.build_script_outputs) { + outputs += [ "$_build_script_env_out_dir/$generated_file" ] + } + } + if (_rustenv != []) { + args += [ "--env" ] + args += _rustenv + } + if (defined(invoker.build_script_inputs)) { + inputs += invoker.build_script_inputs + } + } + + if (current_toolchain == host_toolchain_no_sanitizers) { + rust_target(_build_script_name) { + target_type = "executable" + sources = invoker.build_sources + crate_root = invoker.build_root + if (defined(invoker.build_deps)) { + deps = invoker.build_deps + } + + # An rlib's build script may be built differently for tests and for + # production, so they must be in a name specific to the GN target. The + # ${_build_script_name}_output target looks for the exe in this + # location. 
+        output_dir = "$target_out_dir/$_orig_target_name"
+        rustenv = _rustenv
+        forward_variables_from(invoker,
+                               [
+                                 "features",
+                                 "edition",
+                                 "rustflags",
+                               ])
+        executable_configs -= [ "//build/config/compiler:chromium_code" ]
+        executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+      }
+    } else {
+      not_needed(invoker,
+                 [
+                   "build_sources",
+                   "build_deps",
+                   "build_root",
+                   "build_script_inputs",
+                   "build_script_outputs",
+                 ])
+    }
+  } else {
+    not_needed([
+                 "_name_specific_output_dir",
+                 "_orig_target_name",
+               ])
+  }
+}
+
+set_defaults("cargo_crate") {
+  library_configs = default_compiler_configs
+  executable_configs = default_executable_configs
+}
diff --git a/rust/collect_rust_sources.py b/rust/collect_rust_sources.py
new file mode 100755
index 000000000000..48f2f1f52335
--- /dev/null
+++ b/rust/collect_rust_sources.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python3
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+'''Finds all Rust files in a crate and writes the result to a depfile.
+
+The script is used again to read the same depfile back and pull out just the
+source files, and lastly to write a stamp file at the same location as the
+depfile.'''
+
+import argparse
+import re
+import subprocess
+import sys
+
+FILE_REGEX = re.compile('^(.*):')
+
+
+def main():
+  parser = argparse.ArgumentParser(
+      description='Collect Rust sources for a crate')
+  parser.add_argument('--stamp',
+                      action='store_true',
+                      help='Generate a stamp file')
+  parser.add_argument('--generate-depfile',
+                      action='store_true',
+                      help='Generate a depfile')
+  parser.add_argument('--read-depfile',
+                      action='store_true',
+                      help='Read the previously generated depfile')
+  args, rest = parser.parse_known_args()
+
+  if args.stamp:
+    stampfile = rest[0]
+    with open(stampfile, "w") as f:
+      f.write("stamp")
+  elif args.generate_depfile:
+    rustc = rest[0]
+    crate_root = rest[1]
+    depfile = rest[2]
+    rustflags = rest[3:]
+
+    rustc_args = [
+        "--emit=dep-info=" + depfile, "-Zdep-info-omit-d-target", crate_root
+    ]
+    subprocess.check_call([rustc] + rustc_args + rustflags)
+  elif args.read_depfile:
+    depfile = rest[0]
+    try:
+      with open(depfile, "r") as f:
+        matches = [FILE_REGEX.match(line) for line in f.readlines()]
+      for match in matches:
+        if match:
+          print(match.group(1))
+    except OSError:
+      # The depfile may not exist yet (e.g. on a clean build); treat that the
+      # same as an empty one.
+      pass
+  else:
+    print("ERROR: Unknown action")
+    parser.print_help()
+    return 1
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/rust/filter_clang_args.py b/rust/filter_clang_args.py
new file mode 100644
index 000000000000..5a1843c0df07
--- /dev/null
+++ b/rust/filter_clang_args.py
@@ -0,0 +1,31 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""
+Filters clang args to make them suitable for libclang.
+
+Rust involves several libclang-based tools that parse C++.
+We pass such tools our complete {{cflags}}, but a few of the
+arguments aren't appropriate for libclang (for example those
+which load plugins).
+
+This function filters them out.
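+
+For example, given the hypothetical argument stream
+  ['-Xclang', '-plugin-arg-blink-gc-plugin', '-Xclang', 'no-members', '-O2']
+the filtered result is just ['-O2']: the plugin arguments are dropped and
+the ordinary flag is kept.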
+""" + + +def filter_clang_args(clangargs): + def do_filter(args): + i = 0 + while i < len(args): + # Intercept plugin arguments + if args[i] == '-Xclang': + i += 1 + if args[i] == '-add-plugin': + pass + elif args[i].startswith('-plugin-arg'): + i += 2 + else: + yield args[i] + i += 1 + + return list(do_filter(clangargs)) diff --git a/rust/rs_bindings_from_cc.gni b/rust/rs_bindings_from_cc.gni new file mode 100644 index 000000000000..9bd08cff86ab --- /dev/null +++ b/rust/rs_bindings_from_cc.gni @@ -0,0 +1,297 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/clang/clang.gni") +import("//build/config/rust.gni") +import("//build/config/sysroot.gni") +import("//build/rust/mixed_static_library.gni") + +# Template to generate and build Rust bindings for a set of C++ headers using +# Crubit's `rs_bindings_from_cc` tool. +# +# This template expands to a `mixed_static_library` named "_rs_api" and +# containing the Rust side of the bindings (as well as internal C++ thunks +# needed to support the bindings). +# +# The generated out/.../gen/.../_rs_api.rs is machine-generated, but +# should be fairly readable (inspecting it might be useful to discover the +# imported bindings and their shape). +# +# Parameters: +# +# bindings_target: +# The C++ target (e.g. a `source_set`) that Rust bindings should be +# generated for. +# +# public_headers: +# The .h files to generate bindings for. +# +# Implementation note: This doesn't just take *all* the headers of the +# `bindings_target`, because typically only a *subset* of headers provides +# the *public* API that bindings are needed for. +# +# TODO(crbug.com/1329611): Internal headers should still to be included in +# the targets_and_headers metadata... +# +# deps: +# Other `rs_bindings_from_cc` targets that the bindings need to depend on +# (e.g. because APIs in the `public_headers` refer to `struct`s declared in +# those other targets. Note how in the usage example below bindings for +# `struct Goat` are provided by `goat_rs_api`, and that therefore the +# bindings for the `TeleportGoat` provided by `teleport_rs_api` depend on +# `goat_rs_api`). +# +# Oftentimes `deps` can be a copy of the `public_deps` of the +# `bindings_target`, but depending on targets with the suffix "_rs_api". +# Still, there are scenarios where `deps` don't parallel *all* entries from +# `public_deps`: +# * `public_deps` that don't expose Rust APIs (i.e. there are no +# "..._rs_api" targets to depend on). +# * `public_deps` that Crubit bindings don't depend on (dependencies that +# don't provide re-exportable C++ APIs, or that only provide items +# that are ignored by Crubit - e.g. `#define`s). +# +# Usage example: +# +# BUILD.gn: +# import("//build/rust/rs_bindings_from_cc.gni") +# import("//build/rust/rust_executable.gni") +# +# rust_executable("my_target") { +# crate_root = "main.rs" +# sources = [ "main.rs" ] +# deps = [ ":teleport_rs_api" ] +# ] +# +# # This will generate "teleport_rs_api" target that provides Rust +# # bindings for the "teleport.h" header from the ":teleport" source +# # set. +# rs_bindings_from_cc("teleport_rs_api") { +# bindings_target = ":teleport" +# public_headers = ["teleport.h"] +# deps = [ ":goat_rs_api" ] # Parallel's `public_deps` of ":teleport". +# } +# +# source_set("teleport") { +# sources = [ "teleport.h", ... 
] +# public_deps = [ ":goat" ] +# } +# +# rs_bindings_from_cc("goat_rs_api") { +# bindings_target = ":goat" +# public_headers = ["goat.h"] +# } +# source_set("goat") { +# sources = [ "goat.h", ... ] +# } +# +# teleport.h: +# #include "goat.h" +# void TeleportGoat(const Goat& goat_to_teleport); +# +# goat.h: +# struct Goat { ... }; +# +# main.rs: +# fn main() { +# let g: goat_rs_api::Goat = ...; +# teleport_rs_api::TeleportGoat(&g); +# } +# +# Debugging and implementation notes: +# +# - Consider running the build while CRUBIT_DEBUG environment variable is set. +# This will generate additional `.ir` file and log extra information from +# the `run_rs_bindings_from_cc.py` script (e.g. full cmdlines for invoking +# `rs_bindings_from_cc`). +# +template("rs_bindings_from_cc") { + # Mandatory parameter: bindings_target. + assert(defined(invoker.bindings_target), + "Must specify the C target to make bindings for.") + _bindings_target = invoker.bindings_target + + # Mandatory/unavoidable parameter: target_name + _lib_target_name = target_name + _base_target_name = get_label_info(_bindings_target, "name") + assert(_lib_target_name == "${_base_target_name}_rs_api", + "The convention is that bindings for `foo` are named `foo_rs_api`") + + # Mandatory parameter: public_headers. + assert(defined(invoker.public_headers), + "Must specify the public C headers to make bindings for.") + _rebased_public_headers = [] + foreach(hdr, invoker.public_headers) { + _rebased_public_headers += [ rebase_path(hdr) ] + } + + # Optional parameter: testonly. + _testonly = false + if (defined(invoker.testonly)) { + _testonly = invoker.testonly + } + + # Optional parameter: visibility. + if (defined(invoker.visibility)) { + _visibility = invoker.visibility + } + + # Optional parameter: deps. + # + # TODO(crbug.com/1329611): Can we somehow assert that `_deps` only contains + # some "..._rs_api" targets crated via + # `mixed_static_library($_lib_target_name)` below? foreach(dep, _deps) { + # assert something } + _deps = [] + if (defined(invoker.deps)) { + _deps = invoker.deps + } + + # Various names and paths that are shared across multiple targets defined + # in the template here. + _gen_bindings_target_name = "${_lib_target_name}_gen_bindings" + _gen_metadata_target_name = "${_lib_target_name}_gen_metadata" + _metadata_target_name = "${_lib_target_name}_metadata" + _metadata_path = "${target_gen_dir}/${_lib_target_name}_meta.json" + _rs_out_path = "${target_gen_dir}/${_lib_target_name}.rs" + _cc_out_path = "${target_gen_dir}/${_lib_target_name}_impl.cc" + + # Calculating the --targets_and_headers snippet for the *current* target + # and putting it into GN's `metadata`. + group(_metadata_target_name) { + testonly = _testonly + visibility = [ + ":${_gen_metadata_target_name}", + ":${_lib_target_name}", + ] + deps = [] + + metadata = { + # The data below corresponds to a single-target entry inside + # `--targets_and_headers` cmdline argument of `rs_bindings_from_cc`. + crubit_target_and_headers = [ + { + # The `get_label_info` call below expands ":foo_rs_api" into + # something like "//dir/bar/baz:foo_rs_api". Crubit assumes that + # there is a colon + uses the after-colon-suffix as the name of the + # crate. + t = get_label_info(":${_lib_target_name}", "label_no_toolchain") + h = _rebased_public_headers + }, + ] + } + } + + # Gathering --targets-and-headers data from *all* transitive dependencies and + # putting them into the file at `_metadata_path`. 
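+  # (For illustration: the resulting JSON file is a list of such entries,
+  # shaped roughly like `[{"t": "//some/dir:foo_rs_api", "h": ["...foo.h"]},
+  # ...]`, matching the `t` and `h` keys populated in the metadata above.)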
+ generated_file(_gen_metadata_target_name) { + testonly = _testonly + visibility = [ ":${_gen_bindings_target_name}" ] + + deps = [ ":${_metadata_target_name}" ] + deps += _deps + + testonly = _testonly + outputs = [ _metadata_path ] + output_conversion = "json" + data_keys = [ "crubit_target_and_headers" ] + + # `walk_keys` are used to limit how deep the transitive dependency goes. + # This is important, because Crubit doesn't care about all the `deps` or + # `public_deps` of the `_bindings_target`. (See also the doc comment about + # `rs_bindings_from_cc.deps` parameter at the top of this file.) + walk_keys = [ "crubit_metadata_deps" ] + } + + # Exposing the generated Rust bindings. + mixed_static_library(_lib_target_name) { + testonly = _testonly + if (defined(_visibility)) { + visibility = _visibility + } + + sources = [ _cc_out_path ] + deps = _deps + deps += [ + ":${_gen_bindings_target_name}", + ":${_metadata_target_name}", + "//third_party/crubit:deps_of_rs_api_impl", + _bindings_target, + ] + + # Chromium already covers `chromium/src/` and `out/Release/gen` in the + # include path, but we need to explicitly add `out/Release` below. This + # is needed, because `--public_headers` passed to Crubit use paths relative + # to the `out/Release` directory. See also b/239238801. + include_dirs = [ root_build_dir ] + + rs_sources = [ _rs_out_path ] + rs_crate_name = _lib_target_name + rs_crate_root = _rs_out_path + rs_deps = _deps + rs_deps += [ + ":${_gen_bindings_target_name}", + "//third_party/crubit:deps_of_rs_api", + ] + + metadata = { + crubit_metadata_deps = _deps + [ ":${_metadata_target_name}" ] + } + } + + # Invoking Crubit's `rs_bindings_from_cc` tool to generate Rust bindings. + action(_gen_bindings_target_name) { + testonly = _testonly + if (defined(_visibility)) { + visibility = _visibility + } + + script = "//build/rust/run_rs_bindings_from_cc.py" + inputs = [ "//third_party/rust-toolchain/bin/rs_bindings_from_cc" ] + sources = invoker.public_headers + outputs = [ + _rs_out_path, + _cc_out_path, + ] + + deps = [ ":${_gen_metadata_target_name}" ] + args = [ + # Target-specific outputs: + "--rs_out", + rebase_path(_rs_out_path), + "--cc_out", + rebase_path(_cc_out_path), + + # Target-specific inputs: + "--public_headers", + string_join(",", _rebased_public_headers), + "--targets_and_headers_from_gn", + rebase_path(_metadata_path), + ] + + # Several important compiler flags come from default_compiler_configs + configs = default_compiler_configs + if (defined(invoker.configs)) { + configs += invoker.configs + } + args += [ + "--", + "{{defines}}", + "{{include_dirs}}", + "{{cflags}}", + + # This path contains important C headers (e.g. stddef.h) and {{cflags}} + # does not include it. Normally this path is implicitly added by clang but + # it does not happen for libclang. + # + # Add it last so includes from deps and configs take precedence. + "-isystem" + rebase_path( + clang_base_path + "/lib/clang/" + clang_version + "/include", + root_build_dir), + + # Passes C comments through as rustdoc attributes. + "-fparse-all-comments", + ] + } +} diff --git a/rust/run_bindgen.py b/rust/run_bindgen.py new file mode 100755 index 000000000000..a77c555113d9 --- /dev/null +++ b/rust/run_bindgen.py @@ -0,0 +1,98 @@ +#!/usr/bin/env python3 + +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
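+#
+# A typical invocation, as assembled by //build/rust/rust_bindgen.gni (all
+# paths here are illustrative only):
+#
+#   run_bindgen.py --exe .../bindgen --header foo.h \
+#       --output gen/foo_bindings.rs --depfile obj/foo.d \
+#       --ld-library-path ... -- {{defines}} {{include_dirs}} {{cflags}}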
+ +import argparse +import os +import subprocess +import sys + +# Set up path to be able to import action_helpers. +sys.path.append( + os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir, + os.pardir, 'build')) +import action_helpers + +from filter_clang_args import filter_clang_args + + +def atomic_copy(in_path, out_path): + with open(in_path, 'rb') as input: + with action_helpers.atomic_output(out_path) as output: + content = input.read() + output.write(content) + + +def copy_to_prefixed_filename(path, filename, prefix): + atomic_copy(os.path.join(path, filename), + os.path.join(path, prefix + "_" + filename)) + + +def main(): + parser = argparse.ArgumentParser("run_bindgen.py") + parser.add_argument("--exe", help="Path to bindgen", required=True), + parser.add_argument("--header", + help="C header file to generate bindings for", + required=True) + parser.add_argument("--depfile", + help="depfile to output with header dependencies") + parser.add_argument("--output", help="output .rs bindings", required=True) + parser.add_argument("--ld-library-path", + help="LD_LIBRARY_PATH (or DYLD_LIBRARY_PATH on Mac) to " + "set") + parser.add_argument("-I", "--include", help="include path", action="append") + parser.add_argument("--bindgen-flags", + help="flags to pass to bindgen", + nargs="*") + parser.add_argument( + "clangargs", + metavar="CLANGARGS", + help="arguments to pass to libclang (see " + "https://docs.rs/bindgen/latest/bindgen/struct.Builder.html#method.clang_args)", + nargs="*") + args = parser.parse_args() + + # Args passed to the actual bindgen cli + genargs = [] + genargs.append('--no-layout-tests') + if args.bindgen_flags is not None: + for flag in args.bindgen_flags: + genargs.append("--" + flag) + + # TODO(danakj): We need to point bindgen to + # //third_party/rust-toolchain/bin/rustfmt. + genargs.append('--no-rustfmt-bindings') + genargs += ['--rust-target', 'nightly'] + + if args.depfile: + genargs.append('--depfile') + genargs.append(args.depfile) + genargs.append('--output') + genargs.append(args.output) + genargs.append(args.header) + genargs.append('--') + genargs.extend(filter_clang_args(args.clangargs)) + env = os.environ + if args.ld_library_path: + if sys.platform == 'darwin': + env["DYLD_LIBRARY_PATH"] = args.ld_library_path + else: + env["LD_LIBRARY_PATH"] = args.ld_library_path + returncode = subprocess.run([args.exe, *genargs], env=env).returncode + if returncode != 0: + # Make sure we don't emit anything if bindgen failed. + try: + os.remove(args.output) + except FileNotFoundError: + pass + try: + os.remove(args.depfile) + except FileNotFoundError: + pass + return returncode + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/rust/run_build_script.py b/rust/run_build_script.py new file mode 100755 index 000000000000..0db5cb56fa2c --- /dev/null +++ b/rust/run_build_script.py @@ -0,0 +1,164 @@ +#!/usr/bin/env vpython3 + +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This is a wrapper script which runs a Cargo build.rs build script +# executable in a Cargo-like environment. Build scripts can do arbitrary +# things and we can't support everything. Moreover, we do not WANT +# to support everything because that means the build is not deterministic. 
+# Code review processes must be applied to ensure that the build script +# depends upon only these inputs: +# +# * The environment variables set by Cargo here: +# https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts +# * Output from rustc commands, e.g. to figure out the Rust version. +# +# Similarly, the only allowable output from such a build script +# is currently: +# +# * Generated .rs files +# * cargo:rustc-cfg output. +# +# That's it. We don't even support the other standard cargo:rustc- +# output messages. + +import argparse +import io +import os +import platform +import re +import subprocess +import sys +import tempfile + +# Set up path to be able to import action_helpers +sys.path.append( + os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir, + os.pardir, 'build')) +import action_helpers + + +RUSTC_VERSION_LINE = re.compile(r"(\w+): (.*)") + + +def rustc_name(): + if platform.system() == 'Windows': + return "rustc.exe" + else: + return "rustc" + + +def host_triple(rustc_path): + """ Works out the host rustc target. """ + args = [rustc_path, "-vV"] + known_vars = dict() + proc = subprocess.Popen(args, stdout=subprocess.PIPE) + for line in io.TextIOWrapper(proc.stdout, encoding="utf-8"): + m = RUSTC_VERSION_LINE.match(line.rstrip()) + if m: + known_vars[m.group(1)] = m.group(2) + return known_vars["host"] + + +RUSTC_CFG_LINE = re.compile("cargo:rustc-cfg=(.*)") + + +def main(): + parser = argparse.ArgumentParser(description='Run Rust build script.') + parser.add_argument('--build-script', + required=True, + help='build script to run') + parser.add_argument('--output', + required=True, + help='where to write output rustc flags') + parser.add_argument('--target', help='rust target triple') + parser.add_argument('--features', help='features', nargs='+') + parser.add_argument('--env', help='environment variable', nargs='+') + parser.add_argument('--rust-prefix', required=True, help='rust path prefix') + parser.add_argument('--generated-files', nargs='+', help='any generated file') + parser.add_argument('--out-dir', required=True, help='target out dir') + parser.add_argument('--src-dir', required=True, help='target source dir') + + args = parser.parse_args() + + rustc_path = os.path.join(args.rust_prefix, rustc_name()) + + # We give the build script an OUT_DIR of a temporary directory, + # and copy out only any files which gn directives say that it + # should generate. Mostly this is to ensure we can atomically + # create those files, but it also serves to avoid side-effects + # from the build script. + # In the future, we could consider isolating this build script + # into a chroot jail or similar on some platforms, but ultimately + # we are always going to be reliant on code review to ensure the + # build script is deterministic and trustworthy, so this would + # really just be a backup to humans. 
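+  # (For illustration, with a hypothetical cfg name: a build script that
+  # prints "cargo:rustc-cfg=has_foo" yields an output file containing the
+  # two lines "--cfg" and "has_foo", which cargo_crate.gni then passes to
+  # rustc via an @response-file entry in rustflags.)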
+ with tempfile.TemporaryDirectory() as tempdir: + env = {} # try to avoid build scripts depending on other things + env["RUSTC"] = os.path.abspath(rustc_path) + env["OUT_DIR"] = tempdir + env["CARGO_MANIFEST_DIR"] = os.path.abspath(args.src_dir) + env["HOST"] = host_triple(rustc_path) + if args.target is None: + env["TARGET"] = env["HOST"] + else: + env["TARGET"] = args.target + target_components = env["TARGET"].split("-") + env["CARGO_CFG_TARGET_ARCH"] = target_components[0] + if args.features: + for f in args.features: + feature_name = f.upper().replace("-", "_") + env["CARGO_FEATURE_%s" % feature_name] = "1" + if args.env: + for e in args.env: + (k, v) = e.split("=") + env[k] = v + # Pass through a couple which are useful for diagnostics + if os.environ.get("RUST_BACKTRACE"): + env["RUST_BACKTRACE"] = os.environ.get("RUST_BACKTRACE") + if os.environ.get("RUST_LOG"): + env["RUST_LOG"] = os.environ.get("RUST_LOG") + + # In the future we should, set all the variables listed here: + # https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts + + proc = subprocess.run([os.path.abspath(args.build_script)], + env=env, + cwd=args.src_dir, + encoding='utf8', + capture_output=True) + + if proc.stderr.rstrip(): + print(proc.stderr.rstrip(), file=sys.stderr) + proc.check_returncode() + + flags = "" + for line in proc.stdout.split("\n"): + m = RUSTC_CFG_LINE.match(line.rstrip()) + if m: + flags = "%s--cfg\n%s\n" % (flags, m.group(1)) + + # AtomicOutput will ensure we only write to the file on disk if what we + # give to write() is different than what's currently on disk. + with action_helpers.atomic_output(args.output) as output: + output.write(flags.encode("utf-8")) + + # Copy any generated code out of the temporary directory, + # atomically. + if args.generated_files: + for generated_file in args.generated_files: + in_path = os.path.join(tempdir, generated_file) + out_path = os.path.join(args.out_dir, generated_file) + out_dir = os.path.dirname(out_path) + if not os.path.exists(out_dir): + os.makedirs(out_dir) + with open(in_path, 'rb') as input: + with action_helpers.atomic_output(out_path) as output: + content = input.read() + output.write(content) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/rust/run_rs_bindings_from_cc.py b/rust/run_rs_bindings_from_cc.py new file mode 100755 index 000000000000..0b6ed4aa8f11 --- /dev/null +++ b/rust/run_rs_bindings_from_cc.py @@ -0,0 +1,127 @@ +#!/usr/bin/env python3 + +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
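+#
+# A typical invocation, as assembled by //build/rust/rs_bindings_from_cc.gni
+# (all paths here are illustrative only):
+#
+#   run_rs_bindings_from_cc.py \
+#       --targets_and_headers_from_gn gen/foo_rs_api_meta.json \
+#       --public_headers foo.h --rs_out gen/foo_rs_api.rs \
+#       --cc_out gen/foo_rs_api_impl.cc -- {{cflags}} ...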
+ +import argparse +import json +import os +import subprocess +import sys + +THIS_DIR = os.path.dirname(os.path.abspath(__file__)) +CHROMIUM_SRC_DIR = os.path.relpath(os.path.join(THIS_DIR, os.pardir, os.pardir)) +sys.path.append(THIS_DIR) +from run_bindgen import filter_clang_args + +RUST_TOOLCHAIN_DIR = os.path.join(CHROMIUM_SRC_DIR, "third_party", + "rust-toolchain") +RUSTFMT_EXE_PATH = os.path.join(RUST_TOOLCHAIN_DIR, "bin", "rustfmt") +RUSTFMT_CONFIG_PATH = os.path.join(CHROMIUM_SRC_DIR, ".rustfmt.toml") +RS_BINDINGS_FROM_CC_EXE_PATH = os.path.join(RUST_TOOLCHAIN_DIR, "bin", + "rs_bindings_from_cc") + + +def format_cmdline(args): + def quote_arg(x): + if ' ' not in x: return x + x = x.replace('"', '\\"') + return f"\"{x}\"" + + return " ".join([quote_arg(x) for x in args]) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("--targets_and_headers_from_gn", + metavar="FILE", + help="File parsed into --targets_and_headers Crubit arg", + required=True), + parser.add_argument("--public_headers", + metavar="FILE", + help="Passed through to Crubit", + required=True), + parser.add_argument("--rs_out", + metavar="FILE", + help="Passed through to Crubit", + required=True), + parser.add_argument("--cc_out", + metavar="FILE", + help="Passed through to Crubit", + required=True), + parser.add_argument("clang_args", + metavar="CLANGARGS", + help="Arguments to forward to clang libraries", + nargs=argparse.REMAINDER) + args = parser.parse_args() + + # Output paths + generator_args = [] + generator_args.append("--rs_out={0}".format(os.path.relpath(args.rs_out))) + generator_args.append("--cc_out={0}".format(os.path.relpath(args.cc_out))) + if "CRUBIT_DEBUG" in os.environ: + generator_args.append("--ir_out={0}".format( + os.path.relpath(args.rs_out).replace(".rs", ".ir"))) + + # Public headers. + generator_args.append("--public_headers={0}".format(",".join( + [os.path.relpath(hdr) for hdr in args.public_headers.split(",")]))) + + # Targets to headers map. + with open(args.targets_and_headers_from_gn, "r") as f: + targets_and_headers = json.load(f) + for entry in targets_and_headers: + hdrs = entry["h"] + for i in range(len(hdrs)): + hdrs[i] = os.path.relpath(hdrs[i]) + generator_args.append("--targets_and_headers={0}".format( + json.dumps(targets_and_headers))) + + # All Crubit invocations in Chromium share the following cmdline args. + generator_args.append(f"--rustfmt_exe_path={RUSTFMT_EXE_PATH}") + generator_args.append(f"--rustfmt_config_path={RUSTFMT_CONFIG_PATH}") + generator_args.append( + "--crubit_support_path=third_party/crubit/src/rs_bindings_from_cc/support" + ) + + # Long cmdlines may not work - work around that by using Abseil's `--flagfile` + # https://abseil.io/docs/python/guides/flags#a-note-about---flagfile + # + # Note that `clang_args` are not written to the flag file, because Abseil's + # flag parsing code is only aware of `ABSL_FLAG`-declared flags and doesn't + # know about Clang args (e.g. `-W...` or `-I...`). + params_file_path = os.path.relpath(args.rs_out).replace(".rs", ".params") + with open(params_file_path, "w") as f: + for line in generator_args: + print(line, file=f) + + # Clang arguments. 
+ # + # The call to `filter_clang_args` is needed to avoid the following error: + # error: unable to find plugin 'find-bad-constructs' + clang_args = [] + clang_args.extend(filter_clang_args(args.clang_args)) + # TODO(crbug.com/1329611): This warning needs to be suppressed, because + # otherwise Crubit/Clang complains as follows: + # error: .../third_party/rust-toolchain/bin/rs_bindings_from_cc: + # 'linker' input unused [-Werror,-Wunused-command-line-argument] + # Maybe `build/rust/rs_bindings_from_cc.gni` gives too much in `args`? But + # then `{{cflags}}` seems perfectly reasonable... + clang_args += ["-Wno-unused-command-line-argument"] + + # Print a copy&pastable final cmdline when asked for debugging help. + cmdline = [RS_BINDINGS_FROM_CC_EXE_PATH, f"--flagfile={params_file_path}"] + cmdline.extend(clang_args) + if "CRUBIT_DEBUG" in os.environ: + pretty_cmdline = format_cmdline(cmdline) + print(f"CRUBIT_DEBUG: CMDLINE: {pretty_cmdline}", file=sys.stderr) + + # TODO(crbug.com/1329611): run_bindgen.py removes the outputs when the tool + # fails. Maybe we need to do something similar here? OTOH in most failure + # modes Crubit will fail *before* generating its outputs... + return subprocess.run(cmdline).returncode + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/rust/rust_bindgen.gni b/rust/rust_bindgen.gni new file mode 100644 index 000000000000..9d72169ba16b --- /dev/null +++ b/rust/rust_bindgen.gni @@ -0,0 +1,193 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/clang/clang.gni") +import("//build/config/rust.gni") +import("//build/config/sysroot.gni") +import("//build/rust/rust_static_library.gni") + +if (is_win) { + import("//build/toolchain/win/win_toolchain_data.gni") +} + +_rustc_base_path = rust_sysroot + +# TODO(danakj): When we're using the Android prebuilt toolchain, there's no +# bindgen present. bindgen is for the host platform so using the linux one will +# work. +if (!use_chromium_rust_toolchain) { + _rustc_base_path = "//third_party/rust-toolchain" +} + +_bindgen_path = "${_rustc_base_path}/bin/bindgen" +if (is_win) { + _bindgen_path = "${_bindgen_path}.exe" +} + +# Template to build Rust/C bindings with bindgen. +# +# This template expands to a static_library containing the Rust side of the +# bindings. Simply treat it as a public dependency. +# +# Parameters: +# +# header: +# The .h file to generate bindings for. +# +# deps: (optional) +# C targets on which the headers depend in order to build successfully. +# +# configs: (optional) +# C compilation targets determine the correct list of -D and -I flags based +# on their dependencies and any configs applied. The same applies here. Set +# any configs here as if this were a C target. +# +# bindgen_flags: (optional) +# the additional bindgen flags which are passed to the executable +# +# Rust targets depending on the output must include! the generated file. +# +template("rust_bindgen") { + assert(defined(invoker.header), + "Must specify the C header file to make bindings for.") + action(target_name) { + # bindgen relies on knowing the {{defines}} and {{include_dirs}} required + # to build the C++ headers which it's parsing. These are passed to the + # script's args and are populated using deps and configs. 
+ forward_variables_from(invoker, + TESTONLY_AND_VISIBILITY + [ + "deps", + "configs", + ]) + + sources = [ invoker.header ] + + if (!defined(configs)) { + configs = [] + } + + # Several important compiler flags come from default_compiler_configs + configs += default_compiler_configs + + output_dir = "$target_gen_dir" + out_gen_rs = "$output_dir/${target_name}.rs" + + script = rebase_path("//build/rust/run_bindgen.py") + inputs = [ _bindgen_path ] + + depfile = "$target_out_dir/${target_name}.d" + outputs = [ out_gen_rs ] + + lib_path = "" + if (is_linux) { + # Linux clang, and clang libs, use a shared libstdc++, which we must + # point to. + clang_ld_path = rebase_path(clang_base_path + "/lib", root_build_dir) + lib_path += "${clang_ld_path}:" + } + rust_ld_path = rebase_path(_rustc_base_path + "/lib", root_build_dir) + lib_path += "${rust_ld_path}" + + args = [ + "--exe", + rebase_path(_bindgen_path), + "--header", + rebase_path(invoker.header, root_build_dir), + "--depfile", + rebase_path(depfile, root_build_dir), + "--output", + rebase_path(out_gen_rs, root_build_dir), + "--ld-library-path", + lib_path, + ] + + if (defined(invoker.bindgen_flags)) { + args += [ "--bindgen-flags" ] + foreach(flag, invoker.bindgen_flags) { + args += [ flag ] + } + } + + args += [ + "--", + "{{defines}}", + "{{include_dirs}}", + "{{cflags}}", + "{{cflags_c}}", + ] + + # Clang ships with some headers, which are installed along side the binary, + # and which clang itself finds by default, but libclang does not (see also + # https://reviews.llvm.org/D95396 which would resolve this but was reverted). + clang_headers = rebase_path( + clang_base_path + "/lib/clang/" + clang_version + "/include", + root_build_dir) + if (is_win) { + args += [ "-imsvc" + clang_headers ] + } else { + args += [ "-isystem" + clang_headers ] + } + + if (is_win) { + # On Windows we fall back to using system headers from a sysroot from + # depot_tools. This is negotiated by python scripts and the result is + # available in //build/toolchain/win/win_toolchain_data.gni. From there + # we get the `include_flags_imsvc` which point to the system headers. + if (host_cpu == "x86") { + win_toolchain_data = win_toolchain_data_x86 + } else if (host_cpu == "x64") { + win_toolchain_data = win_toolchain_data_x64 + } else if (host_cpu == "arm64") { + win_toolchain_data = win_toolchain_data_arm64 + } else { + error("Unsupported host_cpu, add it to win_toolchain_data.gni") + } + args += [ "${win_toolchain_data.include_flags_imsvc}" ] + } + + # Passes C comments through as rustdoc attributes. + if (is_win) { + args += [ "/clang:-fparse-all-comments" ] + } else { + args += [ "-fparse-all-comments" ] + } + + # Default configs include "-fvisibility=hidden", and for some reason this + # causes bindgen not to emit function bindings. Override it. + if (!is_win) { + args += [ "-fvisibility=default" ] + } + + if (is_win) { + # We pass MSVC style flags to clang on Windows, and libclang needs to be + # told explicitly to accept them. + args += [ "--driver-mode=cl" ] + + # On Windows, libclang adds arguments that it then fails to understand. + # -fno-spell-checking + # -fallow-editor-placeholders + # These should not cause bindgen to fail. + args += [ "-Wno-unknown-argument" ] + + # Replace these two arguments with a version that clang-cl can parse. 
+ args += [ + "/clang:-fno-spell-checking", + "/clang:-fallow-editor-placeholders", + ] + } + + if (!is_cfi) { + # LLVM searches for a default CFI ignorelist at (exactly) + # $(cwd)/lib/clang/$(llvm_version)/share/cfi_ignorelist.txt + # Even if we provide a custom -fsanitize-ignorelist, the absence + # of this default file will cause a fatal error. clang finds + # it within third_party/llvm-build, but for bindgen our cwd + # is the $out_dir. We _could_ create this file at the right + # location within the outdir using a "copy" target, but as + # we don't actually generate code within bindgen, the easier + # option is to tell bindgen to ignore all CFI ignorelists. + args += [ "-fno-sanitize-ignorelist" ] + } + } +} diff --git a/rust/rust_executable.gni b/rust/rust_executable.gni new file mode 100644 index 000000000000..ea22aa3b5bd0 --- /dev/null +++ b/rust/rust_executable.gni @@ -0,0 +1,70 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/rust_target.gni") + +# Defines a Rust executable. +# +# This is identical to the built-in gn intrinsic 'executable' but +# supports some additional parameters, as below: +# +# edition (optional) +# Edition of the Rust language to be used. +# Options are "2015", "2018" and "2021". Defaults to "2021". +# +# test_deps (optional) +# List of GN targets on which this crate's tests depend, in addition +# to deps. +# +# build_native_rust_unit_tests (optional) +# Builds native unit tests (under #[cfg(test)]) written inside the Rust +# crate. This will create a `_unittests` executable in the output +# directory when set to true. +# Chromium code should not set this, and instead prefer to split the code +# into a library and write gtests against it. See how to do that in +# //testing/rust_gtest_interop/README.md. +# +# unit_test_target (optional) +# Overrides the default name for the unit tests target +# +# features (optional) +# A list of conditional compilation flags to enable. This can be used +# to set features for crates built in-tree which are also published to +# crates.io. Each feature in the list will be passed to rustc as +# '--cfg feature=XXX' +# +# inputs (optional) +# Additional input files needed for compilation (such as `include!`ed files) +# +# test_inputs (optional) +# Same as above but for the unit tests target +# +# Example of usage: +# +# rust_executable("foo_bar") { +# deps = [ +# "//boo/public/rust/bar", +# ] +# sources = [ "src/main.rs" ] +# } +# +# This template is intended to serve the same purpose as 'rustc_library' +# in Fuchsia. +template("rust_executable") { + exclude_forwards = TESTONLY_AND_VISIBILITY + [ "configs" ] + rust_target(target_name) { + forward_variables_from(invoker, "*", exclude_forwards) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + if (defined(invoker.configs)) { + library_configs = [] + library_configs = invoker.configs + } + target_type = "executable" + assert(!defined(cxx_bindings)) + } +} + +set_defaults("rust_executable") { + configs = default_executable_configs +} diff --git a/rust/rust_macro.gni b/rust/rust_macro.gni new file mode 100644 index 000000000000..427220b9f5e3 --- /dev/null +++ b/rust/rust_macro.gni @@ -0,0 +1,19 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
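+#
+# A skeletal use (the target name and paths are illustrative only):
+#
+#   rust_macro("my_derive_macro") {
+#     crate_root = "src/lib.rs"
+#     sources = [ "src/lib.rs" ]
+#   }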
+ +import("//build/config/rust.gni") +import("//build/rust/rust_target.gni") + +# Template for generating a Rust proc-macro library. Such targets produce a +# dynamic library that is loaded during compilation and used to generate Rust +# code for compilation. +template("rust_macro") { + rust_target(target_name) { + forward_variables_from(invoker, "*", TESTONLY_AND_VISIBILITY) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + + # Has rust_target generate a rust_proc_macro GN output. + target_type = "rust_proc_macro" + } +} diff --git a/rust/rust_shared_library.gni b/rust/rust_shared_library.gni new file mode 100644 index 000000000000..6bea51d09b8d --- /dev/null +++ b/rust/rust_shared_library.gni @@ -0,0 +1,26 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/rust_target.gni") + +# Defines a shared_library containing just Rust code. Has the same variables +# available as a rust_static_library. See rust_static_library.gni for +# documentation. +template("rust_shared_library") { + exclude_forwards = TESTONLY_AND_VISIBILITY + [ "configs" ] + rust_target(target_name) { + forward_variables_from(invoker, "*", exclude_forwards) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + if (defined(invoker.configs)) { + library_configs = [] + library_configs = invoker.configs + } + target_type = "shared_library" + crate_type = "cdylib" + } +} + +set_defaults("rust_shared_library") { + configs = default_shared_library_configs +} diff --git a/rust/rust_static_library.gni b/rust/rust_static_library.gni new file mode 100644 index 000000000000..6512b3491292 --- /dev/null +++ b/rust/rust_static_library.gni @@ -0,0 +1,169 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/rust_target.gni") + +# Defines a Rust static library which can be used by downstream Rust or C++ +# targets. This is a single Rust compilation unit consisting of potentially +# multiple .rs files. +# +# We term this 'rust_static_library' because it is used most analogously +# to a C++ 'static_library' in Chromium. Like the C++ one, it can be compiled +# independently into an intermediate linking target. The output contains the +# object file(s) of the GN target's sources, and not its dependencies. +# +# Parameters +# +# sources +# List of source files which this crate is allowed to compile, which is +# used to determine the impact of source code changes on other GN targets. +# This is not used by the Rust compiler, as it discovers source files by +# following `mod` declarations starting at the `crate_root`. The +# discovered source files must match this list. (This is not yet enforced, +# but will be.) +# +# epoch (optional) +# The major version of the library, which is used to differentiate between +# multiple versions of the same library name. This includes all leading 0s +# and the first non-zero value in the crate's version. This should be left +# as the default, which is "0", for first-party code unless there are +# multiple versions of a crate present. For third-party code, the version +# epoch (matching the directory it is found in) should be specified. +# +# Examples: +# 1.0.2 => epoch = "1" +# 4.2.0 => epoch = "4" +# 0.2.7 => epoch = "0.2" +# 0.0.3 => epoch = "0.0.3" +# +# edition (optional) +# Edition of the Rust language to be used. +# Options are "2015", "2018" and "2021". 
Defaults to "2021". +# +# allow_unsafe (optional) +# Set to true to allow unsafe code in this target. Defaults to false. +# +# configs (optional) +# A list of config labels (in the GN meaning) applying to this target. +# +# rustflags (optional) +# Explicit flags for rustc command line. (Use 'edition' or 'features' +# where possible). +# +# deps (optional) +# List of GN targets on which this crate depends. These may be Rust +# or non-Rust targets. +# +# public_deps (optional) +# List of GN targets on which this crate depends, and which are exported +# into the dependency list of any crate that depends on it. Dependency +# crates that appear in the public API should be included here. +# +# test_deps (optional) +# List of GN targets on which this crate's tests depend, in addition +# to deps. +# +# is_gtest_unittests (optional) +# Should only be set to true for rlibs of gtest unit tests. This ensures +# all objects in the rlib are linked into the final target, rather than +# pruning dead code, so that the tests themselves are not discarded by the +# linker. +# +# mutually_dependent_target (optional) +# mutually_dependent_public_deps (optional) +# These is for use by the mixed_target() template. +# +# If this Rust code is intrinsically paired with some C/C++ code, +# with bidirectional calls between the two, then this would +# be a circular dependency. GN does not allow circular dependencies, +# (other than for header files per allow_circular_includes_from). +# But this is common for a 'component' which has both Rust and C++ +# code. You should structure things such that the C++ code depends +# on the Rust code in the normal way: +# static_library("cpp_stuff") { +# deps = [ "rust_stuff" ] +# # .. +# } +# but that the Rust target also notes the C++ target using this +# 'mutually_dependent_target' parameter. +# rust_static_library("rust_stuff") { +# mutually_dependent_target = "cpp_stuff" +# mutually_dependent_public_deps = _cpp_stuff_public_deps +# # .. +# } +# +# This causes the Rust unit tests, if generated, to depend on the mutually +# dependent target, since depending on the Rust code only would be +# insufficient. And it allows any C++ bindings generated from the Rust code +# to include headers from the mutually_dependent_target by depending on its +# public_deps. +# +# build_native_rust_unit_tests (optional) +# Builds native unit tests (under #[cfg(test)]) written inside the Rust +# crate. This will create a `_unittests` executable in the output +# directory when set to true. +# +# unit_test_target (optional) +# Overrides the default name for the unit tests target +# +# crate_root (optional) +# Location of the crate root. +# This defaults to `./src/lib.rs` and should only be changed when +# absolutely necessary (such as in the case of generated code). +# +# features (optional) +# A list of conditional compilation flags to enable. This can be used +# to set features for crates built in-tree which are also published to +# crates.io. Each feature in the list will be passed to rustc as +# '--cfg feature=XXX' +# +# cxx_bindings (optional) +# A list of Rust files which contain #[cxx::bridge] mods and should +# therefore have C++ bindings generated. See https://cxx.rs. +# This will automatically add appropriate dependencies: there's no +# need to depend on the cxx crate or any generated bindings. +# +# visibility (optional) +# rustflags (optional) +# crate_name (optional) +# Per the usual gn meaning for Rust targets. 
+# +# inputs (optional) +# Additional input files needed for compilation (such as `include!`ed files) +# +# test_inputs (optional) +# Same as above but for the unit tests target +# +# Example of usage: +# +# rust_static_library("foo_bar") { +# deps = [ +# "//boo/public/rust/bar", +# "//third_party/rust/crates:argh", +# "//third_party/rust/crates:serde", +# "//third_party/rust/crates:slab", +# ] +# sources = [ "src/lib.rs" ] +# } +# +# This template is intended to serve the same purpose as 'rustc_library' +# in Fuchsia. +template("rust_static_library") { + exclude_forwards = TESTONLY_AND_VISIBILITY + [ "configs" ] + _target_name = target_name + + rust_target(_target_name) { + forward_variables_from(invoker, "*", exclude_forwards) + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + if (defined(invoker.configs)) { + library_configs = [] + library_configs = invoker.configs + } + target_type = "rust_library" + } +} + +set_defaults("rust_static_library") { + configs = default_compiler_configs +} diff --git a/rust/rust_target.gni b/rust/rust_target.gni new file mode 100644 index 000000000000..1a2bf1db5491 --- /dev/null +++ b/rust/rust_target.gni @@ -0,0 +1,448 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/rust.gni") +import("//build/rust/analyze.gni") +import("//build/rust/rust_unit_test.gni") + +# The //build directory is re-used for non-Chromium products. We do not support +# cxx bindings in such contexts, because //third_party may be missing. +if (build_with_chromium) { + import("//third_party/rust/cxx/chromium_integration/rust_cxx.gni") +} + +# Creates a Rust target (rlib, executable, proc macro etc.) with ability to +# understand some handy variables such as "edition" and "features" and also to +# build any associated unit tests. +# +# Normally, you should not use this directly. Use either +# - cargo_crate.gni - for 3p crates only +# - rust_static_library.gni - for 1p Rust code +# +# Because the common use of this is rust_static_library, all the documentation +# for the supported options is given in rust_static_library.gni. Please refer +# over there. +# +# If you're using rust_target directly, you will also need to specify: +# target_type executable, rust_library etc. per GN norms +# +# There is one area where this differs from `rust_static_library`: configs. +# Here, you must specify `executable_configs` or `library_configs` depending on +# the type of thing you're generating. This is so that different defaults can +# be provided. + +template("rust_target") { + # Only one of `crate_root` or `generate_crate_root` can be specified, or + # neither. + assert(!defined(invoker.crate_root) || + !(defined(invoker.generate_crate_root) && invoker.generate_crate_root)) + + _target_name = target_name + _crate_name = target_name + if (defined(invoker.crate_name)) { + _crate_name = invoker.crate_name + } + + if (defined(invoker.output_dir) && invoker.output_dir != "") { + # This is where the build target (.exe, .rlib, etc) goes. + _output_dir = invoker.output_dir + } + + # This is where the OUT_DIR environment variable points to when running a + # build script and when compiling the build target, for consuming generated + # files. 
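+  # (Illustrative: Rust code conventionally consumes such generated files
+  # with `include!(concat!(env!("OUT_DIR"), "/my_generated.rs"))`, where
+  # "my_generated.rs" is a hypothetical file name.)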
+ _env_out_dir = "$target_gen_dir/$_target_name" + + _allow_unsafe = false + if (defined(invoker.allow_unsafe)) { + _allow_unsafe = invoker.allow_unsafe + } + + if (defined(invoker.generate_crate_root) && invoker.generate_crate_root) { + generated_file("${_target_name}_crate_root") { + outputs = [ "${target_gen_dir}/${target_name}.rs" ] + contents = [ + "// Generated crate root for ${_target_name}.", + "// @generated", + "", + ] + foreach(rs, invoker.sources) { + rs_path_from_root = rebase_path(rs, target_gen_dir) + contents += [ "#[path = \"${rs_path_from_root}\"]" ] + + # Drop the file extension from the module name. + rs_modname = string_replace(rs, ".rs", "") + + # Replace invalid "/" chars in the source file path. + rs_modname = string_replace(rs_modname, "/", "_") + + # Since source files are specified relative to the BUILD.gn they may + # also have ".." path components. + rs_modname = string_replace(rs_modname, "..", "dotdot") + contents += [ + "mod ${rs_modname};", + "", + ] + } + } + _crate_root = + string_join("", get_target_outputs(":${_target_name}_crate_root")) + } else if (defined(invoker.crate_root)) { + _crate_root = invoker.crate_root + } else if (invoker.target_type == "executable") { + _crate_root = "src/main.rs" + } else { + _crate_root = "src/lib.rs" + } + + _testonly = false + if (defined(invoker.testonly)) { + _testonly = invoker.testonly + } + if (defined(invoker.visibility)) { + _visibility = invoker.visibility + } + + _use_local_std = use_local_std_by_default + if (defined(invoker.use_local_std)) { + _use_local_std = invoker.use_local_std + } + + _rustflags = [] + if (defined(invoker.rustflags)) { + _rustflags += invoker.rustflags + } + if (defined(invoker.features)) { + foreach(i, invoker.features) { + _rustflags += [ "--cfg=feature=\"${i}\"" ] + } + } + _edition = "2021" + if (defined(invoker.edition)) { + _edition = invoker.edition + } + _configs = [ "//build/rust:edition_${_edition}" ] + _test_configs = [] + if (invoker.target_type == "executable") { + if (defined(invoker.executable_configs)) { + _configs += invoker.executable_configs + } + } else if (invoker.target_type == "rust_proc_macro") { + if (defined(invoker.proc_macro_configs)) { + _configs += invoker.proc_macro_configs + _test_configs += [ "//build/rust:proc_macro_extern" ] + } + } else { + if (defined(invoker.library_configs)) { + _configs += invoker.library_configs + } + } + _forward_to_host_toolchain = false + if (invoker.target_type == "rust_proc_macro") { + if (current_toolchain != host_toolchain_no_sanitizers) { + _forward_to_host_toolchain = true + } + _main_target_suffix = "${target_name}__proc_macro" + } else { + _main_target_suffix = "__rlib" + } + + _deps = [] + if (defined(invoker.deps)) { + _deps += invoker.deps + } + _public_deps = [] + if (defined(invoker.public_deps)) { + _public_deps += invoker.public_deps + } + if (defined(invoker.aliased_deps)) { + _aliased_deps = invoker.aliased_deps + } else { + _aliased_deps = { + } + } + + _is_data_dep = defined(invoker.is_data_dep) && invoker.is_data_dep + + _build_unit_tests = false + if (defined(invoker.build_native_rust_unit_tests)) { + _build_unit_tests = + invoker.build_native_rust_unit_tests && can_build_rust_unit_tests + } + + # Declares that the Rust crate generates bindings between C++ and Rust via the + # Cxx crate. It may generate C++ headers and/or use the cxx crate macros to + # generate Rust code internally, depending on what bindings are declared. If + # set, it's a set of rust files that include Cxx bindings declarations. 
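+  # (Illustrative sketch of such a declaration, using a hypothetical
+  # exported function; see https://cxx.rs for the full syntax:
+  #
+  #   #[cxx::bridge]
+  #   mod ffi {
+  #       extern "Rust" {
+  #           fn do_thing(input: &str) -> bool;
+  #       }
+  #   }
+  # )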
+ _cxx_bindings = [] + if (defined(invoker.cxx_bindings)) { + assert(build_with_chromium, + "cxx bindings are not supported when building rust targets " + + "outside the Chromium build.") + _cxx_bindings = invoker.cxx_bindings + } + _rustenv = [ "OUT_DIR=" + rebase_path(_env_out_dir) ] + if (defined(invoker.rustenv)) { + _rustenv += invoker.rustenv + } + + # TODO(danakj): This could be a hash generated from the input crate, such as + # from its path, in which case the BUILD.gn would not need to specify + # anything. But GN doesn't give us a hash function to make that easy. + _metadata = "0" + if (defined(invoker.epoch)) { + _metadata = invoker.epoch + } + + # We require that all source files are listed, even though this is + # not a requirement for rustc. The reason is to ensure that tools + # such as `gn deps` give the correct answer, and thus we trigger + # the right test suites etc. on code change. + # TODO(crbug.com/1256930) - verify this is correct + assert(defined(invoker.sources), "sources must be listed") + + if (_forward_to_host_toolchain) { + # Redirect to the host toolchain. + group(_target_name) { + testonly = _testonly + if (defined(_visibility)) { + visibility = _visibility + } + public_deps = [ + ":${_target_name}${_main_target_suffix}($host_toolchain_no_sanitizers)", + ] + } + + not_needed(invoker, "*") + not_needed([ + "_allow_unsafe", + "_build_unit_tests", + "_crate_root", + "_crate_name", + "_cxx_bindings", + "_deps", + "_aliased_deps", + "_is_data_dep", + "_metadata", + "_out_dir", + "_public_deps", + "_rustenv", + "_rustflags", + "_support_use_from_cpp", + "_testonly", + "_use_local_std", + "_visibility", + ]) + } else { + group(_target_name) { + testonly = _testonly + if (defined(_visibility)) { + visibility = _visibility + } + + # Both the C++ bindings (if present) and the Rust crate should be treated + # like direct dependencies, so we expose them both in public_deps. + public_deps = [ ":${_target_name}${_main_target_suffix}" ] + + # TODO(danakj): This would not be needed if we stopped forwarding through + # a group in the common (non-procmacro) case. + if (_is_data_dep) { + data_deps = [ ":${_target_name}${_main_target_suffix}" ] + } + + if (_cxx_bindings != []) { + public_deps += [ ":${_target_name}_cxx_generated" ] + + # Additionally, C++ bindings generated by Cxx can include C++ types + # that come from the Cxx library, such as `rust::Str`. So any C++ + # target that depends on a rust target directly may need access to Cxx + # as well, which means it must appear in public_deps. + public_deps += [ "//build/rust:cxx_cppdeps" ] + + # cxx_cppdeps pulls in the default libstd, so make sure the default was + # not overridden. + assert( + _use_local_std == use_local_std_by_default, + "Rust targets with cxx bindings cannot override the default libstd") + } else if (!defined(invoker.no_std) || !invoker.no_std) { + # If C++ depends on and links in the library, we need to make sure C++ + # links in the Rust stdlib. This is orthogonal to if the library exports + # bindings for C++ to use. + if (_use_local_std) { + deps = [ "//build/rust/std:link_local_std" ] + } else { + assert(prebuilt_libstd_supported, + "Prebuilt Rust stdlib is not available for this target") + deps = [ "//build/rust/std:link_prebuilt_std" ] + } + } + } + + _rust_deps = _deps + _rust_aliased_deps = _aliased_deps + _rust_public_deps = _public_deps + _cxx_deps = _deps + + # The Rust target (and unit tests) need the Cxx crate when using it to + # generate bindings. 
+ if (_cxx_bindings != []) { + _rust_deps += [ "//build/rust:cxx_rustdeps" ] + + # C++ targets can depend on the Rust target from the BUILD.gn file to + # access the headers generated from it + _rust_public_deps += [ ":${_target_name}_cxx_generated" ] + } + + if (!defined(invoker.no_std) || !invoker.no_std) { + if (_use_local_std) { + _rust_deps += [ "//build/rust/std:local_std_for_rustc" ] + } else { + _rust_deps += [ "//build/rust/std:prebuilt_std_for_rustc" ] + } + } else { + not_needed([ "_use_local_std" ]) + } + + # You must go through the groups above to get to these targets. + _visibility = [] + _visibility = [ ":${_target_name}" ] + + target(invoker.target_type, "${_target_name}${_main_target_suffix}") { + forward_variables_from(invoker, + "*", + TESTONLY_AND_VISIBILITY + [ + "features", + "deps", + "aliased_deps", + "public_deps", + "rustflags", + "rustenv", + "configs", + "unit_test_output_dir", + "unit_test_target", + "test_inputs", + ]) + + testonly = _testonly + visibility = _visibility + crate_name = _crate_name + crate_root = _crate_root + configs = [] + configs = _configs + deps = _rust_deps + aliased_deps = _rust_aliased_deps + public_deps = _rust_public_deps + rustflags = _rustflags + rustflags += [ "-Cmetadata=${_metadata}" ] + rustenv = _rustenv + + # The Rust tool() declarations, like C++ ones, use the output_name and + # output_dir, so that GN targets can override these if needed. Here we + # give them their default values, or allow them to be overridden. + if (defined(_output_dir)) { + output_dir = _output_dir + } + if (!defined(output_name) || output_name == "") { + output_name = crate_name + } + + if (compute_inputs_for_analyze) { + deps += [ ":${_target_name}_analyze" ] + } + + if (!_allow_unsafe) { + configs += [ "//build/rust:forbid_unsafe" ] + } + } + + if (compute_inputs_for_analyze) { + # Find and depend on all rust files in the crate for the purpose of `gn + # analyze`. + analyze_rust("${_target_name}_analyze") { + forward_variables_from(invoker, "*", [ "crate_root" ]) + crate_root = _crate_root + } + } + + if (_cxx_bindings != []) { + rust_cxx("${_target_name}_cxx_generated") { + testonly = _testonly + visibility = [ ":${_target_name}${_main_target_suffix}" ] + if (defined(_visibility)) { + visibility += _visibility + } + sources = _cxx_bindings + deps = _cxx_deps + _public_deps + + if (is_component_build) { + # In a component_build the cxx bindings may be linked into a shared + # library at any point up the dependency tree, so always export. 
+ export_symbols = true + } else if (invoker.target_type == "shared_library") { + export_symbols = true + } else { + export_symbols = false + } + } + } else { + not_needed([ "_cxx_deps" ]) + } + + if (_build_unit_tests) { + _unit_test_target = "${_target_name}_unittests" + if (defined(invoker.unit_test_target)) { + _unit_test_target = invoker.unit_test_target + } + + rust_unit_test(_unit_test_target) { + forward_variables_from(invoker, [ "sources" ]) + testonly = true + crate_root = _crate_root + rustflags = _rustflags + env_out_dir = _env_out_dir + if (defined(invoker.unit_test_output_dir)) { + output_dir = invoker.unit_test_output_dir + } + deps = _rust_deps + _public_deps + aliased_deps = _rust_aliased_deps + public_deps = [ ":${_target_name}" ] + if (defined(invoker.test_deps)) { + deps += invoker.test_deps + } + inputs = [] + if (defined(invoker.inputs)) { + inputs += invoker.inputs + } + if (defined(invoker.test_inputs)) { + inputs += invoker.test_inputs + } + if (defined(invoker.executable_configs)) { + configs = [] + configs = invoker.executable_configs + } else if (!defined(configs)) { + configs = [] + } + configs += _test_configs + rustenv = _rustenv + + if (!_allow_unsafe) { + configs += [ "//build/rust:forbid_unsafe" ] + } + } + } else { + not_needed([ + "_crate_root", + "_crate_name", + "_metadata", + "_test_configs", + ]) + } + } +} + +set_defaults("rust_target") { + executable_configs = default_executable_configs + library_configs = default_compiler_configs + proc_macro_configs = default_rust_proc_macro_configs +} diff --git a/rust/rust_unit_test.gni b/rust/rust_unit_test.gni new file mode 100644 index 000000000000..9bb3055e74e6 --- /dev/null +++ b/rust/rust_unit_test.gni @@ -0,0 +1,138 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/rust.gni") +import("//build/rust/rust_unit_tests_group.gni") + +# Defines a Rust unit test. +# +# This generates an executable + a script that can be run on Chromium bots. +# Future iterations of this template may do something smarter with the test +# code in order to automatically contribute it to test steps on the bots. +# +# Parameters +# +# sources +# edition (optional) +# allow_unsafe (optional) +# configs (optional) +# deps (optional) +# crate_root (optional) +# features (optional) +# rustflags (optional) +# inputs (optional) +# All as in rust_static_library. +# +# Example of usage: +# +# rust_unit_test("foo_tests") { +# deps = [ +# "//third_party/rust/test_utils/v1:lib", +# ] +# sources = [ "src/lib.rs" ] +# } +# +# Implementation note: you might assume it makes sense to implement this +# in terms of rust_target in order to avoid the duplication of logic around +# features and editions. We don't do that because rust_target actually +# depends on this template in order to build embedded unit tests +# (and therefore depending on rust_target here would lead to an infinite +# import loop). 
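+#
+# Note: for the "foo_tests" example above, the underlying executable target is
+# foo_tests_exe (its binary keeps the crate name, foo_tests), and a wrapper
+# script is emitted at out/Default/bin/run_foo_tests (run_foo_tests.bat on
+# Windows) via the rust_unit_tests_group template used inside.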
+ +template("rust_unit_test") { + assert(can_build_rust_unit_tests) + if (defined(invoker.crate_name)) { + _crate_name = invoker.crate_name + } else { + _crate_name = target_name + } + if (defined(invoker.crate_root)) { + _crate_root = invoker.crate_root + } else { + _crate_root = "src/lib.rs" + } + _rustflags = invoker.rustflags + if (defined(invoker.features)) { + foreach(i, invoker.features) { + _rustflags += [ "--cfg=feature=\"${i}\"" ] + } + } + _configs = invoker.configs + _edition = "2021" + if (defined(invoker.edition)) { + _edition = invoker.edition + } + _configs += [ "//build/rust:edition_${_edition}" ] + + # We require that all source files are listed, even though this is + # not a requirement for rustc. The reason is to ensure that tools + # such as `gn deps` give the correct answer, and thus we trigger + # the right test suites etc. on code change. + # TODO(crbug.com/1256930) - verify this is correct + assert(defined(invoker.sources), "sources must be listed") + + _exe_target_name = target_name + "_exe" + rust_unit_tests_group(target_name) { + deps = [ ":$_exe_target_name" ] + } + + # The OUT_DIR for a crate's tests should point to the same OUT_DIR that the + # library it's testing used. The `env_out_dir` variable can be used to specify + # that directory. + if (defined(invoker.env_out_dir)) { + _env_out_dir = invoker.env_out_dir + } else { + _env_out_dir = target_gen_dir + } + + # TODO(crbug.com/1229320): Arrange to run test executables on try bots. + # TODO(crbug.com/gn/146): Allow Rust executables to depend on C/C++ source + # sets. + # This is important in cases where Rust tests may depend upon C/C++ + # dependencies. + executable(_exe_target_name) { + testonly = true + forward_variables_from(invoker, + "*", + [ + "allow_unsafe", + "edition", + "features", + "rustflags", + "configs", + "crate_name", + "crate_root", + "env_out_dir", + ]) + if (!defined(output_name) || output_name == "") { + output_name = _crate_name + } + + rustflags = [ + "--cfg", + "feature=\"test\"", + "--test", + ] + rustflags += _rustflags + configs = [] + configs = _configs + crate_name = _crate_name + crate_root = _crate_root + if (!defined(rustenv)) { + rustenv = [] + } + + rustenv += [ "OUT_DIR=" + rebase_path(_env_out_dir) ] + metadata = { + # Consumed by "rust_unit_tests_group" gni template. + rust_unit_test_executables = [ _crate_name ] + } + } +} + +set_defaults("rust_unit_test") { + configs = default_executable_configs + deps = [] + rustflags = [] +} diff --git a/rust/rust_unit_tests_group.gni b/rust/rust_unit_tests_group.gni new file mode 100644 index 000000000000..c2cdfe4d97a9 --- /dev/null +++ b/rust/rust_unit_tests_group.gni @@ -0,0 +1,93 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Defines a Rust unit tests group. +# +# This generates a script that wraps 1 or more Rust unit test executables. +# Such script would typically wrap all Rust unit tests from a set of related +# crates (e.g. all crates under //base). +# +# The script is primarily useful to enable running the tests on Chromium bots, +# but it is also a convenience for having a single entry point for running +# the tests locally (without having to manually fan out to all the individual +# executables). +# +# Parameters: +# +# deps - Will be recursively traversed to discover all the Rust unit test +# executables. 
+# +# Example usage: +# +# # This will generate a script at out/Default/bin/run_foo_tests (or +# # run_foo_tests.bat on Windows) that wraps the executables containing +# # native Rust unit tests: +# # * out/Default/foo_crate1_unittests +# # * out/Default/foo_mixed_source_set2_rs_unittests +# # * out/Default/foo_mixed_source_set3_rs_unittests +# rust_unit_tests_group("foo_tests") { +# deps = [ +# "foo_crate1", +# "foo_mixed_source_set2", +# "foo_mixed_source_set3", +# ] +# } + +template("rust_unit_tests_group") { + assert(defined(invoker.deps), "deps must be listed") + + # As noted in the top-level comment of //testing/buildbot/gn_isolate_map.pyl + # the script *must* be in output_dir/bin/run_$target (or + # output_dir\bin\run_$target.bat on Windows). + bat = "" + if (is_win) { + bat = ".bat" + } + _script_filepath = "$root_out_dir/bin/run_${target_name}${bat}" + + # Gathering metadata provided by the rust_unit_test gni template from all of + # our dependencies. + _metadata_target_name = "${target_name}_metadata" + _metadata_filepath = "$root_build_dir/${target_name}__rust_unittest_exes.txt" + generated_file(_metadata_target_name) { + forward_variables_from(invoker, [ "deps" ], []) + + testonly = true + outputs = [ _metadata_filepath ] + data_keys = [ "rust_unit_test_executables" ] + } + + # Generating a script that can run all of the wrapped Rust unit test + # executables. + action(target_name) { + forward_variables_from(invoker, "*", []) + + testonly = true + script = "//testing/scripts/rust/generate_script.py" + inputs = [ _metadata_filepath ] + outputs = [ _script_filepath ] + + data = [ _script_filepath ] + + if (!defined(data_deps)) { + data_deps = [] + } + data_deps += [ "//testing/scripts/rust" ] + data_deps += deps + + deps += [ ":$_metadata_target_name" ] + + args = [ + "--rust-test-executables", + rebase_path(_metadata_filepath, root_build_dir), + "--exe-dir", + rebase_path(root_out_dir, root_build_dir), + "--script-path", + rebase_path(_script_filepath, root_build_dir), + ] + if (is_win) { + args += [ "--make-bat" ] + } + } +} diff --git a/rust/rustc_wrapper.py b/rust/rustc_wrapper.py new file mode 100755 index 000000000000..212ad44d023b --- /dev/null +++ b/rust/rustc_wrapper.py @@ -0,0 +1,157 @@ +#!/usr/bin/env python3 + +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import pathlib +import subprocess +import os +import sys +import re + +# Set up path to be able to import action_helpers. +sys.path.append( + os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir, + os.pardir, 'build')) +import action_helpers + +# This script wraps rustc for (currently) these reasons: +# * To work around some ldflags escaping performed by ninja/gn +# * To remove dependencies on some environment variables from the .d file. +# * To enable use of .rsp files. +# * To work around two gn bugs on Windows +# +# LDFLAGS ESCAPING +# +# This script performs a simple function to work around some of the +# parameter escaping performed by ninja/gn. +# +# rustc invocations are given access to {{rustflags}} and {{ldflags}}. +# We want to pass {{ldflags}} into rustc, using -Clink-args="{{ldflags}}". +# Unfortunately, ninja assumes that each item in {{ldflags}} is an +# independent command-line argument and will have escaped them appropriately +# for use on a bare command line, instead of in a string. 
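+# (Illustrative, with made-up flags: ninja might expand {{ldflags}} to
+#   -Wl,--gc-sections -Wl,-z,defs
+# and the conversion described below turns that into
+#   -Clink-arg=-Wl,--gc-sections -Clink-arg=-Wl,-z,defs
+# rather than a single quoted -Clink-args string.)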
+#
+# This script converts such {{ldflags}} into individual -Clink-arg=X
+# arguments to rustc.
+#
+# RUSTENV dependency stripping
+#
+# When Rust code depends on an environment variable at build-time
+# (using the env! macro), rustc spots that and adds it to the .d file.
+# Ninja then parses that .d file and determines that the environment
+# dependency means that the target always needs to be rebuilt.
+#
+# That's all correct, but _we_ know that some of these environment
+# variables (typically, all of them) are set by .gn files which ninja
+# tracks independently. So we remove them from the .d file.
+#
+# RSP files:
+#
+# We want to put the ninja/gn variables {{rustdeps}} and {{externs}}
+# in an RSP file. Unfortunately, they are space-separated variables
+# but Rust requires a newline-separated input. This script duly makes
+# the adjustment. This works around a gn issue:
+# TODO(https://bugs.chromium.org/p/gn/issues/detail?id=249): fix this
+#
+# WORKAROUND WINDOWS BUGS:
+#
+# On Windows platforms, this temporarily works around some issues in gn.
+# See comments inline, linking to the relevant gn fixes.
+#
+# Usage:
+#   rustc_wrapper.py --rustc <path to rustc> --depfile <path to depfile>
+#     -- <rustc args> LDFLAGS {{ldflags}} RUSTENV {{rustenv}}
+# The LDFLAGS token is discarded, and everything after that is converted
+# to being a series of -Clink-arg=X arguments, until or unless RUSTENV
+# is encountered, after which those are interpreted as environment
+# variables to pass to rustc (and which will be removed from the .d file).
+#
+# Both LDFLAGS and RUSTENV **MUST** be specified, in that order, even if
+# the list following them is empty.
+#
+# TODO(https://github.com/rust-lang/rust/issues/73632): avoid using rustc
+# for linking in the first place. Most of our binaries are linked using
+# clang directly, but there are some types of Rust build product which
+# must currently be created by rustc (e.g. unit test executables). As
+# part of support for using non-rustc linkers, we should arrange to extract
+# such functionality from rustc so that we can make all types of binary
+# using our clang toolchain. That will remove the need for most of this
+# script.
+
+
+# Equivalent of the python3.9 built-in str.removesuffix(".lib"), applied
+# only to -l arguments.
+def remove_lib_suffix_from_l_args(text):
+  if text.startswith("-l") and text.endswith(".lib"):
+    return text[:-len(".lib")]
+  return text
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--rustc', required=True, type=pathlib.Path)
+  parser.add_argument('--depfile', type=pathlib.Path)
+  parser.add_argument('--rsp', type=pathlib.Path)
+  parser.add_argument('args', metavar='ARG', nargs='+')
+
+  args = parser.parse_args()
+
+  remaining_args = args.args
+
+  ldflags_separator = remaining_args.index("LDFLAGS")
+  rustenv_separator = remaining_args.index("RUSTENV", ldflags_separator)
+  rustc_args = remaining_args[:ldflags_separator]
+  ldflags = remaining_args[ldflags_separator + 1:rustenv_separator]
+  rustenv = remaining_args[rustenv_separator + 1:]
+
+  is_windows = os.name == 'nt'
+
+  rustc_args.extend(["-Clink-arg=%s" % arg for arg in ldflags])
+
+  # Workaround for https://bugs.chromium.org/p/gn/issues/detail?id=249
+  if args.rsp:
+    with open(args.rsp) as rspfile:
+      rsp_args = [l.rstrip() for l in rspfile.read().split(' ') if l.rstrip()]
+    if is_windows:
+      # Workaround for a hard-coded string in gn; the full fix will come from
+      # https://gn-review.googlesource.com/c/gn/+/12460
+      rsp_args = [arg for arg in rsp_args if not arg.endswith("-Bdynamic")]
+      # Workaround for "-l<name>.lib", where the ".lib" suffix is undesirable.
+ # Full fix will come from https://gn-review.googlesource.com/c/gn/+/12480 + rsp_args = [remove_lib_suffix_from_l_args(arg) for arg in rsp_args] + with open(args.rsp, 'w') as rspfile: + rspfile.write("\n".join(rsp_args)) + rustc_args.append(f'@{args.rsp}') + + env = os.environ.copy() + fixed_env_vars = [] + for item in rustenv: + (k, v) = item.split("=", 1) + env[k] = v + fixed_env_vars.append(k) + + r = subprocess.run([args.rustc, *rustc_args], env=env, check=False) + if r.returncode != 0: + sys.exit(r.returncode) + + # Now edit the depfile produced + if args.depfile is not None: + env_dep_re = re.compile("# env-dep:(.*)=.*") + replacement_lines = [] + dirty = False + with open(args.depfile, encoding="utf-8") as d: + for line in d: + m = env_dep_re.match(line) + if m and m.group(1) in fixed_env_vars: + dirty = True # skip this line + else: + replacement_lines.append(line) + if dirty: # we made a change, let's write out the file + with action_helpers.atomic_output(args.depfile) as output: + output.write("\n".join(replacement_lines).encode("utf-8")) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/rust/std/BUILD.gn b/rust/std/BUILD.gn new file mode 100644 index 000000000000..6ae92a31f296 --- /dev/null +++ b/rust/std/BUILD.gn @@ -0,0 +1,346 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file provides the ability for our C++ toolchain to successfully +# link binaries containing arbitrary Rust code. +# +# By "arbitrary Rust code" I mean .rlib archives full of Rust code, which +# is actually a static archive. +# +# Those static libraries don't link as-is into a final executable because +# they're designed for downstream processing by further invocations of rustc +# which link into a final binary. That final invocation of rustc knows how +# to do two things: +# * Find the Rust standard library. +# * Remap some generic allocator symbols to the specific allocator symbols +# in use. +# This file takes care of equivalent tasks for our C++ toolchains. +# C++ targets should depend upon either link_local_std or +# link_prebuilt_std to ensure that Rust code can be linked into their +# C++ executables. +# +# This is obviously a bit fragile - rustc might do other magic in future. +# But, linking with a final C++ toolchain is something often needed, and +# https://github.com/rust-lang/rust/issues/64191 aims to make this +# officially possible. + +import("//build/config/compiler/compiler.gni") +import("//build/config/rust.gni") + +if (toolchain_has_rust) { + # Equivalent of allocator symbols injected by rustc. + source_set("remap_alloc") { + sources = [ + "immediate_crash.h", + "remap_alloc.cc", + ] + } + + # List of Rust stdlib rlibs which are present in the official Rust toolchain + # we are using from the Android team. This is usually a version or two behind + # nightly. Generally this matches the toolchain we build ourselves, but if + # they differ, append or remove libraries based on the + # `use_chromium_rust_toolchain` GN variable. + # + # If the build fails due to missing symbols, it would be because of a missing + # library that needs to be added here in a newer stdlib. 
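+  # (Illustrative: if a future toolchain roll split out a hypothetical crate
+  # "foo", the fix would be to add "foo" to this list, or to the
+  # added_rust_stdlib_libs gn arg described below.)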
+ stdlib_files = [ + "std", # List first because it makes depfiles more debuggable (see below) + "addr2line", + "adler", + "alloc", + "cfg_if", + "compiler_builtins", + "core", + "getopts", + "gimli", + "hashbrown", + "libc", + "memchr", + "miniz_oxide", + "object", + "panic_abort", + "panic_unwind", + "rustc_demangle", + "std_detect", + "test", + "unicode_width", + "unwind", + ] + + if (is_win) { + # Our C++ builds already link against a wide variety of Windows API import libraries, + # but the Rust stdlib requires a few extra. + _windows_lib_deps = [ + "bcrypt.lib", + "ntdll.lib", + "userenv.lib", + ] + } + + # rlibs explicitly ignored when copying prebuilt sysroot libraries. + # find_std_rlibs.py rightfully errors out if an unexpected prebuilt lib is + # encountered, since it usually indicates we missed something. This ignore + # list is also passed to it. This has no effect on the local std build. + ignore_stdlib_files = [] + + # proc_macro is special: we only run proc macros on the host, so we only want + # it for our host toolchain. + if (current_toolchain == host_toolchain_no_sanitizers) { + # Directs the local_std_for_rustc target to depend on proc_macro, and + # includes proc_macro in the prebuilts copied in find_stdlib. Otherwise it + # is not built or copied. + stdlib_files += [ "proc_macro" ] + } else { + # Explicitly ignore it from the prebuilts. Nothing needs to be done for the + # local std build. + ignore_stdlib_files += [ "proc_macro" ] + } + + # Different Rust toolchains may add or remove files relative to the above + # list. That can be specified in gn args for anyone using (for instance) + # nightly or some other experimental toolchain, prior to it becoming official. + stdlib_files -= removed_rust_stdlib_libs + stdlib_files += added_rust_stdlib_libs + + # rlib files which are distributed alongside Rust's prebuilt stdlib, but we + # don't need to pass to the C++ linker because they're used for specialized + # purposes. + skip_stdlib_files = [ + "profiler_builtins", + "rustc_std_workspace_alloc", + "rustc_std_workspace_core", + "rustc_std_workspace_std", + ] + if (prebuilt_libstd_supported) { + action("find_stdlib") { + # Collect prebuilt Rust libraries from toolchain package and copy to a known + # location. + # + # The Rust toolchain contains prebuilt rlibs for the standard library and + # its dependencies. However, they have unstable names: an unpredictable + # metadata hash is appended to the known crate name. + # + # We must depend on these rlibs explicitly when rustc is not in charge of + # linking. However, it is difficult to construct GN rules to do so when the + # names can't be known statically. + # + # This action copies the prebuilt rlibs to a known location, removing the + # metadata part of the name. In the process it verifies we have all the + # libraries we expect and none that we don't. A depfile is generated so this + # step is re-run when any libraries change. The action script additionally + # verifies rustc matches the expected version, which is unrelated but this + # is a convenient place to do so. + # + # The action refers to `stdlib_files`, `skip_stdlib_files`, and the + # associated //build/config/rust.gni vars `removed_rust_stdlib_libs` and + # `added_rust_stdlib_libs` for which rlib files to expect. + # `extra_sysroot_libs` is also used to copy non-std libs, if any. 
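+      # (Illustrative: a prebuilt named libstd-a1b2c3d4e5f6.rlib, where the
+      # hash suffix is a hypothetical metadata value, is copied to
+      # libstd.rlib.)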
+ script = "find_std_rlibs.py" + depfile = "$target_out_dir/stdlib.d" + out_libdir = rebase_path(target_out_dir, root_build_dir) + out_depfile = rebase_path(depfile, root_build_dir) + + # For the rustc sysroot we must include even the rlibs we don't pass to the + # C++ linker. + all_stdlibs_to_copy = stdlib_files + skip_stdlib_files + args = [ + "--rust-bin-dir", + rebase_path("${rust_sysroot}/bin", root_build_dir), + "--output", + out_libdir, + "--depfile", + out_depfile, + + # Due to limitations in Ninja's handling of .d files, we have to pick + # *the first* of our outputs. To make diagnostics more obviously + # related to the Rust standard library, we ensure libstd.rlib is first. + "--depfile-target", + stdlib_files[0], + + # Create a dependency on the rustc version so this action is re-run when + # it changes. This argument is not actually read by the script. + "--rustc-revision", + rustc_revision, + ] + + if (!use_unverified_rust_toolchain) { + args += [ + "--stdlibs", + string_join(",", all_stdlibs_to_copy), + ] + + if (ignore_stdlib_files != []) { + args += [ + "--ignore-stdlibs", + string_join(",", ignore_stdlib_files), + ] + } + } + + if (extra_sysroot_libs != []) { + args += [ + "--extra-libs", + string_join(",", extra_sysroot_libs), + ] + } + + args += [ + "--target", + rust_abi_target, + ] + + outputs = [] + foreach(lib, all_stdlibs_to_copy) { + outputs += [ "$target_out_dir/lib$lib.rlib" ] + } + foreach(lib, extra_sysroot_libs) { + outputs += [ "$target_out_dir/$lib" ] + } + } + } else { + not_needed([ "ignore_stdlib_files" ]) + } + + # Construct sysroots for rustc invocations to better control what libraries + # are linked. We have two: one with copied prebuilt libraries, and one with + # our locally-built std. Both reside in root_out_dir: we must only have one of + # each per GN toolchain anyway. + + sysroot_lib_subdir = "lib/rustlib/$rust_abi_target/lib" + + if (prebuilt_libstd_supported) { + prebuilt_rustc_sysroot = "$root_out_dir/prebuilt_rustc_sysroot" + copy("prebuilt_rustc_sysroot") { + deps = [ ":find_stdlib" ] + sources = get_target_outputs(":find_stdlib") + outputs = + [ "$prebuilt_rustc_sysroot/$sysroot_lib_subdir/{{source_file_part}}" ] + } + + config("prebuilt_stdlib_for_rustc") { + # Match the output directory of :prebuilt_rustc_sysroot + sysroot = rebase_path(prebuilt_rustc_sysroot, root_build_dir) + rustflags = [ "--sysroot=$sysroot" ] + } + + # Use the sysroot generated by :prebuilt_rustc_sysroot. Almost all Rust targets should depend + # on this. + group("prebuilt_std_for_rustc") { + assert( + enable_rust, + "Some C++ target is including Rust code even though enable_rust=false") + all_dependent_configs = [ ":prebuilt_stdlib_for_rustc" ] + deps = [ ":prebuilt_rustc_sysroot" ] + } + + config("prebuilt_rust_stdlib_config") { + ldflags = [] + lib_dir = rebase_path("$prebuilt_rustc_sysroot/$sysroot_lib_subdir", + root_build_dir) + + # We're unable to make these files regular gn dependencies because + # they're prebuilt. Instead, we'll pass them in the ldflags. This doesn't + # work for all types of build because ldflags propagate differently from + # actual dependencies and therefore can end up in different targets from + # the remap_alloc.cc above. For example, in a component build, we might + # apply the remap_alloc.cc file and these ldlags to shared object A, + # while shared object B (that depends upon A) might get only the ldflags + # but not remap_alloc.cc, and thus the build will fail. 
There is + # currently no known solution to this for the prebuilt stdlib - this + # problem does not apply with configurations where we build the stdlib + # ourselves, which is what we'll use in production. + foreach(lib, stdlib_files) { + this_file = "$lib_dir/lib$lib.rlib" + ldflags += [ this_file ] + } + if (is_win) { + # TODO(crbug.com/1434092): This should really be `libs`, however that + # breaks. Normally, we specify lib files with the `.lib` suffix but + # then when rustc links an EXE, it invokes lld-link with `.lib.lib` + # instead. + # + # Omitting the `.lib` suffix breaks linking as well, when clang drives + # the linking step of a C++ EXE that depends on Rust. + ldflags += _windows_lib_deps + } + } + + # Provides std libs to non-rustc linkers. + group("link_prebuilt_std") { + assert( + enable_rust, + "Some C++ target is including Rust code even though enable_rust=false") + all_dependent_configs = [ ":prebuilt_rust_stdlib_config" ] + deps = [ + ":prebuilt_rustc_sysroot", + ":remap_alloc", + ] + } + } + + if (local_libstd_supported) { + local_rustc_sysroot = "$root_out_dir/local_rustc_sysroot" + + # All std targets starting with core build with our sysroot. It starts empty + # and is incrementally built. The directory must exist at the start. + generated_file("empty_sysroot_for_std_build") { + outputs = [ "$local_rustc_sysroot/$sysroot_lib_subdir/.empty" ] + contents = "" + } + + config("local_stdlib_for_rustc") { + sysroot = rebase_path(local_rustc_sysroot, root_build_dir) + rustflags = [ "--sysroot=$sysroot" ] + } + + # Target to be depended on by std build targets. Creates the initially empty + # sysroot. + group("std_build_deps") { + deps = [ ":empty_sysroot_for_std_build" ] + public_configs = [ ":local_stdlib_for_rustc" ] + } + + # Use the sysroot generated by :local_rustc_sysroot, which transitively builds + # std. Only for use in specific tests for now. + group("local_std_for_rustc") { + assert( + enable_rust, + "Some C++ target is including Rust code even though enable_rust=false") + all_dependent_configs = [ ":local_stdlib_for_rustc" ] + + deps = [] + foreach(libname, stdlib_files + skip_stdlib_files) { + deps += [ "rules:$libname" ] + } + } + + config("local_rust_stdlib_config") { + if (is_win) { + # TODO(crbug.com/1434092): This should really be `libs`, however that + # breaks. Normally, we specify lib files with the `.lib` suffix but + # then when rustc links an EXE, it invokes lld-link with `.lib.lib` + # instead. + # + # Omitting the `.lib` suffix breaks linking as well, when clang drives + # the linking step of a C++ EXE that depends on Rust. + ldflags = _windows_lib_deps + } + } + + # TODO(crbug.com/1368806): rework this so when using locally-built std, we + # don't link the prebuilt std as well. 
+
+    group("link_local_std") {
+      assert(
+          enable_rust,
+          "Some C++ target is including Rust code even though enable_rust=false")
+      all_dependent_configs = [ ":local_rust_stdlib_config" ]
+      deps = [
+        ":local_std_for_rustc",
+        ":remap_alloc",
+      ]
+    }
+  }
+}
diff --git a/rust/std/fake_root/.cargo/config.toml b/rust/std/fake_root/.cargo/config.toml
new file mode 100644
index 000000000000..72e14991cc22
--- /dev/null
+++ b/rust/std/fake_root/.cargo/config.toml
@@ -0,0 +1,5 @@
+[source.crates-io]
+replace-with = 'vendored-sources'
+
+[source.vendored-sources]
+directory = '../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor'
diff --git a/rust/std/fake_root/.gitignore b/rust/std/fake_root/.gitignore
new file mode 100644
index 000000000000..e9e21997b1ac
--- /dev/null
+++ b/rust/std/fake_root/.gitignore
@@ -0,0 +1,2 @@
+/target/
+/Cargo.lock
diff --git a/rust/std/fake_root/Cargo.toml b/rust/std/fake_root/Cargo.toml
new file mode 100644
index 000000000000..55f3a079a337
--- /dev/null
+++ b/rust/std/fake_root/Cargo.toml
@@ -0,0 +1,16 @@
+[package]
+name = "fake_root"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+test = { path = "../../../../third_party/rust-toolchain/lib/rustlib/src/rust/library/test" }
+
+[dependencies.std]
+path = "../../../../third_party/rust-toolchain/lib/rustlib/src/rust/library/std"
+features = ["backtrace", "profiler"]
+
+[patch.crates-io]
+rustc-std-workspace-core = { path = '../../../../third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-core' }
+rustc-std-workspace-alloc = { path = '../../../../third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-alloc' }
+rustc-std-workspace-std = { path = '../../../../third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-std' }
diff --git a/rust/std/fake_root/README.md b/rust/std/fake_root/README.md
new file mode 100644
index 000000000000..754a4b6b91bd
--- /dev/null
+++ b/rust/std/fake_root/README.md
@@ -0,0 +1,2 @@
+This package is used to discover the libstd deps using `cargo metadata`. gnrt
+uses it when generating libstd GN bindings.
diff --git a/rust/std/fake_root/src/main.rs b/rust/std/fake_root/src/main.rs
new file mode 100644
index 000000000000..2c54a522899c
--- /dev/null
+++ b/rust/std/fake_root/src/main.rs
@@ -0,0 +1,3 @@
+// Copyright 2023 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
diff --git a/rust/std/find_std_rlibs.py b/rust/std/find_std_rlibs.py
new file mode 100755
index 000000000000..85ab477a9450
--- /dev/null
+++ b/rust/std/find_std_rlibs.py
@@ -0,0 +1,164 @@
+#!/usr/bin/env python3
+
+# Copyright 2021 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# See BUILD.gn in this directory for an explanation of what this script is for.
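+
+# (Illustrative invocation, with hypothetical paths:
+#   find_std_rlibs.py --rust-bin-dir ../../third_party/rust-toolchain/bin \
+#     --output local_rustc_sysroot/lib --depfile stdlib.d \
+#     --depfile-target std --target x86_64-unknown-linux-gnu
+# )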
+ +import argparse +import os +import stat +import sys +import shutil +import subprocess +import re + +from collections import defaultdict + +EXPECTED_STDLIB_INPUT_REGEX = re.compile(r"([0-9a-z_]+)(?:-([0-9]+))?$") +RLIB_NAME_REGEX = re.compile(r"lib([0-9a-z_]+)-([0-9a-f]+)\.rlib$") + + +def main(): + parser = argparse.ArgumentParser("find_std_rlibs.py") + parser.add_argument("--rust-bin-dir", + help="Path to Rust binaries", + required=True), + parser.add_argument("--target", help="Rust target triple", required=False), + parser.add_argument("--output", + help="Path to rlibs without suffixes", + required=True) + parser.add_argument("--depfile", help="Path to write depfile", required=True) + parser.add_argument("--depfile-target", + help="Target to key depfile around", + required=True) + parser.add_argument("--stdlibs", + help="Expected list of standard library libraries") + parser.add_argument("--ignore-stdlibs", + help="List of sysroot libraries to ignore") + parser.add_argument("--extra-libs", + help="List of extra non-libstd sysroot libraries") + parser.add_argument("--rustc-revision", + help="Not used, just passed from GN to add a dependency" + " on the rustc version.") + args = parser.parse_args() + + # Expected rlibs by concise name (the crate name, plus a disambiguating suffix + # e.g. "-2" when necessary). + if args.stdlibs: + rlibs_expected = set() + for lib in args.stdlibs.split(','): + # The version is only included if there's more than one of `name`, and + # even then is only included for the 2nd onward. + (name, version) = EXPECTED_STDLIB_INPUT_REGEX.match(lib).group(1, 2) + if version is None: + rlibs_expected.add(name) + else: + rlibs_expected.add(f"{name}-{version}") + ignore_rlibs = set() + if args.ignore_stdlibs is not None: + ignore_rlibs = set(args.ignore_stdlibs.split(',')) + else: + rlibs_expected = None + + extra_libs = set() + if args.extra_libs: + for lib in args.extra_libs.split(','): + extra_libs.add(lib) + + # Ask rustc where to find the stdlib for this target. + rustc = os.path.join(args.rust_bin_dir, "rustc") + rustc_args = [rustc, "--print", "target-libdir"] + if args.target: + rustc_args.extend(["--target", args.target]) + rustlib_dir = subprocess.check_output(rustc_args).rstrip().decode() + + # Copy the rlibs to a predictable location. Whilst we're doing so, + # also write a .d file so that ninja knows it doesn't need to do this + # again unless the source rlibs change. + # Format: + # /lib: + with open(args.depfile, 'w') as depfile: + # Ninja isn't versatile at understanding depfiles. We have to say that a + # single output depends on all the inputs. We choose any one of the + # output rlibs for that purpose. If any of the input rlibs change, ninja + # will run this script again and we'll copy them all afresh. + depfile.write( + "%s:" % (os.path.join(args.output, "lib%s.rlib" % args.depfile_target))) + + def copy_file(infile, outfile): + depfile.write(f" {infile}") + if (not os.path.exists(outfile) + or os.stat(infile).st_mtime != os.stat(outfile).st_mtime): + if os.path.exists(outfile): + st = os.stat(outfile) + os.chmod(outfile, st.st_mode | stat.S_IWUSR) + shutil.copy(infile, outfile) + + # Each rlib is named "lib-.rlib". The metadata + # disambiguates multiple crates of the same name. We want to throw away the + # metadata and use stable names. To do so, we replace the metadata bit with + # a simple number 1, 2, etc. It doesn't matter how we assign these numbers + # as long as it's consistent for a particular set of rlibs. 
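+    # (Illustrative: if the distribution shipped libfoo-1a2b.rlib and
+    # libfoo-9f8e.rlib, the sorted order is stable, so they would be copied
+    # to libfoo.rlib and libfoo-2.rlib respectively.)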
+ + # The rlib names present in the Rust distribution, including metadata. We + # sort this list so crates of the same name are ordered by metadata. Also + # filter out names that aren't rlibs. + rlibs_present = [ + name for name in os.listdir(rustlib_dir) if name.endswith('.rlib') + ] + rlibs_present.sort() + + # Keep a count of the instances a crate name, so we can disambiguate the + # rlibs with an incrementing number at the end. + rlibs_seen = defaultdict(lambda: 0) + + for f in rlibs_present: + # As standard Rust includes a hash on the end of each filename + # representing certain metadata, to ensure that clients will link + # against the correct version. As gn will be manually passing + # the correct file path to our linker invocations, we don't need + # that, and it would prevent us having the predictable filenames + # which we need for statically computable gn dependency rules. + (crate_name, metadata) = RLIB_NAME_REGEX.match(f).group(1, 2) + + # Use the number of times we've seen this name to disambiguate the output + # filenames. Since we sort the input filenames including the metadata, + # this will be the same every time. + # + # Only append the times seen if it is greater than 1. This allows the + # BUILD.gn file to avoid adding '-1' to every name if there's only one + # version of a particular one. + rlibs_seen[crate_name] += 1 + if rlibs_seen[crate_name] == 1: + concise_name = crate_name + else: + concise_name = "%s-%d" % (crate_name, rlibs_seen[crate_name]) + + output_filename = f"lib{concise_name}.rlib" + + if rlibs_expected is not None: + if concise_name in ignore_rlibs: + continue + if concise_name not in rlibs_expected: + raise Exception("Found stdlib rlib that wasn't expected: %s" % f) + rlibs_expected.remove(concise_name) + + infile = os.path.join(rustlib_dir, f) + outfile = os.path.join(args.output, output_filename) + copy_file(infile, outfile) + + for f in extra_libs: + infile = os.path.join(rustlib_dir, f) + outfile = os.path.join(args.output, f) + copy_file(infile, outfile) + + depfile.write("\n") + if rlibs_expected: + raise Exception("We failed to find all expected stdlib rlibs: %s" % + ','.join(rlibs_expected)) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/rust/std/gnrt_config.toml b/rust/std/gnrt_config.toml new file mode 100644 index 000000000000..6caab33cb865 --- /dev/null +++ b/rust/std/gnrt_config.toml @@ -0,0 +1,60 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Provides per-crate and overall configuration options to gnrt. + +[all] +# force-unstable-if-unmarked prevents normal crates from inadvertently using +# symbols from std-internal dependencies in the sysroot. This is normally passed +# during an x.py build, but we have to do it manually. +rustflags = ['-Zforce-unstable-if-unmarked'] + +# Override the GN output dir. We direct std targets to output directly to the +# sysroot we'll use later. This must stay in sync with `local_rustc_sysroot` in +# //build/rust/std/BUILD.gn +output_dir = '$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/' + +# This target provides setup needed for building std. +extra_gn_deps = ['//build/rust/std:std_build_deps'] + +[crate.libc] +# Many flags are set by libc's build.rs based on new enough rustc but we don't +# run build scripts for std crates. 
+# See https://github.com/rust-lang/libc/blob/master/build.rs
+cfg = ['libc_priv_mod_use', 'libc_union', 'libc_const_size_of', 'libc_align',
+'libc_int128', 'libc_core_cvoid', 'libc_packedN', 'libc_cfg_target_vendor',
+'libc_non_exhaustive', 'libc_long_array', 'libc_ptr_addr_of',
+'libc_underscore_const_names', 'libc_const_extern_fn'
+]
+
+[crate.std]
+# Requires:
+# * cfg(backtrace_in_libstd) because it directly includes .rs files from the
+#   backtrace code rather than including it as a dependency. backtrace's
+#   implementation has special-purpose code to handle this.
+# * STD_ENV_ARCH is referenced in architecture-dependent code. Note this is
+#   the target arch, and as such `$rust_target_arch` is passed literally to
+#   GN. This variable is set at build time in build/config/rust.gni.
+#
+# See https://github.com/rust-lang/rust/blob/master/library/std/build.rs
+cfg = ['backtrace_in_libstd']
+env = ['STD_ENV_ARCH=$rust_target_arch']
+
+[crate.test]
+# Requires:
+# * CFG_DISABLE_UNSTABLE_FEATURES=0 to match how it's built by x.py.
+env = ['CFG_DISABLE_UNSTABLE_FEATURES=0']
+
+# test only depends on proc_macro as an internal detail of the Rust build, so
+# it's implicitly included with std/test. However, we list the std crates and
+# construct the sysroot explicitly. We don't need this implicit dependency,
+# and we don't even want it during cross-compiles (since we will only build
+# host proc_macro crates).
+exclude_deps_in_gn = ['proc_macro']
+
+[crate.unwind]
+# The unwind crate has #[link] directives to the native unwind library, but we
+# either get that automatically by linking with clang++, or build and link the
+# lib from //buildtools/third_party/libunwind explicitly. Disable -lfoo linker
+# flags from this crate.
+rustflags = ['-Zlink-directives=false']
diff --git a/rust/std/immediate_crash.h b/rust/std/immediate_crash.h
new file mode 100644
index 000000000000..bc273502c0b1
--- /dev/null
+++ b/rust/std/immediate_crash.h
@@ -0,0 +1,170 @@
+// Copyright 2021 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is copied from //base/immediate_crash.h.
+
+#ifndef BUILD_RUST_STD_IMMEDIATE_CRASH_H_
+#define BUILD_RUST_STD_IMMEDIATE_CRASH_H_
+
+#include "build/build_config.h"
+
+// Crashes in the fastest possible way with no attempt at logging.
+// There are several constraints; see http://crbug.com/664209 for more
+// context.
+//
+// - TRAP_SEQUENCE_() must be fatal. It should not be possible to ignore the
+//   resulting exception or simply hit 'continue' to skip over it in a
+//   debugger.
+// - Different instances of TRAP_SEQUENCE_() must not be folded together, to
+//   ensure crash reports are debuggable. Unlike __builtin_trap(), asm
+//   volatile blocks will not be folded together.
+//   Note: TRAP_SEQUENCE_() previously required an instruction with a unique
+//   nonce since unlike clang, GCC folds together identical asm volatile
+//   blocks.
+// - TRAP_SEQUENCE_() must produce a signal that is distinct from an invalid
+//   memory access.
+// - TRAP_SEQUENCE_() must be treated as a set of noreturn instructions.
+//   __builtin_unreachable() is used to provide that hint here. clang also
+//   uses this as a heuristic to pack the instructions in the function
+//   epilogue to improve code density.
+//
+// Additional properties that are nice to have:
+// - TRAP_SEQUENCE_() should be as compact as possible.
+// - The first instruction of TRAP_SEQUENCE_() should not change, to avoid
+//   shifting crash reporting clusters. As a consequence of this, explicit
+//   assembly is preferred over intrinsics.
+//   Note: this last bullet point may no longer be true, and may be removed
+//   in the future.
+
+// Note: TRAP_SEQUENCE_() is currently split into two macro helpers because
+// clang emits an actual instruction for __builtin_unreachable() on certain
+// platforms (see https://crbug.com/958675). In addition, the int3/bkpt/brk
+// will be removed in followups, so splitting it up like this now makes it
+// easy to land the followups.
+
+#if defined(COMPILER_GCC)
+
+#if BUILDFLAG(IS_NACL)
+
+// Crash report accuracy is not guaranteed on NaCl.
+#define TRAP_SEQUENCE1_() __builtin_trap()
+#define TRAP_SEQUENCE2_() asm volatile("")
+
+#elif defined(ARCH_CPU_X86_FAMILY)
+
+// TODO(https://crbug.com/958675): In theory, it should be possible to use
+// just int3. However, there are a number of crashes with SIGILL as the
+// exception code, so it seems likely that there's a signal handler that
+// allows execution to continue after SIGTRAP.
+#define TRAP_SEQUENCE1_() asm volatile("int3")
+
+#if BUILDFLAG(IS_APPLE)
+// Intentionally empty: __builtin_unreachable() is always part of the sequence
+// (see IMMEDIATE_CRASH below) and already emits a ud2 on Mac.
+#define TRAP_SEQUENCE2_() asm volatile("")
+#else
+#define TRAP_SEQUENCE2_() asm volatile("ud2")
+#endif  // BUILDFLAG(IS_APPLE)
+
+#elif defined(ARCH_CPU_ARMEL)
+
+// bkpt will generate a SIGBUS when running on armv7 and a SIGTRAP when
+// running as a 32-bit userspace app on arm64. There doesn't seem to be any
+// way to cause a SIGTRAP from userspace without using a syscall (which would
+// be a problem for sandboxing).
+// TODO(https://crbug.com/958675): Remove bkpt from this sequence.
+#define TRAP_SEQUENCE1_() asm volatile("bkpt #0")
+#define TRAP_SEQUENCE2_() asm volatile("udf #0")
+
+#elif defined(ARCH_CPU_ARM64)
+
+// This will always generate a SIGTRAP on arm64.
+// TODO(https://crbug.com/958675): Remove brk from this sequence.
+#define TRAP_SEQUENCE1_() asm volatile("brk #0")
+#define TRAP_SEQUENCE2_() asm volatile("hlt #0")
+
+#else
+
+// Crash report accuracy will not be guaranteed on other architectures, but
+// at least this will crash as expected.
+#define TRAP_SEQUENCE1_() __builtin_trap()
+#define TRAP_SEQUENCE2_() asm volatile("")
+
+#endif  // ARCH_CPU_*
+
+#elif defined(COMPILER_MSVC)
+
+#if !defined(__clang__)
+
+// MSVC x64 doesn't support inline asm, so use the MSVC intrinsic.
+#define TRAP_SEQUENCE1_() __debugbreak()
+#define TRAP_SEQUENCE2_()
+
+#elif defined(ARCH_CPU_ARM64)
+
+// Windows ARM64 uses "BRK #F000" as its breakpoint instruction, and
+// __debugbreak() generates that in both VC++ and clang.
+#define TRAP_SEQUENCE1_() __debugbreak()
+// Intentionally empty: __builtin_unreachable() is always part of the
+// sequence (see IMMEDIATE_CRASH below) and already emits a ud2 on Win64,
+// https://crbug.com/958373
+#define TRAP_SEQUENCE2_() __asm volatile("")
+
+#else
+
+#define TRAP_SEQUENCE1_() asm volatile("int3")
+#define TRAP_SEQUENCE2_() asm volatile("ud2")
+
+#endif  // __clang__
+
+#else
+
+#error No supported trap sequence!
+
+#endif  // COMPILER_GCC
+
+#define TRAP_SEQUENCE_() \
+  do {                   \
+    TRAP_SEQUENCE1_();   \
+    TRAP_SEQUENCE2_();   \
+  } while (false)
+
+// CHECK() and the trap sequence can be invoked from a constexpr function.
+// This could make compilation fail on GCC, as it forbids directly using
+// inline asm inside a constexpr function. However, it allows calling a
+// lambda expression including the same asm.
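+// For example (an illustrative sketch, not code from this header): GCC
+// rejects
+//
+//   constexpr void F() { TRAP_SEQUENCE_(); }  // error: asm in constexpr
+//
+// but accepts the same asm wrapped in an immediately-invoked lambda:
+//
+//   constexpr void F() { [] { TRAP_SEQUENCE_(); }(); }
+//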
+// The side effect is that the top of the stacktrace will not point to the
+// calling function, but to this anonymous lambda. This is still useful as
+// the full name of the lambda will typically include the name of the
+// function that calls CHECK() and the debugger will still break at the
+// right line of code.
+#if !defined(COMPILER_GCC) || defined(__clang__)
+
+#define WRAPPED_TRAP_SEQUENCE_() TRAP_SEQUENCE_()
+
+#else
+
+#define WRAPPED_TRAP_SEQUENCE_() \
+  do {                           \
+    [] { TRAP_SEQUENCE_(); }();  \
+  } while (false)
+
+#endif  // !defined(COMPILER_GCC) || defined(__clang__)
+
+#if defined(__clang__) || defined(COMPILER_GCC)
+
+// __builtin_unreachable() hints to the compiler that this is noreturn and
+// can be packed in the function epilogue.
+#define IMMEDIATE_CRASH()     \
+  ({                          \
+    WRAPPED_TRAP_SEQUENCE_(); \
+    __builtin_unreachable();  \
+  })
+
+#else
+
+// This supports non-chromium users of logging.h who build with MSVC, like
+// pdfium. On MSVC there is no __builtin_unreachable().
+#define IMMEDIATE_CRASH() WRAPPED_TRAP_SEQUENCE_()
+
+#endif  // defined(__clang__) || defined(COMPILER_GCC)
+
+#endif  // BUILD_RUST_STD_IMMEDIATE_CRASH_H_
diff --git a/rust/std/remap_alloc.cc b/rust/std/remap_alloc.cc
new file mode 100644
index 000000000000..7f8aa1d7b6f8
--- /dev/null
+++ b/rust/std/remap_alloc.cc
@@ -0,0 +1,152 @@
+// Copyright 2021 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+#include <stdlib.h>
+#include <string.h>
+#include <algorithm>
+
+#include "build/build_config.h"
+#include "build/rust/std/immediate_crash.h"
+
+#if BUILDFLAG(IS_ANDROID)
+#include <malloc.h>
+#endif
+
+// When linking a final binary, rustc has to pick between either:
+// * The default Rust allocator
+// * Any #[global_allocator] defined in *any rlib in its dependency tree*
+//   (https://doc.rust-lang.org/edition-guide/rust-2018/platform-and-target-support/global-allocators.html)
+//
+// In this latter case, this fact will be recorded in some of the metadata
+// within the .rlib file. (An .rlib file is just a .a file, but does have
+// additional metadata for use by rustc. This is, as far as I know, the only
+// such metadata we would ideally care about.)
+//
+// In all the linked rlibs,
+// * If 0 crates define a #[global_allocator], rustc uses its default
+//   allocator
+// * If 1 crate defines a #[global_allocator], rustc uses that
+// * If >1 crates define a #[global_allocator], rustc bombs out.
+//
+// Because rustc does these checks, it doesn't simply have the __rust_alloc
+// symbols defined anywhere (neither in the stdlib nor in any of these
+// crates which have a #[global_allocator] defined).
+//
+// Instead:
+// Rust's final linking stage invokes dynamic LLVM codegen to create symbols
+// for the basic heap allocation operations. It literally creates a
+// __rust_alloc symbol at link time. Unless any crate has specified a
+// #[global_allocator], it simply calls from __rust_alloc into
+// __rdl_alloc, which is the default Rust allocator. The same applies to a
+// few other symbols.
+//
+// We're not (always) using rustc for final linking. For cases where rustc
+// is not the final linker, we'll define those symbols here instead.
+//
+// The Rust stdlib on Windows uses GetProcessHeap() which will bypass
+// PartitionAlloc, so we do not forward these functions back to the stdlib.
+// Instead, we pass them to PartitionAlloc, while replicating functionality
+// from the Unix stdlib so that they still provide the same added
+// functionality on top of the system functions.
+//
+// In the future, we may build a crate with a #[global_allocator] and
+// redirect these symbols back to Rust in order to use that crate instead.
+//
+// Instead of going through system functions like malloc() we may want to
+// call into PA directly if we wished for Rust allocations to be in a
+// different partition, or similar, in the future.
+//
+// They're weak symbols, because this file will sometimes end up in targets
+// which are linked by rustc, and thus we would otherwise get duplicate
+// definitions. The following definitions will therefore only end up being
+// used in targets which are linked by our C++ toolchain.
+
+extern "C" {
+
+#ifdef COMPONENT_BUILD
+#define REMAP_ALLOC_ATTRIBUTES \
+  __attribute__((visibility("default"))) __attribute__((weak))
+#else
+#define REMAP_ALLOC_ATTRIBUTES __attribute__((weak))
+#endif  // COMPONENT_BUILD
+
+void* REMAP_ALLOC_ATTRIBUTES __rust_alloc(size_t size, size_t align) {
+  // This mirrors kMaxSupportedAlignment from
+  // base/allocator/partition_allocator/partition_alloc_constants.h.
+  // PartitionAlloc will crash if given an alignment larger than this.
+  constexpr size_t max_align = (1 << 21) / 2;
+  if (align > max_align) {
+    return nullptr;
+  }
+
+  if (align <= alignof(std::max_align_t)) {
+    return malloc(size);
+  } else {
+    // Note: PartitionAlloc by default will route aligned allocations back to
+    // malloc() (the fast path) if they are for a small enough alignment. So
+    // we just unconditionally use aligned allocation functions here.
+    // https://source.chromium.org/chromium/chromium/src/+/refs/heads/main:base/allocator/partition_allocator/shim/allocator_shim_default_dispatch_to_partition_alloc.cc;l=219-226;drc=31d99ff4aa0cc0b75063325ff243e911516a5a6a
+
+#if defined(COMPILER_MSVC)
+    // Because we use PartitionAlloc as the allocator, free() is able to find
+    // this allocation, instead of the usual requirement to use
+    // _aligned_free().
+    return _aligned_malloc(size, align);
+#elif BUILDFLAG(IS_ANDROID)
+    // Android has no posix_memalign() exposed:
+    // https://source.chromium.org/chromium/chromium/src/+/main:base/memory/aligned_memory.cc;l=24-30;drc=e4622aaeccea84652488d1822c28c78b7115684f
+    return memalign(align, size);
+#else
+    // The `align` from Rust is always a power of 2:
+    // https://doc.rust-lang.org/std/alloc/struct.Layout.html#method.from_size_align.
+    //
+    // We get here only if align > alignof(max_align_t), which guarantees
+    // that the alignment is both a power of 2 and even, which is required
+    // by posix_memalign().
+    //
+    // The PartitionAlloc impl requires that the alignment is at least the
+    // same as pointer-alignment. std::max_align_t is at least
+    // pointer-aligned as well, so we satisfy that.
+    void* p;
+    auto ret = posix_memalign(&p, align, size);
+    return ret == 0 ? p : nullptr;
+#endif
+  }
+}
+
+void REMAP_ALLOC_ATTRIBUTES __rust_dealloc(void* p, size_t size, size_t align) {
+  free(p);
+}
+
+void* REMAP_ALLOC_ATTRIBUTES __rust_realloc(void* p,
+                                            size_t old_size,
+                                            size_t align,
+                                            size_t new_size) {
+  if (align <= alignof(std::max_align_t)) {
+    return realloc(p, new_size);
+  } else {
+    // __rust_alloc() takes (size, align), in that order.
+    void* out = __rust_alloc(new_size, align);
+    if (out) {
+      memcpy(out, p, std::min(old_size, new_size));
+    }
+    return out;
+  }
+}
+
+void* REMAP_ALLOC_ATTRIBUTES __rust_alloc_zeroed(size_t size, size_t align) {
+  if (align <= alignof(std::max_align_t)) {
+    return calloc(size, 1);
+  } else {
+    void* p = __rust_alloc(size, align);
+    if (p) {
+      memset(p, 0, size);
+    }
+    return p;
+  }
+}
+
+void REMAP_ALLOC_ATTRIBUTES __rust_alloc_error_handler(size_t size,
+                                                       size_t align) {
+  IMMEDIATE_CRASH();
+}
+
+extern const unsigned char REMAP_ALLOC_ATTRIBUTES
+    __rust_alloc_error_handler_should_panic = 0;
+
+}  // extern "C"
diff --git a/rust/std/rules/BUILD.gn b/rust/std/rules/BUILD.gn
new file mode 100644
index 000000000000..ee52a6c56a14
--- /dev/null
+++ b/rust/std/rules/BUILD.gn
@@ -0,0 +1,878 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/rust/cargo_crate.gni")
+
+cargo_crate("addr2line") {
+  crate_type = "rlib"
+  crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/addr2line-0.17.0/src/lib.rs"
+  no_std = true
+
+  # Unit tests skipped. Generate with --with-tests to include them.
+  build_native_rust_unit_tests = false
+  sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/addr2line-0.17.0/src/lib.rs" ]
+  edition = "2015"
+  cargo_pkg_version = "0.17.0"
+  cargo_pkg_name = "addr2line"
+  cargo_pkg_description =
+      "A cross-platform symbolication library written in Rust, using `gimli`"
+  library_configs -= [ "//build/config/compiler:chromium_code" ]
+  library_configs += [ "//build/config/compiler:no_chromium_code" ]
+  executable_configs -= [ "//build/config/compiler:chromium_code" ]
+  executable_configs += [ "//build/config/compiler:no_chromium_code" ]
+  deps = [
+    ":compiler_builtins",
+    ":gimli",
+    ":rustc_std_workspace_alloc",
+    ":rustc_std_workspace_core",
+    "//build/rust/std:std_build_deps",
+  ]
+  aliased_deps = {
+    alloc = ":rustc_std_workspace_alloc__rlib"
+    core = ":rustc_std_workspace_core__rlib"
+  }
+  features = [
+    "alloc",
+    "compiler_builtins",
+    "core",
+    "rustc-dep-of-std",
+  ]
+  rustflags = [ "-Zforce-unstable-if-unmarked" ]
+  output_dir =
+      "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/"
+}
+cargo_crate("adler") {
+  crate_type = "rlib"
+  crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/adler-1.0.2/src/lib.rs"
+  no_std = true
+
+  # Unit tests skipped. Generate with --with-tests to include them.
+ build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/adler-1.0.2/src/lib.rs" ] + edition = "2015" + cargo_pkg_version = "1.0.2" + cargo_pkg_authors = "Jonas Schievink " + cargo_pkg_name = "adler" + cargo_pkg_description = + "A simple clean-room implementation of the Adler-32 checksum" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":compiler_builtins", + ":rustc_std_workspace_core", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + core = ":rustc_std_workspace_core__rlib" + } + features = [ + "compiler_builtins", + "core", + "rustc-dep-of-std", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("alloc") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/alloc/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/alloc/src/lib.rs" ] + edition = "2021" + cargo_pkg_version = "0.0.0" + cargo_pkg_name = "alloc" + cargo_pkg_description = "The Rust core allocation and collections library" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":compiler_builtins", + ":core", + "//build/rust/std:std_build_deps", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("cfg_if") { + crate_type = "rlib" + crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/cfg-if-1.0.0/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/cfg-if-1.0.0/src/lib.rs" ] + edition = "2018" + cargo_pkg_version = "1.0.0" + cargo_pkg_authors = "Alex Crichton " + cargo_pkg_name = "cfg-if" + cargo_pkg_description = "A macro to ergonomically define an item depending on a large number of #[cfg] parameters. Structured like an if-else chain, the first matching branch is the item that gets emitted." 
+ library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":compiler_builtins", + ":rustc_std_workspace_core", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + core = ":rustc_std_workspace_core__rlib" + } + features = [ + "compiler_builtins", + "core", + "rustc-dep-of-std", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("compiler_builtins") { + crate_type = "rlib" + crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/compiler_builtins-0.1.89/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/compiler_builtins-0.1.89/src/lib.rs" ] + edition = "2015" + cargo_pkg_version = "0.1.89" + cargo_pkg_authors = "Jorge Aparicio " + cargo_pkg_name = "compiler_builtins" + cargo_pkg_description = "Compiler intrinsics used by the Rust compiler. Also available for other targets if necessary!" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":rustc_std_workspace_core", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + core = ":rustc_std_workspace_core__rlib" + } + features = [ + "compiler-builtins", + "core", + "rustc-dep-of-std", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("core") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/core/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ + "//third_party/rust-toolchain/lib/rustlib/src/rust/library/core/src/lib.rs", + ] + edition = "2021" + cargo_pkg_version = "0.0.0" + cargo_pkg_name = "core" + cargo_pkg_description = "The Rust Core Library" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ "//build/rust/std:std_build_deps" ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("getopts") { + crate_type = "rlib" + crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/getopts-0.2.21/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. 
+ build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/getopts-0.2.21/src/lib.rs" ] + edition = "2015" + cargo_pkg_version = "0.2.21" + cargo_pkg_authors = "The Rust Project Developers" + cargo_pkg_name = "getopts" + cargo_pkg_description = "getopts-like option parsing." + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":rustc_std_workspace_core", + ":rustc_std_workspace_std", + ":unicode_width", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + core = ":rustc_std_workspace_core__rlib" + std = ":rustc_std_workspace_std__rlib" + } + features = [ + "core", + "rustc-dep-of-std", + "std", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("gimli") { + crate_type = "rlib" + crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/gimli-0.26.2/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/gimli-0.26.2/src/lib.rs" ] + edition = "2018" + cargo_pkg_version = "0.26.2" + cargo_pkg_name = "gimli" + cargo_pkg_description = + "A library for reading and writing the DWARF debugging format." + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":compiler_builtins", + ":rustc_std_workspace_alloc", + ":rustc_std_workspace_core", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + alloc = ":rustc_std_workspace_alloc__rlib" + core = ":rustc_std_workspace_core__rlib" + } + features = [ + "alloc", + "compiler_builtins", + "core", + "read", + "read-core", + "rustc-dep-of-std", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("hashbrown") { + crate_type = "rlib" + crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/hashbrown-0.12.3/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. 
+ build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/hashbrown-0.12.3/src/lib.rs" ] + edition = "2021" + cargo_pkg_version = "0.12.3" + cargo_pkg_authors = "Amanieu d'Antras " + cargo_pkg_name = "hashbrown" + cargo_pkg_description = "A Rust port of Google's SwissTable hash map" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":compiler_builtins", + ":rustc_std_workspace_alloc", + ":rustc_std_workspace_core", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + alloc = ":rustc_std_workspace_alloc__rlib" + core = ":rustc_std_workspace_core__rlib" + } + features = [ + "alloc", + "compiler_builtins", + "core", + "nightly", + "rustc-dep-of-std", + "rustc-internal-api", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("libc") { + crate_type = "rlib" + crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/libc-0.2.140/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/libc-0.2.140/src/lib.rs" ] + edition = "2015" + cargo_pkg_version = "0.2.140" + cargo_pkg_authors = "The Rust Project Developers" + cargo_pkg_name = "libc" + cargo_pkg_description = "Raw FFI bindings to platform libraries like libc." + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":rustc_std_workspace_core", + "//build/rust/std:std_build_deps", + ] + features = [ + "align", + "rustc-dep-of-std", + "rustc-std-workspace-core", + ] + rustflags = [ + "--cfg=libc_priv_mod_use", + "--cfg=libc_union", + "--cfg=libc_const_size_of", + "--cfg=libc_align", + "--cfg=libc_int128", + "--cfg=libc_core_cvoid", + "--cfg=libc_packedN", + "--cfg=libc_cfg_target_vendor", + "--cfg=libc_non_exhaustive", + "--cfg=libc_long_array", + "--cfg=libc_ptr_addr_of", + "--cfg=libc_underscore_const_names", + "--cfg=libc_const_extern_fn", + "-Zforce-unstable-if-unmarked", + ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("memchr") { + crate_type = "rlib" + crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/memchr-2.5.0/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/memchr-2.5.0/src/lib.rs" ] + edition = "2018" + cargo_pkg_version = "2.5.0" + cargo_pkg_authors = "Andrew Gallant , bluss" + cargo_pkg_name = "memchr" + cargo_pkg_description = "Safe interface to memchr." 
+ library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":compiler_builtins", + ":rustc_std_workspace_core", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + core = ":rustc_std_workspace_core__rlib" + } + features = [ + "compiler_builtins", + "core", + "rustc-dep-of-std", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("miniz_oxide") { + crate_type = "rlib" + crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/miniz_oxide-0.5.3/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/miniz_oxide-0.5.3/src/lib.rs" ] + edition = "2018" + cargo_pkg_version = "0.5.3" + cargo_pkg_authors = "Frommi , oyvindln " + cargo_pkg_name = "miniz_oxide" + cargo_pkg_description = "DEFLATE compression and decompression library rewritten in Rust based on miniz" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":adler", + ":compiler_builtins", + ":rustc_std_workspace_alloc", + ":rustc_std_workspace_core", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + alloc = ":rustc_std_workspace_alloc__rlib" + core = ":rustc_std_workspace_core__rlib" + } + features = [ + "alloc", + "compiler_builtins", + "core", + "rustc-dep-of-std", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("object") { + crate_type = "rlib" + crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/object-0.29.0/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/object-0.29.0/src/lib.rs" ] + edition = "2018" + cargo_pkg_version = "0.29.0" + cargo_pkg_name = "object" + cargo_pkg_description = + "A unified interface for reading and writing object file formats." 
+ library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":compiler_builtins", + ":memchr", + ":rustc_std_workspace_alloc", + ":rustc_std_workspace_core", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + alloc = ":rustc_std_workspace_alloc__rlib" + core = ":rustc_std_workspace_core__rlib" + } + features = [ + "alloc", + "archive", + "coff", + "compiler_builtins", + "core", + "elf", + "macho", + "pe", + "read_core", + "rustc-dep-of-std", + "unaligned", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("panic_abort") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/panic_abort/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/panic_abort/src/lib.rs" ] + edition = "2021" + cargo_pkg_version = "0.0.0" + cargo_pkg_name = "panic_abort" + cargo_pkg_description = "Implementation of Rust panics via process aborts" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":alloc", + ":cfg_if", + ":compiler_builtins", + ":core", + ":libc", + "//build/rust/std:std_build_deps", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("panic_unwind") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/panic_unwind/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/panic_unwind/src/lib.rs" ] + edition = "2021" + cargo_pkg_version = "0.0.0" + cargo_pkg_name = "panic_unwind" + cargo_pkg_description = "Implementation of Rust panics via stack unwinding" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":alloc", + ":cfg_if", + ":compiler_builtins", + ":core", + ":libc", + ":unwind", + "//build/rust/std:std_build_deps", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("proc_macro") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/proc_macro/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. 
+ build_native_rust_unit_tests = false + sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/proc_macro/src/lib.rs" ] + edition = "2021" + cargo_pkg_version = "0.0.0" + cargo_pkg_name = "proc_macro" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":core", + ":std", + "//build/rust/std:std_build_deps", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("profiler_builtins") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/profiler_builtins/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/profiler_builtins/src/lib.rs" ] + edition = "2021" + cargo_pkg_version = "0.0.0" + cargo_pkg_name = "profiler_builtins" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":compiler_builtins", + ":core", + "//build/rust/std:std_build_deps", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("rustc_demangle") { + crate_type = "rlib" + crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/rustc-demangle-0.1.21/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/rustc-demangle-0.1.21/src/lib.rs" ] + edition = "2015" + cargo_pkg_version = "0.1.21" + cargo_pkg_authors = "Alex Crichton " + cargo_pkg_name = "rustc-demangle" + cargo_pkg_description = "Rust compiler symbol demangling." + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":compiler_builtins", + ":rustc_std_workspace_core", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + core = ":rustc_std_workspace_core__rlib" + } + features = [ + "compiler_builtins", + "core", + "rustc-dep-of-std", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("rustc_std_workspace_alloc") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-alloc/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. 
+ build_native_rust_unit_tests = false + sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-alloc/lib.rs" ] + edition = "2021" + cargo_pkg_version = "1.99.0" + cargo_pkg_name = "rustc-std-workspace-alloc" + cargo_pkg_description = "Hack for the compiler's own build system" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":alloc", + "//build/rust/std:std_build_deps", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("rustc_std_workspace_core") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-core/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-core/lib.rs" ] + edition = "2021" + cargo_pkg_version = "1.99.0" + cargo_pkg_name = "rustc-std-workspace-core" + cargo_pkg_description = "Hack for the compiler's own build system" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":core", + "//build/rust/std:std_build_deps", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("rustc_std_workspace_std") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-std/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/rustc-std-workspace-std/lib.rs" ] + edition = "2021" + cargo_pkg_version = "1.99.0" + cargo_pkg_name = "rustc-std-workspace-std" + cargo_pkg_description = "Hack for the compiler's own build system" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":std", + "//build/rust/std:std_build_deps", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("std") { + crate_type = "rlib" + crate_root = + "//third_party/rust-toolchain/lib/rustlib/src/rust/library/std/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. 
+ build_native_rust_unit_tests = false + sources = [ + "//third_party/rust-toolchain/lib/rustlib/src/rust/library/std/src/lib.rs", + ] + edition = "2021" + cargo_pkg_version = "0.0.0" + cargo_pkg_name = "std" + cargo_pkg_description = "The Rust Standard Library" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":addr2line", + ":alloc", + ":cfg_if", + ":compiler_builtins", + ":core", + ":hashbrown", + ":libc", + ":miniz_oxide", + ":object", + ":panic_abort", + ":panic_unwind", + ":profiler_builtins", + ":rustc_demangle", + ":std_detect", + ":unwind", + "//build/rust/std:std_build_deps", + ] + features = [ + "addr2line", + "backtrace", + "gimli-symbolize", + "miniz_oxide", + "object", + "panic_unwind", + "profiler", + "profiler_builtins", + "std_detect_dlsym_getauxval", + "std_detect_file_io", + ] + rustenv = [ "STD_ENV_ARCH=$rust_target_arch" ] + rustflags = [ + "--cfg=backtrace_in_libstd", + "-Zforce-unstable-if-unmarked", + ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("std_detect") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/stdarch/crates/std_detect/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/stdarch/crates/std_detect/src/lib.rs" ] + edition = "2021" + cargo_pkg_version = "0.1.5" + cargo_pkg_authors = "Alex Crichton , Andrew Gallant , Gonzalo Brito Gadeschi " + cargo_pkg_name = "std_detect" + cargo_pkg_description = + "`std::detect` - Rust's standard library run-time CPU feature detection." + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":cfg_if", + ":compiler_builtins", + ":libc", + ":rustc_std_workspace_alloc", + ":rustc_std_workspace_core", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + alloc = ":rustc_std_workspace_alloc__rlib" + core = ":rustc_std_workspace_core__rlib" + } + features = [ + "alloc", + "compiler_builtins", + "core", + "libc", + "rustc-dep-of-std", + "std_detect_dlsym_getauxval", + "std_detect_file_io", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("test") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/test/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. 
+ build_native_rust_unit_tests = false + sources = [ + "//third_party/rust-toolchain/lib/rustlib/src/rust/library/test/src/lib.rs", + ] + edition = "2021" + cargo_pkg_version = "0.0.0" + cargo_pkg_name = "test" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":core", + ":getopts", + ":panic_abort", + ":panic_unwind", + ":std", + "//build/rust/std:std_build_deps", + ] + rustenv = [ "CFG_DISABLE_UNSTABLE_FEATURES=0" ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("unicode_width") { + crate_type = "rlib" + crate_root = "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/unicode-width-0.1.10/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. + build_native_rust_unit_tests = false + sources = [ "//build/rust/std/fake_root/../../../../third_party/rust-toolchain/lib/rustlib/src/rust/vendor/unicode-width-0.1.10/src/lib.rs" ] + edition = "2015" + cargo_pkg_version = "0.1.10" + cargo_pkg_authors = + "kwantam , Manish Goregaokar " + cargo_pkg_name = "unicode-width" + cargo_pkg_description = "Determine displayed width of `char` and `str` types according to Unicode Standard Annex #11 rules." + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":compiler_builtins", + ":rustc_std_workspace_core", + ":rustc_std_workspace_std", + "//build/rust/std:std_build_deps", + ] + aliased_deps = { + core = ":rustc_std_workspace_core__rlib" + std = ":rustc_std_workspace_std__rlib" + } + features = [ + "compiler_builtins", + "core", + "rustc-dep-of-std", + "std", + ] + rustflags = [ "-Zforce-unstable-if-unmarked" ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} +cargo_crate("unwind") { + crate_type = "rlib" + crate_root = "//third_party/rust-toolchain/lib/rustlib/src/rust/library/unwind/src/lib.rs" + no_std = true + + # Unit tests skipped. Generate with --with-tests to include them. 
+ build_native_rust_unit_tests = false + sources = [ "//third_party/rust-toolchain/lib/rustlib/src/rust/library/unwind/src/lib.rs" ] + edition = "2021" + cargo_pkg_version = "0.0.0" + cargo_pkg_name = "unwind" + library_configs -= [ "//build/config/compiler:chromium_code" ] + library_configs += [ "//build/config/compiler:no_chromium_code" ] + executable_configs -= [ "//build/config/compiler:chromium_code" ] + executable_configs += [ "//build/config/compiler:no_chromium_code" ] + deps = [ + ":cfg_if", + ":compiler_builtins", + ":core", + ":libc", + "//build/rust/std:std_build_deps", + ] + rustflags = [ + "-Zlink-directives=false", + "-Zforce-unstable-if-unmarked", + ] + output_dir = + "$root_out_dir/local_rustc_sysroot/lib/rustlib/$rust_abi_target/lib/" +} diff --git a/rust/tests/BUILD.gn b/rust/tests/BUILD.gn new file mode 100644 index 000000000000..6224e27b8568 --- /dev/null +++ b/rust/tests/BUILD.gn @@ -0,0 +1,97 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/rust.gni") +import("//build/rust/rust_unit_tests_group.gni") + +# Build some minimal binaries to exercise the Rust toolchain +# only if that toolchain is enabled in gn args. +group("tests") { + testonly = true + + deps = [ ":deps" ] + if (can_build_rust_unit_tests) { + deps += [ ":build_rust_tests" ] + } +} + +group("deps") { + testonly = true + deps = [] + + # All the rest require Rust. + if (toolchain_has_rust) { + deps += [ + "bindgen_test", + "test_aliased_deps", + "test_aliased_deps:test_aliased_deps_exe", + "test_bin_crate", + "test_cpp_including_rust", + "test_rlib_crate:target1", + "test_rlib_crate:target2", + "test_rlib_crate:test_rlib_crate_associated_bin", + "test_rust_exe", + "test_rust_multiple_dep_versions_exe", + "test_rust_shared_library", + "test_rust_static_library", + "test_serde_json_lenient", + "test_simple_rust_exe", + + # TODO(https://crbug.com/1329611): Enable the additional target below + # once `rs_bindings_from_cc` is distributed via `gclient sync`. In the + # meantime see the instructions in + # `//build/rust/run_rs_bindings_from_cc.py`. + #"test_rs_bindings_from_cc:test_rs_bindings_from_cc", + ] + if (can_build_rust_unit_tests) { + deps += [ + "bindgen_test:bindgen_test_lib_unittests", + "test_aliased_deps:test_aliased_deps_unittests", + "test_cpp_including_rust:test_cpp_including_rust_unittests", + "test_rlib_crate:target1_test_rlib_crate_v0_2_unittests", + "test_rlib_crate:target2_test_rlib_crate_v0_2_unittests", + "test_rust_exe:test_rust_exe_unittests", + "test_rust_multiple_dep_versions_exe/v1:test_lib_v1_unittests", + "test_rust_multiple_dep_versions_exe/v2:test_lib_v2_unittests", + "test_rust_shared_library:test_rust_shared_library_unittests", + "test_rust_static_library:test_rust_static_library_unittests", + "test_rust_static_library_non_standard_arrangement:foo_tests", + "test_rust_unittests", + + # TODO(https://crbug.com/1329611): Enable the additional target below + # once `rs_bindings_from_cc` is distributed via `gclient sync`. In the + # meantime see the instructions in + # `//build/rust/run_rs_bindings_from_cc.py`. + #"test_rs_bindings_from_cc:test_rs_bindings_from_cc_unittests", + ] + if (current_toolchain == host_toolchain_no_sanitizers) { + # Build these proc macro tests only on toolchains where we'd build the + # proc macro itself. 
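+        # (Proc macros execute inside the compiler on the build host, so
+        # they are never built for the target toolchain or with sanitizers.)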
+ deps += [ "test_proc_macro_crate:test_proc_macro_crate_v0_2_unittests" ] + } + } + + if (local_libstd_supported) { + deps += [ + "test_local_std", + "test_local_std:test_local_std_exe", + ] + if (can_build_rust_unit_tests) { + deps += [ "test_local_std:test_local_std_unittests" ] + } + } + + if (is_win) { + deps += [ "test_control_flow_guard" ] + } + } +} + +if (can_build_rust_unit_tests) { + # Generates a script that will run all the native Rust unit tests, in order + # to have them all part of a single test step on infra bots. + rust_unit_tests_group("build_rust_tests") { + deps = [ ":deps" ] + } +} diff --git a/rust/tests/bindgen_test/BUILD.gn b/rust/tests/bindgen_test/BUILD.gn new file mode 100644 index 000000000000..ce0fbc43539f --- /dev/null +++ b/rust/tests/bindgen_test/BUILD.gn @@ -0,0 +1,48 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/rust_bindgen.gni") +import("//build/rust/rust_executable.gni") +import("//build/rust/rust_static_library.gni") + +source_set("c_lib_headers") { + sources = [ + "lib.h", + "lib2.h", + ] +} + +component("c_lib") { + sources = [ "lib.c" ] + + deps = [ ":c_lib_headers" ] + + defines = [ "COMPONENT_IMPLEMENTATION" ] +} + +rust_bindgen("c_lib_bindgen") { + header = "lib.h" + deps = [ ":c_lib_headers" ] +} + +rust_static_library("bindgen_test_lib") { + allow_unsafe = true + deps = [ + ":c_lib", + ":c_lib_bindgen", + ] + sources = [ "src/lib.rs" ] + build_native_rust_unit_tests = true + crate_root = "src/lib.rs" + + bindgen_output = get_target_outputs(":c_lib_bindgen") + inputs = bindgen_output + rustenv = [ "BINDGEN_RS_FILE=" + rebase_path(bindgen_output[0]) ] +} + +rust_executable("bindgen_test") { + deps = [ ":bindgen_test_lib" ] + sources = [ "main.rs" ] + crate_root = "main.rs" +} diff --git a/rust/tests/bindgen_test/lib.c b/rust/tests/bindgen_test/lib.c new file mode 100644 index 000000000000..3223772e1387 --- /dev/null +++ b/rust/tests/bindgen_test/lib.c @@ -0,0 +1,11 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "build/rust/tests/bindgen_test/lib.h" + +#include + +COMPONENT_EXPORT uint32_t add_two_numbers(uint32_t a, uint32_t b) { + return a + b; +} diff --git a/rust/tests/bindgen_test/lib.h b/rust/tests/bindgen_test/lib.h new file mode 100644 index 000000000000..a6d686e82707 --- /dev/null +++ b/rust/tests/bindgen_test/lib.h @@ -0,0 +1,45 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef BUILD_RUST_TESTS_BINDGEN_TEST_LIB_H_ +#define BUILD_RUST_TESTS_BINDGEN_TEST_LIB_H_ + +#include "build/rust/tests/bindgen_test/lib2.h" + +#include + +// The following is equivalent to //base/base_export.h. 
+
+#if defined(COMPONENT_BUILD)
+#if defined(WIN32)
+
+#if defined(COMPONENT_IMPLEMENTATION)
+#define COMPONENT_EXPORT __declspec(dllexport)
+#else
+#define COMPONENT_EXPORT __declspec(dllimport)
+#endif  // defined(COMPONENT_IMPLEMENTATION)
+
+#else  // defined(WIN32)
+#if defined(COMPONENT_IMPLEMENTATION)
+#define COMPONENT_EXPORT __attribute__((visibility("default")))
+#else
+#define COMPONENT_EXPORT
+#endif  // defined(COMPONENT_IMPLEMENTATION)
+#endif  // defined(WIN32)
+
+#else  // defined(COMPONENT_BUILD)
+#define COMPONENT_EXPORT
+#endif  // defined(COMPONENT_BUILD)
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+COMPONENT_EXPORT uint32_t add_two_numbers(uint32_t a, uint32_t b);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // BUILD_RUST_TESTS_BINDGEN_TEST_LIB_H_
diff --git a/rust/tests/bindgen_test/lib2.h b/rust/tests/bindgen_test/lib2.h
new file mode 100644
index 000000000000..f747a6f53b9f
--- /dev/null
+++ b/rust/tests/bindgen_test/lib2.h
@@ -0,0 +1,10 @@
+// Copyright 2022 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef BUILD_RUST_TESTS_BINDGEN_TEST_LIB2_H_
+#define BUILD_RUST_TESTS_BINDGEN_TEST_LIB2_H_
+
+// This file does nothing; it just tests the include paths when running
+// bindgen.
+
+#endif  // BUILD_RUST_TESTS_BINDGEN_TEST_LIB2_H_
diff --git a/rust/tests/bindgen_test/main.rs b/rust/tests/bindgen_test/main.rs
new file mode 100644
index 000000000000..499d93daf89b
--- /dev/null
+++ b/rust/tests/bindgen_test/main.rs
@@ -0,0 +1,9 @@
+// Copyright 2022 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+use bindgen_test_lib::add_two_numbers_in_c;
+
+fn main() {
+    println!("{} + {} = {}", 3, 7, add_two_numbers_in_c(3, 7));
+}
diff --git a/rust/tests/bindgen_test/src/lib.rs b/rust/tests/bindgen_test/src/lib.rs
new file mode 100644
index 000000000000..c8672e06aab7
--- /dev/null
+++ b/rust/tests/bindgen_test/src/lib.rs
@@ -0,0 +1,25 @@
+// Copyright 2022 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+mod c_ffi {
+    #![allow(dead_code)]
+    #![allow(non_snake_case)]
+    #![allow(non_camel_case_types)]
+    #![allow(non_upper_case_globals)]
+    include!(env!("BINDGEN_RS_FILE"));
+}
+
+pub fn add_two_numbers_in_c(a: u32, b: u32) -> u32 {
+    unsafe { c_ffi::add_two_numbers(a, b) }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_add_two_numbers() {
+        assert_eq!(add_two_numbers_in_c(5, 10), 15);
+    }
+}
diff --git a/rust/tests/test_aliased_deps/BUILD.gn b/rust/tests/test_aliased_deps/BUILD.gn
new file mode 100644
index 000000000000..45ad73f44d3a
--- /dev/null
+++ b/rust/tests/test_aliased_deps/BUILD.gn
@@ -0,0 +1,30 @@
+# Copyright 2022 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/rust.gni")
+import("//build/rust/rust_executable.gni")
+import("//build/rust/rust_static_library.gni")
+
+rust_executable("test_aliased_deps_exe") {
+  crate_root = "main.rs"
+  sources = [ crate_root ]
+  deps = [ ":test_aliased_deps" ]
+}
+
+rust_static_library("test_aliased_deps") {
+  crate_root = "lib.rs"
+  sources = [ crate_root ]
+  deps = [ ":real_name" ]
+  aliased_deps = {
+    # Unfortunately we have to know the `__rlib` suffix which is attached to
+    # the actual rlib in `rust_static_library()`.
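+    # With this alias in place, the Rust code refers to the dependency as
+    # `other_name` (e.g. `other_name::add(2, 3)` in lib.rs) even though the
+    # dependency's real crate name is `real_name`.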
+ other_name = ":real_name__rlib" + } + build_native_rust_unit_tests = true +} + +rust_static_library("real_name") { + crate_root = "real_name.rs" + sources = [ crate_root ] +} diff --git a/rust/tests/test_aliased_deps/lib.rs b/rust/tests/test_aliased_deps/lib.rs new file mode 100644 index 000000000000..dcaa3431c52e --- /dev/null +++ b/rust/tests/test_aliased_deps/lib.rs @@ -0,0 +1,11 @@ +// Copyright 2023 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +pub use other_name; + +#[cfg(test)] +#[test] +fn test_add_from_renamed_dep() { + assert_eq!(other_name::add(2, 3), 5); +} diff --git a/rust/tests/test_aliased_deps/main.rs b/rust/tests/test_aliased_deps/main.rs new file mode 100644 index 000000000000..8f33abecefd8 --- /dev/null +++ b/rust/tests/test_aliased_deps/main.rs @@ -0,0 +1,7 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +fn main() { + test_aliased_deps::other_name::hello_world(); +} diff --git a/rust/tests/test_aliased_deps/real_name.rs b/rust/tests/test_aliased_deps/real_name.rs new file mode 100644 index 000000000000..15f084f3f412 --- /dev/null +++ b/rust/tests/test_aliased_deps/real_name.rs @@ -0,0 +1,11 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +pub fn hello_world() { + println!("hello world"); +} + +pub fn add(a: u32, b: u32) -> u32 { + a + b +} diff --git a/rust/tests/test_bin_crate/BUILD.gn b/rust/tests/test_bin_crate/BUILD.gn new file mode 100644 index 000000000000..ac47ee0a4cd7 --- /dev/null +++ b/rust/tests/test_bin_crate/BUILD.gn @@ -0,0 +1,15 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/cargo_crate.gni") + +cargo_crate("test_bin_crate") { + crate_type = "bin" + crate_root = "crate/src/main.rs" + sources = [ "crate/src/main.rs" ] + build_sources = [ "crate/build.rs" ] + build_root = "crate/build.rs" + + rustenv = [ "BUILD_SCRIPT_TEST_VARIABLE=123" ] +} diff --git a/rust/tests/test_bin_crate/crate/build.rs b/rust/tests/test_bin_crate/crate/build.rs new file mode 100644 index 000000000000..a1051eb6cd1a --- /dev/null +++ b/rust/tests/test_bin_crate/crate/build.rs @@ -0,0 +1,62 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +use std::env; +use std::process::Command; +use std::str::{self, FromStr}; + +fn main() { + println!("cargo:rustc-cfg=build_script_ran"); + let minor = match rustc_minor_version() { + Some(minor) => minor, + None => return, + }; + + let target = env::var("TARGET").unwrap(); + + if minor >= 34 { + println!("cargo:rustc-cfg=is_new_rustc"); + } else { + println!("cargo:rustc-cfg=is_old_rustc"); + } + + if target.contains("android") { + println!("cargo:rustc-cfg=is_android"); + } + if target.contains("darwin") { + println!("cargo:rustc-cfg=is_mac"); + } + + // Check that we can get a `rustenv` variable from the build script. 
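+    // (`BUILD_SCRIPT_TEST_VARIABLE=123` is set via `rustenv` in
+    // test_bin_crate's BUILD.gn; since env!() is expanded at compile time,
+    // the build fails outright if the variable is missing.)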
+    let _ = env!("BUILD_SCRIPT_TEST_VARIABLE");
+}
+
+fn rustc_minor_version() -> Option<u32> {
+    let rustc = match env::var_os("RUSTC") {
+        Some(rustc) => rustc,
+        None => return None,
+    };
+
+    let output = match Command::new(rustc).arg("--version").output() {
+        Ok(output) => output,
+        Err(_) => return None,
+    };
+
+    let version = match str::from_utf8(&output.stdout) {
+        Ok(version) => version,
+        Err(_) => return None,
+    };
+
+    let mut pieces = version.split('.');
+    if pieces.next() != Some("rustc 1") {
+        return None;
+    }
+
+    let next = match pieces.next() {
+        Some(next) => next,
+        None => return None,
+    };
+
+    u32::from_str(next).ok()
+}
diff --git a/rust/tests/test_bin_crate/crate/src/main.rs b/rust/tests/test_bin_crate/crate/src/main.rs
new file mode 100644
index 000000000000..08fff49b94c3
--- /dev/null
+++ b/rust/tests/test_bin_crate/crate/src/main.rs
@@ -0,0 +1,15 @@
+// Copyright 2021 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+fn main() {
+    println!("Hello, world!");
+    #[cfg(is_new_rustc)]
+    println!("Is new rustc!");
+    #[cfg(is_old_rustc)]
+    println!("Is old rustc!");
+    #[cfg(is_android)]
+    println!("Is android!");
+    #[cfg(is_mac)]
+    println!("Is darwin!");
+}
diff --git a/rust/tests/test_control_flow_guard/BUILD.gn b/rust/tests/test_control_flow_guard/BUILD.gn
new file mode 100644
index 000000000000..202c5b0f0f3c
--- /dev/null
+++ b/rust/tests/test_control_flow_guard/BUILD.gn
@@ -0,0 +1,14 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/rust/rust_executable.gni")
+
+rust_executable("test_control_flow_guard") {
+  allow_unsafe = true
+  crate_root = "test_control_flow_guard.rs"
+  sources = [ crate_root ]
+
+  # Used as a data dep by base_unittests.
+  is_data_dep = true
+}
diff --git a/rust/tests/test_control_flow_guard/test_control_flow_guard.rs b/rust/tests/test_control_flow_guard/test_control_flow_guard.rs
new file mode 100644
index 000000000000..d303d3dc8328
--- /dev/null
+++ b/rust/tests/test_control_flow_guard/test_control_flow_guard.rs
@@ -0,0 +1,43 @@
+// Copyright 2023 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+//! To test that CFG is working, build this executable on Windows and run it
+//! as:
+//!
+//! `out\Release\cdb\cdb.exe -G -g -o .\out\Release\test_control_flow_guard.exe`
+//!
+//! Which should print:
+//! ```
+//! (a2d4.bcd8): Security check failure or stack buffer overrun - code c0000409
+//! (!!! second chance !!!)
+//! Subcode: 0xa FAST_FAIL_GUARD_ICALL_CHECK_FAILURE
+//! ```
+//!
+//! If cdb.exe is not present, first run `ninja -C out\Release cdb\cdb.exe`.
+
+use std::arch::asm;
+
+#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
+const NOP_INSTRUCTION_SIZE: usize = 1;
+#[cfg(target_arch = "aarch64")]
+const NOP_INSTRUCTION_SIZE: usize = 4;
+
+#[inline(never)]
+fn nop_sled() {
+    unsafe { asm!("nop", "nop", "ret",) }
+}
+
+#[inline(never)]
+fn indirect_call(func: fn()) {
+    func();
+}
+
+fn main() {
+    // Form a function pointer into the middle of nop_sled(), which is not a
+    // valid CFG call target.
+    let fptr = unsafe {
+        std::mem::transmute::<usize, fn()>(nop_sled as usize + NOP_INSTRUCTION_SIZE)
+    };
+    // Generates a FAST_FAIL_GUARD_ICALL_CHECK_FAILURE if CFG triggers.
+    indirect_call(fptr);
+    // Should only reach here if CFG is disabled.
+ eprintln!("failed"); +} diff --git a/rust/tests/test_cpp_including_rust/BUILD.gn b/rust/tests/test_cpp_including_rust/BUILD.gn new file mode 100644 index 000000000000..2157b79880c3 --- /dev/null +++ b/rust/tests/test_cpp_including_rust/BUILD.gn @@ -0,0 +1,23 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//base/allocator/allocator.gni") +import("//testing/test.gni") + +executable("test_cpp_including_rust") { + sources = [ "main.cc" ] + deps = [ "//build/rust/tests/test_rust_static_library" ] +} + +test("test_cpp_including_rust_unittests") { + sources = [ "unittests.cc" ] + deps = [ + "//base", + "//base/allocator:buildflags", + "//base/test:run_all_unittests", + "//build/rust/tests/test_rust_static_library", + "//testing/gmock", + "//testing/gtest", + ] +} diff --git a/rust/tests/test_cpp_including_rust/main.cc b/rust/tests/test_cpp_including_rust/main.cc new file mode 100644 index 000000000000..d515a3464459 --- /dev/null +++ b/rust/tests/test_cpp_including_rust/main.cc @@ -0,0 +1,11 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "build/rust/tests/test_rust_static_library/src/lib.rs.h" + +int main(int argc, char* argv[]) { + say_hello(); + add_two_ints_via_rust(3, 4); + return 0; +} diff --git a/rust/tests/test_cpp_including_rust/unittests.cc b/rust/tests/test_cpp_including_rust/unittests.cc new file mode 100644 index 000000000000..f3b65ad2f40c --- /dev/null +++ b/rust/tests/test_cpp_including_rust/unittests.cc @@ -0,0 +1,31 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include + +#include + +#include "base/allocator/buildflags.h" +#include "base/allocator/partition_allocator/address_pool_manager_bitmap.h" +#include "base/allocator/partition_allocator/partition_address_space.h" +#include "build/build_config.h" +#include "build/buildflag.h" +#include "testing/gtest/include/gtest/gtest.h" + +#include "build/rust/tests/test_rust_static_library/src/lib.rs.h" + +TEST(RustTest, CppCallingIntoRust_BasicFFI) { + EXPECT_EQ(7, add_two_ints_via_rust(3, 4)); +} + +TEST(RustTest, RustComponentUsesPartitionAlloc) { + // Verify that PartitionAlloc is consistently used in C++ and Rust. + auto cpp_allocated_int = std::make_unique(); + SomeStruct* rust_allocated_ptr = allocate_via_rust().into_raw(); + EXPECT_EQ(partition_alloc::IsManagedByPartitionAlloc( + reinterpret_cast(rust_allocated_ptr)), + partition_alloc::IsManagedByPartitionAlloc( + reinterpret_cast(cpp_allocated_int.get()))); + rust::Box::from_raw(rust_allocated_ptr); +} diff --git a/rust/tests/test_local_std/BUILD.gn b/rust/tests/test_local_std/BUILD.gn new file mode 100644 index 000000000000..499aebdd7dc4 --- /dev/null +++ b/rust/tests/test_local_std/BUILD.gn @@ -0,0 +1,23 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import("//build/config/rust.gni") +import("//build/rust/rust_executable.gni") +import("//build/rust/rust_static_library.gni") + +assert(local_libstd_supported) + +rust_static_library("test_local_std") { + sources = [ "lib.rs" ] + crate_root = "lib.rs" + build_native_rust_unit_tests = true + use_local_std = true +} + +rust_executable("test_local_std_exe") { + sources = [ "main.rs" ] + crate_root = "main.rs" + deps = [ ":test_local_std" ] + use_local_std = true +} diff --git a/rust/tests/test_local_std/lib.rs b/rust/tests/test_local_std/lib.rs new file mode 100644 index 000000000000..6328cf415d44 --- /dev/null +++ b/rust/tests/test_local_std/lib.rs @@ -0,0 +1,8 @@ +// Copyright 2023 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#[test] +fn test_test() { + assert_eq!(1, 1); +} diff --git a/rust/tests/test_local_std/main.rs b/rust/tests/test_local_std/main.rs new file mode 100644 index 000000000000..746e0216ed2e --- /dev/null +++ b/rust/tests/test_local_std/main.rs @@ -0,0 +1,7 @@ +// Copyright 2023 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +fn main() { + println!("hello world"); +} diff --git a/rust/tests/test_proc_macro_crate/BUILD.gn b/rust/tests/test_proc_macro_crate/BUILD.gn new file mode 100644 index 000000000000..c9b56a4232f2 --- /dev/null +++ b/rust/tests/test_proc_macro_crate/BUILD.gn @@ -0,0 +1,12 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/cargo_crate.gni") + +cargo_crate("test_proc_macro_crate") { + crate_root = "crate/src/lib.rs" + crate_type = "proc-macro" + sources = [ "crate/src/lib.rs" ] + epoch = "0.2" +} diff --git a/rust/tests/test_proc_macro_crate/crate/src/lib.rs b/rust/tests/test_proc_macro_crate/crate/src/lib.rs new file mode 100644 index 000000000000..6d4025fd74bf --- /dev/null +++ b/rust/tests/test_proc_macro_crate/crate/src/lib.rs @@ -0,0 +1,10 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +use proc_macro::TokenStream; + +#[proc_macro] +pub fn calculate_using_proc_macro(_item: TokenStream) -> TokenStream { + "(15 + 15)".parse().unwrap() +} diff --git a/rust/tests/test_rlib_crate/BUILD.gn b/rust/tests/test_rlib_crate/BUILD.gn new file mode 100644 index 000000000000..9410316a2da8 --- /dev/null +++ b/rust/tests/test_rlib_crate/BUILD.gn @@ -0,0 +1,55 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/cargo_crate.gni") + +cargo_crate("target1") { + crate_name = "test_rlib_crate" + crate_root = "crate/src/lib.rs" + sources = [ "crate/src/lib.rs" ] + build_sources = [ "crate/build.rs" ] + build_root = "crate/build.rs" + build_script_outputs = [ "generated/generated.rs" ] + epoch = "0.2" + features = [ + "my-feature_a", + "my-feature_b", + ] + rustflags = [ + "--cfg", + "test_a_and_b", + ] + rustenv = [ "ENV_VAR_FOR_BUILD_SCRIPT=42" ] +} + +# Test that we can build the same crate in multiple ways under different GN +# rules without conflicts. 
+cargo_crate("target2") { + crate_name = "test_rlib_crate" + crate_root = "crate/src/lib.rs" + sources = [ "crate/src/lib.rs" ] + build_sources = [ "crate/build.rs" ] + build_root = "crate/build.rs" + build_script_outputs = [ "generated/generated.rs" ] + epoch = "0.2" + features = [ "my-feature_a" ] + rustenv = [ "ENV_VAR_FOR_BUILD_SCRIPT=42" ] +} + +# Exists to test the case that a single crate has both a library +# and a binary, to ensure that shared build products (e.g. the +# build script) don't conflict. +cargo_crate("test_rlib_crate_associated_bin") { + crate_root = "crate/src/main.rs" + crate_type = "bin" + sources = [ "crate/src/main.rs" ] + build_sources = [ "crate/build.rs" ] + build_root = "crate/build.rs" + features = [ + "my-feature_a", + "my-feature_b", + ] + rustenv = [ "ENV_VAR_FOR_BUILD_SCRIPT=42" ] + deps = [ ":target1" ] +} diff --git a/rust/tests/test_rlib_crate/crate/build.rs b/rust/tests/test_rlib_crate/crate/build.rs new file mode 100644 index 000000000000..037e2635bce2 --- /dev/null +++ b/rust/tests/test_rlib_crate/crate/build.rs @@ -0,0 +1,90 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +use std::env; +use std::io::Write; +use std::path::Path; +use std::process::Command; +use std::str::{self, FromStr}; + +fn main() { + println!("cargo:rustc-cfg=build_script_ran"); + let minor = match rustc_minor_version() { + Some(minor) => minor, + None => return, + }; + + let target = env::var("TARGET").unwrap(); + + if minor >= 34 { + println!("cargo:rustc-cfg=is_new_rustc"); + } else { + println!("cargo:rustc-cfg=is_old_rustc"); + } + + if target.contains("android") { + println!("cargo:rustc-cfg=is_android"); + } + if target.contains("darwin") { + println!("cargo:rustc-cfg=is_mac"); + } + + let feature_a_enabled = env::var_os("CARGO_FEATURE_MY_FEATURE_A").is_some(); + if feature_a_enabled { + println!("cargo:rustc-cfg=has_feature_a"); + } + let feature_b_enabled = env::var_os("CARGO_FEATURE_MY_FEATURE_B").is_some(); + if feature_b_enabled { + println!("cargo:rustc-cfg=has_feature_b"); + } + + // Some tests as to whether we're properly emulating various cargo features. + assert!(Path::new(&env::var_os("CARGO_MANIFEST_DIR").unwrap()).join("build.rs").exists()); + assert!(Path::new("build.rs").exists()); + assert!(Path::new(&env::var_os("OUT_DIR").unwrap()).exists()); + // Confirm the following env var is set, but do not attempt to validate content + // since the whole point is that it will differ on different platforms. 
+ env::var_os("CARGO_CFG_TARGET_ARCH").unwrap(); + + generate_some_code().unwrap(); +} + +fn generate_some_code() -> std::io::Result<()> { + let output_dir = Path::new(&env::var_os("OUT_DIR").unwrap()).join("generated"); + let _ = std::fs::create_dir_all(&output_dir); + // Test that environment variables from .gn files are passed to build scripts + let preferred_number = env::var("ENV_VAR_FOR_BUILD_SCRIPT").unwrap(); + let mut file = std::fs::File::create(output_dir.join("generated.rs"))?; + write!(file, "fn run_some_generated_code() -> u32 {{ {} }}", preferred_number)?; + Ok(()) +} + +fn rustc_minor_version() -> Option { + let rustc = match env::var_os("RUSTC") { + Some(rustc) => rustc, + None => return None, + }; + + let output = match Command::new(rustc).arg("--version").output() { + Ok(output) => output, + Err(_) => return None, + }; + + let version = match str::from_utf8(&output.stdout) { + Ok(version) => version, + Err(_) => return None, + }; + + let mut pieces = version.split('.'); + if pieces.next() != Some("rustc 1") { + return None; + } + + let next = match pieces.next() { + Some(next) => next, + None => return None, + }; + + u32::from_str(next).ok() +} diff --git a/rust/tests/test_rlib_crate/crate/src/lib.rs b/rust/tests/test_rlib_crate/crate/src/lib.rs new file mode 100644 index 000000000000..4fb672286310 --- /dev/null +++ b/rust/tests/test_rlib_crate/crate/src/lib.rs @@ -0,0 +1,56 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +include!(concat!(env!("OUT_DIR"), "/generated/generated.rs")); + +pub fn say_hello_from_crate() { + assert_eq!(run_some_generated_code(), 42); + #[cfg(is_new_rustc)] + println!("Is new rustc!"); + #[cfg(is_old_rustc)] + println!("Is old rustc!"); + #[cfg(is_android)] + println!("Is android!"); + #[cfg(is_mac)] + println!("Is darwin!"); + #[cfg(has_feature_a)] + println!("Has feature A!"); + #[cfg(not(has_feature_a))] + panic!("Wasn't passed feature a"); + #[cfg(not(has_feature_b))] + #[cfg(test_a_and_b)] + panic!("Wasn't passed feature b"); + #[cfg(has_feature_b)] + #[cfg(not(test_a_and_b))] + panic!("Was passed feature b"); +} + +#[cfg(test)] +mod tests { + /// Test features are passed through from BUILD.gn correctly. This test is + /// the target1 configuration. + #[test] + #[cfg(test_a_and_b)] + fn test_features_passed_target1() { + #[cfg(not(has_feature_a))] + panic!("Wasn't passed feature a"); + #[cfg(not(has_feature_b))] + panic!("Wasn't passed feature b"); + } + + /// This tests the target2 configuration is passed through correctly. + #[test] + #[cfg(not(test_a_and_b))] + fn test_features_passed_target2() { + #[cfg(not(has_feature_a))] + panic!("Wasn't passed feature a"); + #[cfg(has_feature_b)] + panic!("Was passed feature b"); + } + + #[test] + fn test_generated_code_works() { + assert_eq!(crate::run_some_generated_code(), 42); + } +} diff --git a/rust/tests/test_rlib_crate/crate/src/main.rs b/rust/tests/test_rlib_crate/crate/src/main.rs new file mode 100644 index 000000000000..ba0d6151b6ab --- /dev/null +++ b/rust/tests/test_rlib_crate/crate/src/main.rs @@ -0,0 +1,7 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +pub fn main() { + test_rlib_crate::say_hello_from_crate(); +} diff --git a/rust/tests/test_rs_bindings_from_cc/BUILD.gn b/rust/tests/test_rs_bindings_from_cc/BUILD.gn new file mode 100644 index 000000000000..525faa38d9fd --- /dev/null +++ b/rust/tests/test_rs_bindings_from_cc/BUILD.gn @@ -0,0 +1,58 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/rs_bindings_from_cc.gni") +import("//build/rust/rust_executable.gni") + +rust_executable("test_rs_bindings_from_cc") { + crate_root = "main.rs" + sources = [ "main.rs" ] + deps = [ + ":self_contained_target_rs_api", + ":target_depending_on_another_rs_api", + "//third_party/crubit:ctor", + ] + build_native_rust_unit_tests = true +} + +rs_bindings_from_cc("self_contained_target_rs_api") { + # This is the target that the bindings are for / call into. + bindings_target = ":self_contained_target" + + # Lists public headers from `sources` of `self_contained_target`. + public_headers = [ + "self_contained_target_header1.h", + "self_contained_target_header2.h", + ] +} + +source_set("self_contained_target") { + sources = [ + "self_contained_target_header1.h", + "self_contained_target_header2.cc", + "self_contained_target_header2.h", + ] +} + +rs_bindings_from_cc("target_depending_on_another_rs_api") { + # This is the target that the bindings are for / call into. + bindings_target = ":target_depending_on_another" + + # Lists public headers from `sources` of `target_depending_on_another`. + # + # TODO(crbug.com/1297592): Is there something we can do (a convention?) to + # avoid this duplication/repetition? + public_headers = [ "target_depending_on_another.h" ] + + # Parallels `public_deps` of `target_depending_on_another` + # + # TODO(crbug.com/1297592): Is there something we can do (a convention?) to + # avoid this duplication/repetition? + deps = [ ":self_contained_target_rs_api" ] +} + +source_set("target_depending_on_another") { + sources = [ "target_depending_on_another.h" ] + public_deps = [ ":self_contained_target" ] +} diff --git a/rust/tests/test_rs_bindings_from_cc/main.rs b/rust/tests/test_rs_bindings_from_cc/main.rs new file mode 100644 index 000000000000..d20f45c20511 --- /dev/null +++ b/rust/tests/test_rs_bindings_from_cc/main.rs @@ -0,0 +1,32 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +fn main() { + println!("Hello world!"); + println!("AddViaCc(100,42) = {}", ::self_contained_target_rs_api::AddViaCc(100, 42)); + println!("MultiplyViaCc(100,42) = {}", ::self_contained_target_rs_api::MultiplyViaCc(100, 42)); +} + +#[cfg(test)] +mod tests { + #[test] + fn test_self_contained_target_function_call_basics() { + assert_eq!(100 + 42, ::self_contained_target_rs_api::AddViaCc(100, 42)); + assert_eq!(100 * 42, ::self_contained_target_rs_api::MultiplyViaCc(100, 42)); + } + + #[test] + fn test_self_contained_target_pod_struct_basics() { + let x = ::self_contained_target_rs_api::CcPodStruct { value: 123 }; + assert_eq!(x.value, 123); + } + + #[test] + fn test_target_depending_on_another() { + ctor::emplace! 
{ + let x = ::target_depending_on_another_rs_api::CreateCcPodStructFromValue(456); + } + assert_eq!(x.value, 456); + } +} diff --git a/rust/tests/test_rs_bindings_from_cc/self_contained_target_header1.h b/rust/tests/test_rs_bindings_from_cc/self_contained_target_header1.h new file mode 100644 index 000000000000..13da6b111425 --- /dev/null +++ b/rust/tests/test_rs_bindings_from_cc/self_contained_target_header1.h @@ -0,0 +1,12 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef BUILD_RUST_TESTS_TEST_RS_BINDINGS_FROM_CC_SELF_CONTAINED_TARGET_HEADER1_H_ +#define BUILD_RUST_TESTS_TEST_RS_BINDINGS_FROM_CC_SELF_CONTAINED_TARGET_HEADER1_H_ + +inline int MultiplyViaCc(int x, int y) { + return x * y; +} + +#endif // BUILD_RUST_TESTS_TEST_RS_BINDINGS_FROM_CC_SELF_CONTAINED_TARGET_HEADER1_H_ diff --git a/rust/tests/test_rs_bindings_from_cc/self_contained_target_header2.cc b/rust/tests/test_rs_bindings_from_cc/self_contained_target_header2.cc new file mode 100644 index 000000000000..002e0a5ab052 --- /dev/null +++ b/rust/tests/test_rs_bindings_from_cc/self_contained_target_header2.cc @@ -0,0 +1,9 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "build/rust/tests/test_rs_bindings_from_cc/self_contained_target_header2.h" + +int AddViaCc(int x, int y) { + return x + y; +} diff --git a/rust/tests/test_rs_bindings_from_cc/self_contained_target_header2.h b/rust/tests/test_rs_bindings_from_cc/self_contained_target_header2.h new file mode 100644 index 000000000000..fabe75a01eda --- /dev/null +++ b/rust/tests/test_rs_bindings_from_cc/self_contained_target_header2.h @@ -0,0 +1,14 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef BUILD_RUST_TESTS_TEST_RS_BINDINGS_FROM_CC_SELF_CONTAINED_TARGET_HEADER2_H_ +#define BUILD_RUST_TESTS_TEST_RS_BINDINGS_FROM_CC_SELF_CONTAINED_TARGET_HEADER2_H_ + +int AddViaCc(int x, int y); + +struct CcPodStruct final { + int value; +}; + +#endif // BUILD_RUST_TESTS_TEST_RS_BINDINGS_FROM_CC_SELF_CONTAINED_TARGET_HEADER2_H_ diff --git a/rust/tests/test_rs_bindings_from_cc/target_depending_on_another.h b/rust/tests/test_rs_bindings_from_cc/target_depending_on_another.h new file mode 100644 index 000000000000..824282eb41e2 --- /dev/null +++ b/rust/tests/test_rs_bindings_from_cc/target_depending_on_another.h @@ -0,0 +1,14 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef BUILD_RUST_TESTS_TEST_RS_BINDINGS_FROM_CC_TARGET_DEPENDING_ON_ANOTHER_H_ +#define BUILD_RUST_TESTS_TEST_RS_BINDINGS_FROM_CC_TARGET_DEPENDING_ON_ANOTHER_H_ + +#include "build/rust/tests/test_rs_bindings_from_cc/self_contained_target_header2.h" + +inline CcPodStruct CreateCcPodStructFromValue(int x) { + return CcPodStruct{.value = x}; +} + +#endif // BUILD_RUST_TESTS_TEST_RS_BINDINGS_FROM_CC_TARGET_DEPENDING_ON_ANOTHER_H_ diff --git a/rust/tests/test_rust_exe/BUILD.gn b/rust/tests/test_rust_exe/BUILD.gn new file mode 100644 index 000000000000..493854a496db --- /dev/null +++ b/rust/tests/test_rust_exe/BUILD.gn @@ -0,0 +1,17 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import("//build/rust/rust_executable.gni") + +rust_executable("test_rust_exe") { + crate_root = "main.rs" + sources = [ "main.rs" ] + deps = [ + "//build/rust/tests/test_proc_macro_crate", + "//build/rust/tests/test_rlib_crate:target1", + "//build/rust/tests/test_rust_static_library", + "//build/rust/tests/test_rust_static_library_non_standard_arrangement", + ] + build_native_rust_unit_tests = true +} diff --git a/rust/tests/test_rust_exe/main.rs b/rust/tests/test_rust_exe/main.rs new file mode 100644 index 000000000000..0409901f0f38 --- /dev/null +++ b/rust/tests/test_rust_exe/main.rs @@ -0,0 +1,32 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +use test_rlib_crate::say_hello_from_crate; + +fn main() { + assert_eq!(test_proc_macro_crate::calculate_using_proc_macro!(), 30); + assert_eq!(test_rust_static_library::add_two_ints_via_rust(3, 4), 7); + assert_eq!(test_rust_static_library_non_standard_arrangement::do_subtract(4, 3), 1); + say_hello_from_crate(); +} + +/// These tests are largely all to just test different permutations of builds, +/// e.g. calling into mixed_static_librarys, crates, proc macros, etc. +#[cfg(test)] +mod tests { + #[test] + fn test_call_to_rust() { + assert_eq!(test_rust_static_library::add_two_ints_via_rust(3, 4), 7); + } + + #[test] + fn test_call_to_rust_non_standard_arrangement() { + assert_eq!(test_rust_static_library_non_standard_arrangement::do_subtract(8, 4), 4); + } + + #[test] + fn test_proc_macro() { + assert_eq!(test_proc_macro_crate::calculate_using_proc_macro!(), 30) + } +} diff --git a/rust/tests/test_rust_multiple_dep_versions_exe/BUILD.gn b/rust/tests/test_rust_multiple_dep_versions_exe/BUILD.gn new file mode 100644 index 000000000000..c4d4785d09f5 --- /dev/null +++ b/rust/tests/test_rust_multiple_dep_versions_exe/BUILD.gn @@ -0,0 +1,25 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/rust_executable.gni") +import("//build/rust/rust_static_library.gni") + +# The exe depends on lib v1.But it also transitively depends on lib v2. +# The code in the exe should use v1, and the code in the transitive lib should +# use v2. +rust_executable("test_rust_multiple_dep_versions_exe") { + crate_root = "main.rs" + sources = [ "main.rs" ] + deps = [ + ":transitive_v2", + "//build/rust/tests/test_rust_multiple_dep_versions_exe/v1:test_lib", + ] +} + +rust_static_library("transitive_v2") { + crate_root = "transitive_lib.rs" + sources = [ "transitive_lib.rs" ] + deps = + [ "//build/rust/tests/test_rust_multiple_dep_versions_exe/v2:test_lib" ] +} diff --git a/rust/tests/test_rust_multiple_dep_versions_exe/main.rs b/rust/tests/test_rust_multiple_dep_versions_exe/main.rs new file mode 100644 index 000000000000..e5471db246aa --- /dev/null +++ b/rust/tests/test_rust_multiple_dep_versions_exe/main.rs @@ -0,0 +1,8 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +fn main() { + test_lib::say_hello_from_v1(); + transitive_v2::transitively_say_hello(); +} diff --git a/rust/tests/test_rust_multiple_dep_versions_exe/transitive_lib.rs b/rust/tests/test_rust_multiple_dep_versions_exe/transitive_lib.rs new file mode 100644 index 000000000000..51806d79e7a3 --- /dev/null +++ b/rust/tests/test_rust_multiple_dep_versions_exe/transitive_lib.rs @@ -0,0 +1,7 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +pub fn transitively_say_hello() { + test_lib::say_hello_from_v2(); +} diff --git a/rust/tests/test_rust_multiple_dep_versions_exe/v1/BUILD.gn b/rust/tests/test_rust_multiple_dep_versions_exe/v1/BUILD.gn new file mode 100644 index 000000000000..0704a1659cee --- /dev/null +++ b/rust/tests/test_rust_multiple_dep_versions_exe/v1/BUILD.gn @@ -0,0 +1,12 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/cargo_crate.gni") + +cargo_crate("test_lib") { + # This crate has the same name as v2/test_lib, but a different epoch. The GN + # target for the unit tests should not collide. + epoch = "1" + sources = [ "src/lib.rs" ] +} diff --git a/rust/tests/test_rust_multiple_dep_versions_exe/v1/src/lib.rs b/rust/tests/test_rust_multiple_dep_versions_exe/v1/src/lib.rs new file mode 100644 index 000000000000..bc95a76a81fc --- /dev/null +++ b/rust/tests/test_rust_multiple_dep_versions_exe/v1/src/lib.rs @@ -0,0 +1,7 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +pub fn say_hello_from_v1() { + println!("Hello, world - from lib version 1"); +} diff --git a/rust/tests/test_rust_multiple_dep_versions_exe/v2/BUILD.gn b/rust/tests/test_rust_multiple_dep_versions_exe/v2/BUILD.gn new file mode 100644 index 000000000000..3fada7b58ddd --- /dev/null +++ b/rust/tests/test_rust_multiple_dep_versions_exe/v2/BUILD.gn @@ -0,0 +1,12 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/cargo_crate.gni") + +cargo_crate("test_lib") { + # This crate has the same name as v1/test_lib, but a different epoch. The GN + # target for the unit tests should not collide. + epoch = "2" + sources = [ "src/lib.rs" ] +} diff --git a/rust/tests/test_rust_multiple_dep_versions_exe/v2/src/lib.rs b/rust/tests/test_rust_multiple_dep_versions_exe/v2/src/lib.rs new file mode 100644 index 000000000000..b8035a1d76b6 --- /dev/null +++ b/rust/tests/test_rust_multiple_dep_versions_exe/v2/src/lib.rs @@ -0,0 +1,7 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +pub fn say_hello_from_v2() { + println!("Hello, world - from lib version 2"); +} diff --git a/rust/tests/test_rust_shared_library/BUILD.gn b/rust/tests/test_rust_shared_library/BUILD.gn new file mode 100644 index 000000000000..f2396c7618f4 --- /dev/null +++ b/rust/tests/test_rust_shared_library/BUILD.gn @@ -0,0 +1,12 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
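+
+# How the `cxx_bindings` entry below is consumed, hedged as an illustrative
+# dependent rather than anything in this file: cxx generates a lib.rs.h
+# header keyed to the source path, and C++ includes it and calls straight
+# through:
+#
+#   #include "build/rust/tests/test_rust_shared_library/src/lib.rs.h"
+#
+#   say_hello();  // implemented in Rust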
+ +import("//build/rust/rust_shared_library.gni") + +rust_shared_library("test_rust_shared_library") { + allow_unsafe = true + sources = [ "src/lib.rs" ] + cxx_bindings = [ "src/lib.rs" ] + build_native_rust_unit_tests = true +} diff --git a/rust/tests/test_rust_shared_library/src/lib.rs b/rust/tests/test_rust_shared_library/src/lib.rs new file mode 100644 index 000000000000..eabfa274af6c --- /dev/null +++ b/rust/tests/test_rust_shared_library/src/lib.rs @@ -0,0 +1,41 @@ +// Copyright 2022 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Requires this allow since cxx generates unsafe code. +// +// TODO(crbug.com/1422745): patch upstream cxx to generate compatible code. +#[allow(unsafe_op_in_unsafe_fn)] +#[cxx::bridge] +mod ffi { + pub struct SomeStruct { + a: i32, + } + extern "Rust" { + fn say_hello(); + fn allocate_via_rust() -> Box; + fn add_two_ints_via_rust(x: i32, y: i32) -> i32; + } +} + +pub fn say_hello() { + println!( + "Hello, world - from a Rust library. Calculations suggest that 3+4={}", + add_two_ints_via_rust(3, 4) + ); +} + +#[test] +fn test_hello() { + assert_eq!(7, add_two_ints_via_rust(3, 4)); +} + +pub fn add_two_ints_via_rust(x: i32, y: i32) -> i32 { + x + y +} + +// The next function is used from the +// AllocatorTest.RustComponentUsesPartitionAlloc unit test. +pub fn allocate_via_rust() -> Box { + Box::new(ffi::SomeStruct { a: 43 }) +} diff --git a/rust/tests/test_rust_static_library/BUILD.gn b/rust/tests/test_rust_static_library/BUILD.gn new file mode 100644 index 000000000000..28a48cbc10cb --- /dev/null +++ b/rust/tests/test_rust_static_library/BUILD.gn @@ -0,0 +1,12 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/rust_static_library.gni") + +rust_static_library("test_rust_static_library") { + allow_unsafe = true + sources = [ "src/lib.rs" ] + cxx_bindings = [ "src/lib.rs" ] + build_native_rust_unit_tests = true +} diff --git a/rust/tests/test_rust_static_library/src/lib.rs b/rust/tests/test_rust_static_library/src/lib.rs new file mode 100644 index 000000000000..1fcabe3f2c9c --- /dev/null +++ b/rust/tests/test_rust_static_library/src/lib.rs @@ -0,0 +1,48 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Requires this allow since cxx generates unsafe code. +// +// TODO(crbug.com/1422745): patch upstream cxx to generate compatible code. +#[allow(unsafe_op_in_unsafe_fn)] +#[cxx::bridge] +mod ffi { + pub struct SomeStruct { + a: i32, + } + extern "Rust" { + fn say_hello(); + fn allocate_via_rust() -> Box; + fn add_two_ints_via_rust(x: i32, y: i32) -> i32; + } +} + +pub fn say_hello() { + println!( + "Hello, world - from a Rust library. Calculations suggest that 3+4={}", + add_two_ints_via_rust(3, 4) + ); +} + +#[test] +fn test_hello() { + assert_eq!(7, add_two_ints_via_rust(3, 4)); +} + +pub fn add_two_ints_via_rust(x: i32, y: i32) -> i32 { + x + y +} + +// The next function is used from the +// AllocatorTest.RustComponentUsesPartitionAlloc unit test. +pub fn allocate_via_rust() -> Box { + Box::new(ffi::SomeStruct { a: 43 }) +} + +mod tests { + #[test] + fn test_in_mod() { + // Always passes; just to see if tests in modules are handled correctly. 
+ } +} diff --git a/rust/tests/test_rust_static_library_non_standard_arrangement/BUILD.gn b/rust/tests/test_rust_static_library_non_standard_arrangement/BUILD.gn new file mode 100644 index 000000000000..6a85557a6325 --- /dev/null +++ b/rust/tests/test_rust_static_library_non_standard_arrangement/BUILD.gn @@ -0,0 +1,15 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/rust_static_library.gni") + +rust_static_library("test_rust_static_library_non_standard_arrangement") { + sources = [ "foo.rs" ] + crate_root = "foo.rs" + unit_test_target = "foo_tests" + + # TODO(danakj): We should write a gtest binary instead of using native rust + # tests outside of a cargo_crate(). + build_native_rust_unit_tests = true +} diff --git a/rust/tests/test_rust_static_library_non_standard_arrangement/foo.rs b/rust/tests/test_rust_static_library_non_standard_arrangement/foo.rs new file mode 100644 index 000000000000..197333950a30 --- /dev/null +++ b/rust/tests/test_rust_static_library_non_standard_arrangement/foo.rs @@ -0,0 +1,12 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +pub extern "C" fn do_subtract(a: u32, b: u32) -> u32 { + a - b +} + +#[test] +fn test_ok() { + assert_eq!(do_subtract(12, 8), 4) +} diff --git a/rust/tests/test_rust_unittests/BUILD.gn b/rust/tests/test_rust_unittests/BUILD.gn new file mode 100644 index 000000000000..bdc1f84a69ef --- /dev/null +++ b/rust/tests/test_rust_unittests/BUILD.gn @@ -0,0 +1,11 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/rust_unit_test.gni") + +rust_unit_test("test_rust_unittests") { + sources = [ "main.rs" ] + crate_root = "main.rs" + deps = [ "//build/rust/tests/test_rust_static_library" ] +} diff --git a/rust/tests/test_rust_unittests/main.rs b/rust/tests/test_rust_unittests/main.rs new file mode 100644 index 000000000000..a10b006d1a51 --- /dev/null +++ b/rust/tests/test_rust_unittests/main.rs @@ -0,0 +1,20 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#![feature(test)] +extern crate test; + +use test::Bencher; +use test_rust_static_library::add_two_ints_via_rust; + +#[test] +fn test_call_into_mixed_static_library() { + assert_eq!(add_two_ints_via_rust(5, 7), 12) +} + +#[allow(soft_unstable)] +#[bench] +fn test_benchmark(b: &mut Bencher) { + b.iter(|| 2 + 2); +} diff --git a/rust/tests/test_serde_json_lenient/BUILD.gn b/rust/tests/test_serde_json_lenient/BUILD.gn new file mode 100644 index 000000000000..dbc954e08ec2 --- /dev/null +++ b/rust/tests/test_serde_json_lenient/BUILD.gn @@ -0,0 +1,27 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/rust/rust_static_library.gni") +import("//testing/test.gni") + +rust_static_library("test_serde_json_lenient_rs") { + crate_root = "lib.rs" + allow_unsafe = true + sources = [ "lib.rs" ] + cxx_bindings = [ "lib.rs" ] + deps = [ "//third_party/rust/serde_json_lenient/v0_1:lib" ] +} + +# TODO(https://crbug.com/1278030) - convert to a pure- +# Rust unit test when that's supported on all platforms. 
+test("test_serde_json_lenient") { + sources = [ "unittests.cc" ] + deps = [ + ":test_serde_json_lenient_rs", + "//base", + "//base/test:run_all_unittests", + "//testing/gmock", + "//testing/gtest", + ] +} diff --git a/rust/tests/test_serde_json_lenient/lib.rs b/rust/tests/test_serde_json_lenient/lib.rs new file mode 100644 index 000000000000..92bfe8594b07 --- /dev/null +++ b/rust/tests/test_serde_json_lenient/lib.rs @@ -0,0 +1,29 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Demo library to ensure that serde_json_lenient is working independently of +// its integration with Chromium. + +use serde_json_lenient::{Result, Value}; + +#[cxx::bridge] +mod ffi { + extern "Rust" { + fn serde_works() -> bool; + } +} + +fn serde_works() -> bool { + parses_ok().unwrap_or_default() +} + +fn parses_ok() -> Result { + let data = r#" + { + "name": "Slartibartfast", + "planets": [ "Magrathea" ] + }"#; + let v: Value = serde_json_lenient::from_str(data)?; + Ok(v["name"] == "Slartibartfast" && v["planets"][0] == "Magrathea") +} diff --git a/rust/tests/test_serde_json_lenient/unittests.cc b/rust/tests/test_serde_json_lenient/unittests.cc new file mode 100644 index 000000000000..d6d9866a1f6c --- /dev/null +++ b/rust/tests/test_serde_json_lenient/unittests.cc @@ -0,0 +1,10 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "build/rust/tests/test_serde_json_lenient/lib.rs.h" +#include "testing/gtest/include/gtest/gtest.h" + +TEST(RustTest, SerdeJsonTest) { + EXPECT_EQ(true, serde_works()); +} diff --git a/rust/tests/test_simple_rust_exe/BUILD.gn b/rust/tests/test_simple_rust_exe/BUILD.gn new file mode 100644 index 000000000000..a800720fbcb2 --- /dev/null +++ b/rust/tests/test_simple_rust_exe/BUILD.gn @@ -0,0 +1,12 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This should be the simplest possible exe with no dependencies, +# to make it easy to investigate build problems. +# For this reason it uses 'executable' directly rather than +# //build/rust/rust_executable.gni. +executable("test_simple_rust_exe") { + crate_root = "main.rs" + deps = [ "//build/rust/std:local_std_for_rustc" ] +} diff --git a/rust/tests/test_simple_rust_exe/main.rs b/rust/tests/test_simple_rust_exe/main.rs new file mode 100644 index 000000000000..e03684fc7f76 --- /dev/null +++ b/rust/tests/test_simple_rust_exe/main.rs @@ -0,0 +1,7 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +fn main() { + println!("Hello, world!"); +} diff --git a/sample_arg_file.gn b/sample_arg_file.gn new file mode 100644 index 000000000000..91e90456e820 --- /dev/null +++ b/sample_arg_file.gn @@ -0,0 +1,6 @@ +# Build arguments go here. Here are some of the most commonly set ones. +# Run `gn args --list` for the full list. +# is_component_build = true +# is_debug = true +# symbol_level = 2 +# use_goma = false diff --git a/sanitize-mac-build-log.sed b/sanitize-mac-build-log.sed new file mode 100644 index 000000000000..23c579eb9a7c --- /dev/null +++ b/sanitize-mac-build-log.sed @@ -0,0 +1,33 @@ +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +# Use this sed script to reduce a Mac build log into something readable. + +# Drop uninformative lines. +/^distcc/d +/^Check dependencies/d +/^ setenv /d +/^ cd /d +/^make: Nothing to be done/d +/^$/d + +# Xcode prints a short "compiling foobar.o" line followed by the lengthy +# full command line. These deletions drop the command line. +\|^ /Developer/usr/bin/|d +\|^ /Developer/Library/PrivateFrameworks/DevToolsCore\.framework/|d +\|^ /Developer/Library/Xcode/Plug-ins/CoreBuildTasks\.xcplugin/|d + +# Drop any goma command lines as well. +\|^ .*/gomacc |d + +# And, if you've overridden something from your own bin directory, remove those +# full command lines, too. +\|^ /Users/[^/]*/bin/|d + +# There's already a nice note for bindings, don't need the command line. +\|^python scripts/rule_binding\.py|d + +# Shorten the "compiling foobar.o" line. +s|^Distributed-CompileC (.*) normal i386 c\+\+ com\.apple\.compilers\.gcc\.4_2| CC \1| +s|^CompileC (.*) normal i386 c\+\+ com\.apple\.compilers\.gcc\.4_2| CC \1| diff --git a/sanitize-mac-build-log.sh b/sanitize-mac-build-log.sh new file mode 100755 index 000000000000..8bd56d7d564a --- /dev/null +++ b/sanitize-mac-build-log.sh @@ -0,0 +1,5 @@ +#!/bin/sh +# Copyright 2010 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +sed -r -f `dirname "${0}"`/`basename "${0}" sh`sed diff --git a/sanitize-win-build-log.sed b/sanitize-win-build-log.sed new file mode 100644 index 000000000000..6077e6c75325 --- /dev/null +++ b/sanitize-win-build-log.sed @@ -0,0 +1,15 @@ +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Use this sed script to reduce a Windows build log into something +# machine-parsable. + +# Drop uninformative lines. +/The operation completed successfully\./d + +# Drop parallelization indicators on lines. +s/^[0-9]+>// + +# Shorten bindings generation lines +s/^.*"python".*idl_compiler\.py".*("[^"]+\.idl").*$/ idl_compiler \1/ diff --git a/sanitize-win-build-log.sh b/sanitize-win-build-log.sh new file mode 100755 index 000000000000..8bd56d7d564a --- /dev/null +++ b/sanitize-win-build-log.sh @@ -0,0 +1,5 @@ +#!/bin/sh +# Copyright 2010 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +sed -r -f `dirname "${0}"`/`basename "${0}" sh`sed diff --git a/sanitizers/OWNERS b/sanitizers/OWNERS new file mode 100644 index 000000000000..09f2efa99adb --- /dev/null +++ b/sanitizers/OWNERS @@ -0,0 +1,6 @@ +eugenis@chromium.org +glider@chromium.org +metzman@chromium.org +rnk@chromium.org +per-file tsan_suppressions.cc=* +per-file lsan_suppressions.cc=* diff --git a/sanitizers/asan_suppressions.cc b/sanitizers/asan_suppressions.cc new file mode 100644 index 000000000000..9fbcd8f0c465 --- /dev/null +++ b/sanitizers/asan_suppressions.cc @@ -0,0 +1,23 @@ +// Copyright 2015 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file contains the default suppressions for AddressSanitizer. +// It should only be used under very limited circumstances such as suppressing +// a report caused by an interceptor call in a system-installed library. + +#if defined(ADDRESS_SANITIZER) + +// Please make sure the code below declares a single string variable +// kASanDefaultSuppressions which contains ASan suppressions delimited by +// newlines. 
Entries should look, for example, like:
+//
+// // http://crbug.com/178677
+// "interceptor_via_lib:libsqlite3.so\n"
+char kASanDefaultSuppressions[] =
+
+    // End of suppressions.
+    // PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS.
+    "";  // Please keep this semicolon.
+
+#endif  // ADDRESS_SANITIZER
diff --git a/sanitizers/lsan_suppressions.cc b/sanitizers/lsan_suppressions.cc
new file mode 100644
index 000000000000..82e5df1711b9
--- /dev/null
+++ b/sanitizers/lsan_suppressions.cc
@@ -0,0 +1,110 @@
+// Copyright 2015 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for LeakSanitizer.
+// You can also pass additional suppressions via LSAN_OPTIONS:
+// LSAN_OPTIONS=suppressions=/path/to/suppressions. Please refer to
+// http://dev.chromium.org/developers/testing/leaksanitizer for more info.
+
+#include "build/build_config.h"
+
+#if defined(LEAK_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kLSanDefaultSuppressions which contains LSan suppressions delimited by
+// newlines. See http://dev.chromium.org/developers/testing/leaksanitizer
+// for the instructions on writing suppressions.
+char kLSanDefaultSuppressions[] =
+    // Intentional leak used as sanity test for Valgrind/memcheck.
+    "leak:base::ToolsSanityTest_MemoryLeak_Test::TestBody\n"
+
+    // ================ Leaks in third-party code ================
+
+    // False positives in libfontconfig. http://crbug.com/39050
+    "leak:libfontconfig\n"
+    // eglibc-2.19/string/strdup.c creates false positive leak errors for the
+    // same reason as crbug.com/39050. The leak error stack trace, when unwound
+    // on malloc, includes a call to libfontconfig. But the default stack
+    // trace is too short on the leak sanitizer bot for the libfontconfig
+    // suppression to work. http://crbug.com/605286
+    "leak:__strdup\n"
+
+    // Leaks in GL and Vulkan drivers and system libraries on Linux NVIDIA
+    "leak:libGL.so\n"
+    "leak:libGLX_nvidia.so\n"
+    "leak:libnvidia-cbl.so\n"
+    "leak:libnvidia-fatbinaryloader.so\n"
+    "leak:libnvidia-glcore.so\n"
+    "leak:libnvidia-rtcore.so\n"
+    "leak:nvidia0\n"
+    "leak:nvidiactl\n"
+    "leak:libdbus-1.so\n"
+
+    // XRandR has several one-time leaks.
+    "leak:libxrandr\n"
+
+    // xrandr leak. http://crbug.com/119677
+    "leak:XRRFindDisplay\n"
+
+    // http://crbug.com/431213, http://crbug.com/416665
+    "leak:gin/object_template_builder.h\n"
+    "leak:gin/function_template.h\n"
+
+    // Leaks in swrast_dri.so. http://crbug.com/540042
+    "leak:swrast_dri.so\n"
+
+    // Leak in glibc's gconv caused by fopen(..., "r,ccs=UNICODE")
+    "leak:__gconv_lookup_cache\n"
+
+    // Leak in libnssutil. crbug.com/1290634
+    "leak:libnssutil3\n"
+
+    // Suppress leaks from unknown third party modules. http://anglebug.com/6937
+    "leak:<unknown module>\n"
+
+    // ================ Leaks in Chromium code ================
+    // PLEASE DO NOT ADD SUPPRESSIONS FOR NEW LEAKS.
+    // Instead, commits that introduce memory leaks should be reverted.
+    // Suppressing the leak is acceptable in some cases when reverting is
+    // impossible, i.e. when enabling leak detection for the first time for a
+    // test target with pre-existing leaks.
+
+    // V8 leaks caused by weak refs that are never called.
+    "leak:blink::DOMWrapperWorld::Create\n"
+    "leak:blink::ScriptState::Create\n"
+
+    // Crash keys are intentionally leaked.
+    "leak:crash_reporter::(anonymous "
+    "namespace)::CrashKeyBaseSupport::Allocate\n"
+
+    // Suppress leaks in CreateCdmInstance. https://crbug.com/961062
+    "leak:media::CdmAdapter::CreateCdmInstance\n"
+
+#if BUILDFLAG(IS_CHROMEOS)
+    // Suppress leak in FileStream. crbug.com/1263374
+    "leak:chromeos::PipeReader::StartIO\n"
+    // Suppress AnimationObserverToHideView leak. crbug.com/1261464
+    "leak:ash::ShelfNavigationWidget::UpdateButtonVisibility\n"
+    // Suppress AnimationSequence leak. crbug.com/1265031
+    "leak:ash::LockStateController::StartPostLockAnimation\n"
+    // Suppress leak in SurfaceDrawContext. crbug.com/1265033
+    "leak:skgpu::v1::SurfaceDrawContext::drawGlyphRunList\n"
+    // Suppress leak in BluetoothServerSocket. crbug.com/1278970
+    "leak:nearby::chrome::BluetoothServerSocket::"
+    "BluetoothServerSocket\n"
+    // Suppress leak in NearbyConnectionBrokerImpl. crbug.com/1279578
+    "leak:ash::secure_channel::NearbyConnectionBrokerImpl\n"
+    // Suppress leak in NearbyEndpointFinderImpl. crbug.com/1288577
+    "leak:ash::secure_channel::NearbyEndpointFinderImpl::~"
+    "NearbyEndpointFinderImpl\n"
+    // Suppress leak in DelayedCallbackGroup test. crbug.com/1279563
+    "leak:DelayedCallbackGroup_TimeoutAndRun_Test\n"
+#endif
+
+    // PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS.
+
+    // End of suppressions.
+    ;  // Please keep this semicolon.
+
+#endif  // LEAK_SANITIZER
diff --git a/sanitizers/sanitizer_options.cc b/sanitizers/sanitizer_options.cc
new file mode 100644
index 000000000000..b2ee0204419d
--- /dev/null
+++ b/sanitizers/sanitizer_options.cc
@@ -0,0 +1,189 @@
+// Copyright 2014 The Chromium Authors
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// This file contains the default options for various compiler-based dynamic
+// tools.
+
+#include "build/build_config.h"
+
+#if defined(ADDRESS_SANITIZER) || defined(LEAK_SANITIZER) ||  \
+    defined(MEMORY_SANITIZER) || defined(THREAD_SANITIZER) || \
+    defined(UNDEFINED_SANITIZER)
+// The callbacks we define here will be called from the sanitizer runtime, but
+// aren't referenced from the Chrome executable. We must ensure that those
+// callbacks are not sanitizer-instrumented, and that they aren't stripped by
+// the linker.
+#define SANITIZER_HOOK_ATTRIBUTE                                           \
+  extern "C"                                                               \
+  __attribute__((no_sanitize("address", "memory", "thread", "undefined"))) \
+  __attribute__((visibility("default")))                                   \
+  __attribute__((used))
+
+// Functions returning default options are declared weak in the tools' runtime
+// libraries. To make the linker pick the strong replacements for those
+// functions from this module, we explicitly force its inclusion by passing
+// -Wl,-u_sanitizer_options_link_helper
+// SANITIZER_HOOK_ATTRIBUTE is used instead of just `extern "C"` solely to make
+// the symbol externally visible, for ToolsSanityTest.LinksSanitizerOptions.
+SANITIZER_HOOK_ATTRIBUTE void _sanitizer_options_link_helper() {}
+#endif
+
+#if defined(ADDRESS_SANITIZER)
+// Default options for AddressSanitizer in various configurations:
+//   strip_path_prefix=/../../ - prefixes up to and including this
+//     substring will be stripped from source file paths in symbolized reports
+//   fast_unwind_on_fatal=1 - use the fast (frame-pointer-based) stack unwinder
+//     to print error reports. V8 doesn't generate debug info for the JIT code,
+//     so the slow unwinder may not work properly.
+//   detect_stack_use_after_return=1 - use fake stack to delay the reuse of
+//     stack allocations and detect stack-use-after-return errors.
+//   symbolize=1 - enable in-process symbolization.
+//   external_symbolizer_path=... - provides the path to llvm-symbolizer
+//     relative to the main executable
+#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS) || BUILDFLAG(IS_APPLE)
+const char kAsanDefaultOptions[] =
+    "strip_path_prefix=/../../ fast_unwind_on_fatal=1 "
+    "detect_stack_use_after_return=1 symbolize=1 detect_leaks=0 "
+    "external_symbolizer_path=%d/../../third_party/llvm-build/Release+Asserts/"
+    "bin/llvm-symbolizer";
+#elif BUILDFLAG(IS_WIN)
+const char* kAsanDefaultOptions =
+    "strip_path_prefix=\\..\\..\\ fast_unwind_on_fatal=1 "
+    "detect_stack_use_after_return=1 symbolize=1 "
+    "external_symbolizer_path=%d/../../third_party/"
+    "llvm-build/Release+Asserts/bin/llvm-symbolizer.exe";
+#endif  // BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS) || BUILDFLAG(IS_APPLE)
+
+#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS) || BUILDFLAG(IS_APPLE) || \
+    BUILDFLAG(IS_WIN)
+// Allow NaCl to override the default asan options.
+extern const char* kAsanDefaultOptionsNaCl;
+__attribute__((weak)) const char* kAsanDefaultOptionsNaCl = nullptr;
+
+SANITIZER_HOOK_ATTRIBUTE const char *__asan_default_options() {
+  if (kAsanDefaultOptionsNaCl)
+    return kAsanDefaultOptionsNaCl;
+  return kAsanDefaultOptions;
+}
+
+extern char kASanDefaultSuppressions[];
+
+SANITIZER_HOOK_ATTRIBUTE const char *__asan_default_suppressions() {
+  return kASanDefaultSuppressions;
+}
+#endif  // BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS) || BUILDFLAG(IS_APPLE)
+        // || BUILDFLAG(IS_WIN)
+#endif  // ADDRESS_SANITIZER
+
+#if defined(THREAD_SANITIZER) && (BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS))
+// Default options for ThreadSanitizer in various configurations:
+//   second_deadlock_stack=1 - more verbose deadlock reports.
+//   report_signal_unsafe=0 - do not report async-signal-unsafe functions
+//     called from signal handlers.
+//   report_thread_leaks=0 - do not report unjoined threads at the end of
+//     the program execution.
+//   print_suppressions=1 - print the list of matched suppressions.
+//   history_size=7 - make the history buffer proportional to 2^7 (the maximum
+//     value) to keep more stack traces.
+//   strip_path_prefix=/../../ - prefixes up to and including this
+//     substring will be stripped from source file paths in symbolized reports.
+//   external_symbolizer_path=... - provides the path to llvm-symbolizer
+//     relative to the main executable
+const char kTsanDefaultOptions[] =
+    "second_deadlock_stack=1 report_signal_unsafe=0 "
+    "report_thread_leaks=0 print_suppressions=1 history_size=7 "
+    "strip_path_prefix=/../../ external_symbolizer_path=%d/../../third_party/"
+    "llvm-build/Release+Asserts/bin/llvm-symbolizer";
+
+SANITIZER_HOOK_ATTRIBUTE const char *__tsan_default_options() {
+  return kTsanDefaultOptions;
+}
+
+extern char kTSanDefaultSuppressions[];
+
+SANITIZER_HOOK_ATTRIBUTE const char *__tsan_default_suppressions() {
+  return kTSanDefaultSuppressions;
+}
+
+#endif  // defined(THREAD_SANITIZER) && (BUILDFLAG(IS_LINUX) ||
+        // BUILDFLAG(IS_CHROMEOS))
+
+#if defined(MEMORY_SANITIZER)
+// Default options for MemorySanitizer:
+//   strip_path_prefix=/../../ - prefixes up to and including this
+//     substring will be stripped from source file paths in symbolized reports.
+//   external_symbolizer_path=...
- provides the path to llvm-symbolizer +// relative to the main executable +const char kMsanDefaultOptions[] = + "strip_path_prefix=/../../ " + "external_symbolizer_path=%d/../../third_party/llvm-build/Release+Asserts/" + "bin/llvm-symbolizer"; + +SANITIZER_HOOK_ATTRIBUTE const char *__msan_default_options() { + return kMsanDefaultOptions; +} + +#endif // MEMORY_SANITIZER + +#if defined(LEAK_SANITIZER) +// Default options for LeakSanitizer: +// strip_path_prefix=/../../ - prefixes up to and including this +// substring will be stripped from source file paths in symbolized reports. +// external_symbolizer_path=... - provides the path to llvm-symbolizer +// relative to the main executable +// use_poisoned=1 - Scan poisoned memory. This is useful for Oilpan (C++ +// garbage collection) which wants to exclude its managed memory from being +// reported as leaks (through root regions) and also temporarily poisons +// memory regions before calling destructors of objects to avoid destructors +// cross-referencing memory in other objects. Main thread termination in +// Blink is not graceful and leak checks may be emitted at any time, which +// means that the garbage collector may be in a state with poisoned memory, +// leading to false-positive reports. +const char kLsanDefaultOptions[] = + "strip_path_prefix=/../../ use_poisoned=1 " + +#if !BUILDFLAG(IS_FUCHSIA) + "external_symbolizer_path=%d/../../third_party/llvm-build/Release+Asserts/" + "bin/llvm-symbolizer " +#endif + +#if defined(ARCH_CPU_64_BITS) + // When pointer compression in V8 is enabled the external pointers in the + // heap are guaranteed to be only 4 bytes aligned. So we need this option + // in order to ensure that LSAN will find all the external pointers. + // TODO(crbug.com/328552): see updates from 2019. + "use_unaligned=1 " +#endif // ARCH_CPU_64_BITS + ; + +SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_options() { + return kLsanDefaultOptions; +} + +// TODO(https://fxbug.dev/102967): Remove when Fuchsia supports +// module-name-based and function-name-based suppression. +#if !BUILDFLAG(IS_FUCHSIA) + +extern char kLSanDefaultSuppressions[]; + +SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_suppressions() { + return kLSanDefaultSuppressions; +} + +#endif // !BUILDFLAG(IS_FUCHSIA) +#endif // LEAK_SANITIZER + +#if defined(UNDEFINED_SANITIZER) +// Default options for UndefinedBehaviorSanitizer: +// print_stacktrace=1 - print the stacktrace when UBSan reports an error. +const char kUbsanDefaultOptions[] = + "print_stacktrace=1 strip_path_prefix=/../../ " + "external_symbolizer_path=%d/../../third_party/llvm-build/Release+Asserts/" + "bin/llvm-symbolizer"; + +SANITIZER_HOOK_ATTRIBUTE const char* __ubsan_default_options() { + return kUbsanDefaultOptions; +} + +#endif // UNDEFINED_SANITIZER diff --git a/sanitizers/tsan_suppressions.cc b/sanitizers/tsan_suppressions.cc new file mode 100644 index 000000000000..d90546efca41 --- /dev/null +++ b/sanitizers/tsan_suppressions.cc @@ -0,0 +1,96 @@ +// Copyright 2014 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file contains the default suppressions for ThreadSanitizer. +// You can also pass additional suppressions via TSAN_OPTIONS: +// TSAN_OPTIONS=suppressions=/path/to/suppressions. Please refer to +// http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2 +// for more info. 
+ +#if defined(THREAD_SANITIZER) + +// Please make sure the code below declares a single string variable +// kTSanDefaultSuppressions contains TSan suppressions delimited by newlines. +// See http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2 +// for the instructions on writing suppressions. +char kTSanDefaultSuppressions[] = + // False positives in libdbus.so, libdconfsettings.so, libflashplayer.so, + // libgio.so, libglib.so, libgobject.so, and libfontconfig.so.1. + // Since we don't instrument them, we cannot reason about the + // synchronization in them. + "race:libdbus*.so\n" + "race:libdconfsettings*.so\n" + "race:libflashplayer.so\n" + "race:libgio*.so\n" + "race:libglib*.so\n" + "race:libgobject*.so\n" + "race:libfontconfig.so.1\n" + + // Intentional race in ToolsSanityTest.DataRace in base_unittests. + "race:base/tools_sanity_unittest.cc\n" + + // Data race caused by swapping out the network change notifier with a mock + // [test-only]. http://crbug.com/927330. + "race:content/browser/net_info_browsertest.cc\n" + + // http://crbug.com/244856 + "race:libpulsecommon*.so\n" + + // http://crbug.com/476529 + "deadlock:cc::VideoLayerImpl::WillDraw\n" + + // http://crbug.com/328826 + "race:skia::(anonymous namespace)::g_pixel_geometry\n" + + // http://crbug.com/328868 + "race:PR_Lock\n" + + // False positive in libc's tzset_internal, http://crbug.com/379738. + "race:tzset_internal\n" + + // http://crbug.com/380554 + "deadlock:g_type_add_interface_static\n" + + // Lock inversion in third party code, won't fix. + // https://crbug.com/455638 + "deadlock:dbus::Bus::ShutdownAndBlock\n" + + // https://crbug.com/459429 + "race:randomnessPid\n" + + // http://crbug.com/691029 + "deadlock:libGLX.so*\n" + + // http://crbug.com/973947 + "deadlock:libnvidia-glsi.so*\n" + + // http://crbug.com/695929 + "race:base::i18n::IsRTL\n" + "race:base::i18n::SetICUDefaultLocale\n" + + // http://crbug.com/927330 + "race:net::(anonymous namespace)::g_network_change_notifier\n" + + // Harmless data races, see WTF::StringImpl::Release code comments. + "race:scoped_refptr::AddRef\n" + "race:scoped_refptr::Release\n" + + // Harmless data race in ipcz block allocation. See comments in + // ipcz::BlockAllocator::Allocate(). + "race:ipcz::BlockAllocator::Allocate\n" + + // https://crbug.com/1405439 + "race:*::perfetto_track_event::internal::g_category_state_storage\n" + "race:perfetto::DataSource*::static_state_\n" + "race:perfetto::*::ResetForTesting\n" + + // In V8 each global safepoint might lock isolate mutexes in a different + // order. This is allowed in this context as it is always guarded by a + // single global mutex. + "deadlock:GlobalSafepoint::EnterGlobalSafepointScope\n" + + // End of suppressions. + ; // Please keep this semicolon. + +#endif // THREAD_SANITIZER diff --git a/shim_headers.gni b/shim_headers.gni new file mode 100644 index 000000000000..3bef6c0de6ee --- /dev/null +++ b/shim_headers.gni @@ -0,0 +1,41 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
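+
+# A hedged usage sketch of the template defined below; the target name,
+# root_path, prefix, and header are illustrative only:
+#
+#   shim_headers("libfoo_shim") {
+#     root_path = "include"
+#     prefix = "foo/"
+#     headers = [ "foo.h" ]
+#   }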
+ +template("shim_headers") { + action_name = "gen_${target_name}" + config_name = "${target_name}_config" + shim_headers_path = "${root_gen_dir}/shim_headers/${target_name}" + + config(config_name) { + include_dirs = [ shim_headers_path ] + } + + action(action_name) { + script = "//tools/generate_shim_headers/generate_shim_headers.py" + args = [ + "--generate", + "--headers-root", + rebase_path(invoker.root_path), + "--output-directory", + rebase_path(shim_headers_path), + ] + if (defined(invoker.prefix)) { + args += [ + "--prefix", + invoker.prefix, + ] + } + args += invoker.headers + + outputs = [] + foreach(h, invoker.headers) { + outputs += [ shim_headers_path + "/" + rebase_path(invoker.root_path,"//") + "/" + h ] + } + } + + group(target_name) { + deps = [ ":${action_name}" ] + all_dependent_configs = [ ":${config_name}" ] + } +} diff --git a/skia_gold_common/.style.yapf b/skia_gold_common/.style.yapf new file mode 100644 index 000000000000..239e0a247f39 --- /dev/null +++ b/skia_gold_common/.style.yapf @@ -0,0 +1,6 @@ +[style] +based_on_style = pep8 + +column_limit = 80 +indent_width = 2 + diff --git a/skia_gold_common/OWNERS b/skia_gold_common/OWNERS new file mode 100644 index 000000000000..428f6102824e --- /dev/null +++ b/skia_gold_common/OWNERS @@ -0,0 +1 @@ +bsheedy@chromium.org diff --git a/skia_gold_common/PRESUBMIT.py b/skia_gold_common/PRESUBMIT.py new file mode 100644 index 000000000000..f3cc772d81cc --- /dev/null +++ b/skia_gold_common/PRESUBMIT.py @@ -0,0 +1,39 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Presubmit script for //build/skia_gold_common/. + +See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts +for more details on the presubmit API built into depot_tools. +""" + +USE_PYTHON3 = True + +PRESUBMIT_VERSION = '2.0.0' + + +def _GetSkiaGoldEnv(input_api): + """Gets the common environment for running Skia Gold tests.""" + build_path = input_api.os_path.join(input_api.PresubmitLocalPath(), '..') + skia_gold_env = dict(input_api.environ) + skia_gold_env.update({ + 'PYTHONPATH': build_path, + 'PYTHONDONTWRITEBYTECODE': '1', + }) + return skia_gold_env + + +def CheckSkiaGoldCommonUnittests(input_api, output_api): + """Runs the unittests for the build/skia_gold_common/ directory.""" + return input_api.canned_checks.RunUnitTestsInDirectory( + input_api, + output_api, + input_api.PresubmitLocalPath(), [r'^.+_unittest\.py$'], + env=_GetSkiaGoldEnv(input_api), + run_on_python2=False, + skip_shebang_check=True) + + +def CheckPylint(input_api, output_api): + """Runs pylint on all directory content and subdirectories.""" + return input_api.canned_checks.RunPylint(input_api, output_api, version='2.7') diff --git a/skia_gold_common/README.md b/skia_gold_common/README.md new file mode 100644 index 000000000000..ec7211174809 --- /dev/null +++ b/skia_gold_common/README.md @@ -0,0 +1,6 @@ +This directory contains Python code used for interacting with the Skia Gold +image diff service. It is used by multiple test harnesses, e.g. +`//build/android/test_runner.py` and +`//content/test/gpu/run_gpu_integration_test.py`. A place such as +`//testing/` would likely be a better location, but causes issues with +V8 since it imports `//build/` but not all of Chromium src. 
diff --git a/skia_gold_common/__init__.py b/skia_gold_common/__init__.py
new file mode 100644
index 000000000000..7290ec4c7b08
--- /dev/null
+++ b/skia_gold_common/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/skia_gold_common/output_managerless_skia_gold_session.py b/skia_gold_common/output_managerless_skia_gold_session.py
new file mode 100644
index 000000000000..c91222ade8f2
--- /dev/null
+++ b/skia_gold_common/output_managerless_skia_gold_session.py
@@ -0,0 +1,62 @@
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Implementation of skia_gold_session.py without output managers.
+
+Diff output is instead stored in a directory and pointed to with file:// URLs.
+"""
+
+import os
+import subprocess
+import time
+from typing import List, Tuple
+
+import six
+
+from skia_gold_common import skia_gold_session
+
+
+class OutputManagerlessSkiaGoldSession(skia_gold_session.SkiaGoldSession):
+  def RunComparison(self, *args, **kwargs) -> skia_gold_session.StepRetVal:
+    # Passing True for the output manager is a bit of a hack, as we don't
+    # actually need an output manager and just need to get past the truthy
+    # check.
+    assert 'output_manager' not in kwargs, 'Cannot specify output_manager'
+    kwargs['output_manager'] = True
+    return super().RunComparison(*args, **kwargs)
+
+  def _CreateDiffOutputDir(self, name: str) -> str:
+    # Do this instead of just making a temporary directory so that it's easier
+    # for users to look through multiple results. We intentionally do not clean
+    # this directory up since the user might need to look at it later.
+    timestamp = int(time.time())
+    name = '%s_%d' % (name, timestamp)
+    filepath = os.path.join(self._local_png_directory, name)
+    os.makedirs(filepath)
+    return filepath
+
+  def _StoreDiffLinks(self, image_name: str, _, output_dir: str) -> None:
+    results = self._comparison_results.setdefault(image_name,
+                                                  self.ComparisonResults())
+    # The directory should contain "input-<hash>.png", "closest-<hash>.png",
+    # and "diff.png".
+    for f in os.listdir(output_dir):
+      file_url = 'file://%s' % os.path.join(output_dir, f)
+      if f.startswith('input-'):
+        results.local_diff_given_image = file_url
+      elif f.startswith('closest-'):
+        results.local_diff_closest_image = file_url
+      elif f == 'diff.png':
+        results.local_diff_diff_image = file_url
+
+  @staticmethod
+  def _RunCmdForRcAndOutput(cmd: List[str]) -> Tuple[int, str]:
+    try:
+      output = subprocess.check_output(cmd,
+                                       stderr=subprocess.STDOUT).decode('utf-8')
+      return 0, output
+    except subprocess.CalledProcessError as e:
+      output = e.output
+      if not isinstance(output, six.string_types):
+        output = output.decode('utf-8')
+      return e.returncode, output
diff --git a/skia_gold_common/output_managerless_skia_gold_session_unittest.py b/skia_gold_common/output_managerless_skia_gold_session_unittest.py
new file mode 100755
index 000000000000..11e8763083cc
--- /dev/null
+++ b/skia_gold_common/output_managerless_skia_gold_session_unittest.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env vpython3
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
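+
+# Editorial sketch, not part of the original module: it illustrates the
+# (returncode, combined stdout+stderr) contract of _RunCmdForRcAndOutput()
+# defined above, which the tests in this file mock out. 'echo' is just a
+# POSIX stand-in for goldctl here.
+def _example_rc_and_output_convention():
+  from skia_gold_common import output_managerless_skia_gold_session as omsgs
+  rc, output = omsgs.OutputManagerlessSkiaGoldSession._RunCmdForRcAndOutput(
+      ['echo', 'hello'])
+  # Success pairs a zero returncode with the command's decoded output.
+  assert rc == 0
+  assert output.strip() == 'hello'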
+ +#pylint: disable=protected-access + +import os +import re +import sys +import tempfile +from typing import Any +import unittest + +import six + +if six.PY2: + import mock +else: + import unittest.mock as mock + +from pyfakefs import fake_filesystem_unittest + +from skia_gold_common import output_managerless_skia_gold_session as omsgs +from skia_gold_common import skia_gold_properties +from skia_gold_common import unittest_utils + +createSkiaGoldArgs = unittest_utils.createSkiaGoldArgs + + +def assertArgWith(test: unittest.TestCase, arg_list: list, arg: Any, + value: Any) -> None: + i = arg_list.index(arg) + test.assertEqual(arg_list[i + 1], value) + + +class GpuSkiaGoldSessionDiffTest(fake_filesystem_unittest.TestCase): + def setUp(self) -> None: + self.setUpPyfakefs() + self._working_dir = tempfile.mkdtemp() + self._json_keys = tempfile.NamedTemporaryFile(delete=False).name + + @mock.patch.object(omsgs.OutputManagerlessSkiaGoldSession, + '_RunCmdForRcAndOutput') + def test_commandCommonArgs(self, cmd_mock: mock.MagicMock) -> None: + cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = omsgs.OutputManagerlessSkiaGoldSession(self._working_dir, + sgp, + self._json_keys, + 'corpus', + instance='instance') + session.Diff('name', 'png_file', None) + call_args = cmd_mock.call_args[0][0] + self.assertIn('diff', call_args) + assertArgWith(self, call_args, '--corpus', 'corpus') + # TODO(skbug.com/10610): Remove the -public once we go back to using the + # non-public instance, or add a second test for testing that the correct + # instance is chosen if we decide to support both depending on what the + # user is authenticated for. + assertArgWith(self, call_args, '--instance', 'instance-public') + assertArgWith(self, call_args, '--input', 'png_file') + assertArgWith(self, call_args, '--test', 'name') + # TODO(skbug.com/10611): Re-add this assert and remove the check for the + # absence of the directory once we switch back to using the proper working + # directory. + # assertArgWith(self, call_args, '--work-dir', self._working_dir) + self.assertNotIn(self._working_dir, call_args) + i = call_args.index('--out-dir') + # The output directory should not be a subdirectory of the working + # directory. + self.assertNotIn(self._working_dir, call_args[i + 1]) + + @mock.patch.object(omsgs.OutputManagerlessSkiaGoldSession, '_StoreDiffLinks') + @mock.patch.object(omsgs.OutputManagerlessSkiaGoldSession, + '_RunCmdForRcAndOutput') + def test_explicitLocalPngDirectory(self, cmd_mock: mock.MagicMock, _) -> None: + cmd_mock.return_value = (0, '') + if sys.platform == 'win32': + local_png_dir = 'c:\\tmp\\foo' + else: + local_png_dir = '/tmp/foo' + args = createSkiaGoldArgs(git_revision='a', + skia_gold_local_png_write_directory=local_png_dir) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = omsgs.OutputManagerlessSkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + _, _ = session.Diff('name', '', None) + self.assertEqual(cmd_mock.call_count, 1) + if six.PY3: + call_args = cmd_mock.call_args.args[0] + else: + call_args = cmd_mock.call_args[0][0] + self.assertIn('--out-dir', call_args) + output_dir = call_args[call_args.index('--out-dir') + 1] + # Directory should be a subdirectory of the directory we gave and be made + # up of the image name and a timestamp. 
+ parent_dir, sub_dir = output_dir.rsplit(os.sep, 1) + self.assertEqual(parent_dir, local_png_dir) + sub_dir = os.path.normpath(sub_dir) + self.assertIsNotNone(re.match(r'^name_\d+$', sub_dir)) + + +class OutputManagerlessSkiaGoldSessionStoreDiffLinksTest( + fake_filesystem_unittest.TestCase): + def setUp(self) -> None: + self.setUpPyfakefs() + self._working_dir = tempfile.mkdtemp() + self._json_keys = tempfile.NamedTemporaryFile(delete=False).name + + def test_outputManagerNotNeeded(self) -> None: + args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = omsgs.OutputManagerlessSkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + input_filepath = os.path.join(self._working_dir, 'input-inputhash.png') + with open(input_filepath, 'w') as f: + f.write('') + closest_filepath = os.path.join(self._working_dir, + 'closest-closesthash.png') + with open(closest_filepath, 'w') as f: + f.write('') + diff_filepath = os.path.join(self._working_dir, 'diff.png') + with open(diff_filepath, 'w') as f: + f.write('') + + session._StoreDiffLinks('foo', None, self._working_dir) + self.assertEqual(session.GetGivenImageLink('foo'), + 'file://' + input_filepath) + self.assertEqual(session.GetClosestImageLink('foo'), + 'file://' + closest_filepath) + self.assertEqual(session.GetDiffImageLink('foo'), 'file://' + diff_filepath) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/skia_gold_common/run_pytype.py b/skia_gold_common/run_pytype.py new file mode 100755 index 000000000000..ad1829e66125 --- /dev/null +++ b/skia_gold_common/run_pytype.py @@ -0,0 +1,44 @@ +#!/usr/bin/env vpython3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Simple helper script to run pytype on Gold Python code.""" + +import os +import sys + +GOLD_DIR = os.path.abspath(os.path.dirname(__file__)) +CHROMIUM_SRC_DIR = os.path.realpath(os.path.join(GOLD_DIR, '..', '..')) + +sys.path.append(os.path.join(CHROMIUM_SRC_DIR, 'testing')) + +from pytype_common import pytype_runner # pylint: disable=wrong-import-position + +EXTRA_PATHS_COMPONENTS = [ + ('build', ), + ('testing', ), +] +EXTRA_PATHS = [ + os.path.join(CHROMIUM_SRC_DIR, *p) for p in EXTRA_PATHS_COMPONENTS +] +EXTRA_PATHS.append(GOLD_DIR) + +FILES_AND_DIRECTORIES_TO_CHECK = [ + '.', +] +FILES_AND_DIRECTORIES_TO_CHECK = [ + os.path.join(GOLD_DIR, f) for f in FILES_AND_DIRECTORIES_TO_CHECK +] + +TEST_NAME = 'gold_common_pytype' +TEST_LOCATION = '//build/skia_gold_common/run_pytype.py' + + +def main() -> int: + return pytype_runner.run_pytype(TEST_NAME, TEST_LOCATION, + FILES_AND_DIRECTORIES_TO_CHECK, EXTRA_PATHS, + GOLD_DIR) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/skia_gold_common/skia_gold_properties.py b/skia_gold_common/skia_gold_properties.py new file mode 100644 index 000000000000..91a24cbcb066 --- /dev/null +++ b/skia_gold_common/skia_gold_properties.py @@ -0,0 +1,192 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Class for storing Skia Gold comparison properties. 
+
+Examples:
+* git revision being tested
+* Whether the test is being run locally or on a bot
+* What the continuous integration system is
+"""
+
+import argparse
+import logging
+import optparse
+import os
+from typing import Union
+
+ParsedCmdArgs = Union[argparse.Namespace, optparse.Values]
+
+
+class SkiaGoldProperties():
+  def __init__(self, args: ParsedCmdArgs):
+    """Abstract class to validate and store properties related to Skia Gold.
+
+    Args:
+      args: The parsed arguments from an argparse.ArgumentParser.
+    """
+    self._git_revision = None
+    self._issue = None
+    self._patchset = None
+    self._job_id = None
+    self._local_pixel_tests = None
+    self._no_luci_auth = None
+    self._service_account = None
+    self._bypass_skia_gold_functionality = None
+    self._code_review_system = None
+    self._continuous_integration_system = None
+    self._local_png_directory = None
+
+    self._InitializeProperties(args)
+
+  def IsTryjobRun(self) -> bool:
+    return self.issue is not None
+
+  @property
+  def continuous_integration_system(self) -> str:
+    return self._continuous_integration_system or 'buildbucket'
+
+  @property
+  def code_review_system(self) -> str:
+    return self._code_review_system or 'gerrit'
+
+  @property
+  def git_revision(self) -> str:
+    return self._GetGitRevision()
+
+  @property
+  def issue(self) -> int:
+    return self._issue
+
+  @property
+  def job_id(self) -> str:
+    return self._job_id
+
+  @property
+  def local_pixel_tests(self) -> bool:
+    return self._IsLocalRun()
+
+  @property
+  def local_png_directory(self) -> str:
+    return self._local_png_directory
+
+  @property
+  def no_luci_auth(self) -> bool:
+    return self._no_luci_auth
+
+  @property
+  def service_account(self) -> str:
+    return self._service_account
+
+  @property
+  def patchset(self) -> int:
+    return self._patchset
+
+  @property
+  def bypass_skia_gold_functionality(self) -> bool:
+    return self._bypass_skia_gold_functionality
+
+  @staticmethod
+  def _GetGitOriginMainHeadSha1() -> str:
+    raise NotImplementedError()
+
+  def _GetGitRevision(self) -> str:
+    if not self._git_revision:
+      # Automated tests should always pass the revision, so assume we're on
+      # a workstation and try to get the local origin/main HEAD.
+      if not self._IsLocalRun():
+        raise RuntimeError(
+            '--git-revision was not passed when running on a bot')
+      revision = self._GetGitOriginMainHeadSha1()
+      if not revision or len(revision) != 40:
+        raise RuntimeError(
+            '--git-revision not passed and unable to determine from git')
+      self._git_revision = revision
+    return self._git_revision
+
+  def _IsLocalRun(self) -> bool:
+    if self._local_pixel_tests is None:
+      # Look for the presence of the SWARMING_SERVER environment variable as a
+      # heuristic to determine whether we're running on a workstation or a bot.
+      # This should always be set on swarming, but would be strange to be set on
+      # a workstation.
+      # However, since Skylab technically isn't swarming, we need to look for
+      # an alternative environment variable there.
+      in_swarming = 'SWARMING_SERVER' in os.environ
+      in_skylab = bool(int(os.environ.get('RUNNING_IN_SKYLAB', '0')))
+      self._local_pixel_tests = not (in_swarming or in_skylab)
+      if self._local_pixel_tests:
+        logging.warning(
+            'Automatically determined that test is running on a workstation')
+      else:
+        logging.warning(
+            'Automatically determined that test is running on a bot')
+    return self._local_pixel_tests
+
+  @staticmethod
+  def AddCommandLineArguments(parser: argparse.ArgumentParser) -> None:
+    """Add command line arguments to an ArgumentParser instance.
+
+    Args:
+      parser: ArgumentParser instance
+
+    Returns:
+      None
+    """
+    parser.add_argument('--git-revision', type=str, help='Git revision')
+    parser.add_argument('--gerrit-issue', type=int, help='Gerrit issue number')
+    parser.add_argument('--gerrit-patchset',
+                        type=int,
+                        help='Gerrit patchset number')
+    parser.add_argument('--buildbucket-id',
+                        type=int,
+                        help='Buildbucket ID of builder')
+    parser.add_argument('--code-review-system',
+                        type=str,
+                        help='Code review system')
+    parser.add_argument('--continuous-integration-system',
+                        type=str,
+                        help='Continuous integration system')
+
+  def _InitializeProperties(self, args: ParsedCmdArgs) -> None:
+    if hasattr(args, 'local_pixel_tests'):
+      # If not set, will be automatically determined later if needed.
+      self._local_pixel_tests = args.local_pixel_tests
+
+    if hasattr(args, 'skia_gold_local_png_write_directory'):
+      self._local_png_directory = args.skia_gold_local_png_write_directory
+
+    if hasattr(args, 'no_luci_auth'):
+      self._no_luci_auth = args.no_luci_auth
+
+    if hasattr(args, 'service_account'):
+      self._service_account = args.service_account
+      if self._service_account:
+        self._no_luci_auth = True
+
+    if hasattr(args, 'bypass_skia_gold_functionality'):
+      self._bypass_skia_gold_functionality = args.bypass_skia_gold_functionality
+
+    if hasattr(args, 'code_review_system'):
+      self._code_review_system = args.code_review_system
+
+    if hasattr(args, 'continuous_integration_system'):
+      self._continuous_integration_system = args.continuous_integration_system
+
+    # Will be automatically determined later if needed.
+    if not hasattr(args, 'git_revision') or not args.git_revision:
+      return
+    self._git_revision = args.git_revision
+
+    # Only expected on tryjob runs.
+    if not hasattr(args, 'gerrit_issue') or not args.gerrit_issue:
+      return
+    self._issue = args.gerrit_issue
+    if not hasattr(args, 'gerrit_patchset') or not args.gerrit_patchset:
+      raise RuntimeError(
+          '--gerrit-issue passed, but --gerrit-patchset not passed.')
+    self._patchset = args.gerrit_patchset
+    if not hasattr(args, 'buildbucket_id') or not args.buildbucket_id:
+      raise RuntimeError(
+          '--gerrit-issue passed, but --buildbucket-id not passed.')
+    self._job_id = args.buildbucket_id
diff --git a/skia_gold_common/skia_gold_properties_unittest.py b/skia_gold_common/skia_gold_properties_unittest.py
new file mode 100755
index 000000000000..e333e3396858
--- /dev/null
+++ b/skia_gold_common/skia_gold_properties_unittest.py
@@ -0,0 +1,202 @@
+#!/usr/bin/env vpython3
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
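+
+# Editorial sketch, not part of the original module: it shows the intended
+# flow for the class tested below, assuming an argparse-based harness. The
+# flag values are made up for illustration.
+def _example_properties_usage():
+  import argparse
+
+  from skia_gold_common import skia_gold_properties
+
+  parser = argparse.ArgumentParser()
+  skia_gold_properties.SkiaGoldProperties.AddCommandLineArguments(parser)
+  args = parser.parse_args([
+      '--git-revision', 'a' * 40,
+      '--gerrit-issue', '1',
+      '--gerrit-patchset', '2',
+      '--buildbucket-id', '3',
+  ])
+  sgp = skia_gold_properties.SkiaGoldProperties(args)
+  # All three tryjob flags accompany the revision, so this is a tryjob run;
+  # unset systems fall back to their defaults.
+  assert sgp.IsTryjobRun()
+  assert sgp.code_review_system == 'gerrit'
+  assert sgp.continuous_integration_system == 'buildbucket'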
+ +#pylint: disable=protected-access + +import os +import sys +import unittest + +if sys.version_info[0] == 2: + import mock +else: + import unittest.mock as mock + +from skia_gold_common import skia_gold_properties +from skia_gold_common import unittest_utils + +createSkiaGoldArgs = unittest_utils.createSkiaGoldArgs + + +class SkiaGoldPropertiesInitializationTest(unittest.TestCase): + """Tests that SkiaGoldProperties initializes (or doesn't) when expected.""" + + def verifySkiaGoldProperties( + self, instance: skia_gold_properties.SkiaGoldProperties, + expected: dict) -> None: + self.assertEqual(instance._local_pixel_tests, + expected.get('local_pixel_tests')) + self.assertEqual(instance._no_luci_auth, expected.get('no_luci_auth')) + self.assertEqual(instance._code_review_system, + expected.get('code_review_system')) + self.assertEqual(instance._continuous_integration_system, + expected.get('continuous_integration_system')) + self.assertEqual(instance._git_revision, expected.get('git_revision')) + self.assertEqual(instance._issue, expected.get('gerrit_issue')) + self.assertEqual(instance._patchset, expected.get('gerrit_patchset')) + self.assertEqual(instance._job_id, expected.get('buildbucket_id')) + self.assertEqual(instance._bypass_skia_gold_functionality, + expected.get('bypass_skia_gold_functionality')) + + def test_initializeSkiaGoldAttributes_unsetLocal(self) -> None: + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.verifySkiaGoldProperties(sgp, {}) + + def test_initializeSkiaGoldAttributes_explicitLocal(self) -> None: + args = createSkiaGoldArgs(local_pixel_tests=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.verifySkiaGoldProperties(sgp, {'local_pixel_tests': True}) + + def test_initializeSkiaGoldAttributes_explicitNonLocal(self) -> None: + args = createSkiaGoldArgs(local_pixel_tests=False) + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.verifySkiaGoldProperties(sgp, {'local_pixel_tests': False}) + + def test_initializeSkiaGoldAttributes_explicitNoLuciAuth(self) -> None: + args = createSkiaGoldArgs(no_luci_auth=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.verifySkiaGoldProperties(sgp, {'no_luci_auth': True}) + + def test_initializeSkiaGoldAttributes_explicitServiceAccount(self) -> None: + args = createSkiaGoldArgs(service_account='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.verifySkiaGoldProperties(sgp, { + 'service_account': 'a', + 'no_luci_auth': True + }) + + def test_initializeSkiaGoldAttributes_explicitCrs(self) -> None: + args = createSkiaGoldArgs(code_review_system='foo') + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.verifySkiaGoldProperties(sgp, {'code_review_system': 'foo'}) + + def test_initializeSkiaGoldAttributes_explicitCis(self) -> None: + args = createSkiaGoldArgs(continuous_integration_system='foo') + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.verifySkiaGoldProperties(sgp, {'continuous_integration_system': 'foo'}) + + def test_initializeSkiaGoldAttributes_bypassExplicitTrue(self) -> None: + args = createSkiaGoldArgs(bypass_skia_gold_functionality=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.verifySkiaGoldProperties(sgp, {'bypass_skia_gold_functionality': True}) + + def test_initializeSkiaGoldAttributes_explicitGitRevision(self) -> None: + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.verifySkiaGoldProperties(sgp, {'git_revision': 'a'}) + + 
def test_initializeSkiaGoldAttributes_tryjobArgsIgnoredWithoutRevision( + self) -> None: + args = createSkiaGoldArgs(gerrit_issue=1, + gerrit_patchset=2, + buildbucket_id=3) + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.verifySkiaGoldProperties(sgp, {}) + + def test_initializeSkiaGoldAttributes_tryjobArgs(self) -> None: + args = createSkiaGoldArgs(git_revision='a', + gerrit_issue=1, + gerrit_patchset=2, + buildbucket_id=3) + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.verifySkiaGoldProperties( + sgp, { + 'git_revision': 'a', + 'gerrit_issue': 1, + 'gerrit_patchset': 2, + 'buildbucket_id': 3 + }) + + def test_initializeSkiaGoldAttributes_tryjobMissingPatchset(self) -> None: + args = createSkiaGoldArgs(git_revision='a', + gerrit_issue=1, + buildbucket_id=3) + with self.assertRaises(RuntimeError): + skia_gold_properties.SkiaGoldProperties(args) + + def test_initializeSkiaGoldAttributes_tryjobMissingBuildbucket(self) -> None: + args = createSkiaGoldArgs(git_revision='a', + gerrit_issue=1, + gerrit_patchset=2) + with self.assertRaises(RuntimeError): + skia_gold_properties.SkiaGoldProperties(args) + + +class SkiaGoldPropertiesCalculationTest(unittest.TestCase): + """Tests that SkiaGoldProperties properly calculates certain properties.""" + + def testLocalPixelTests_determineTrue(self) -> None: + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + with mock.patch.dict(os.environ, {}, clear=True): + self.assertTrue(sgp.local_pixel_tests) + with mock.patch.dict(os.environ, {'RUNNING_IN_SKYLAB': '0'}, clear=True): + self.assertTrue(sgp.local_pixel_tests) + + def testLocalPixelTests_determineFalse(self) -> None: + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + with mock.patch.dict(os.environ, {'SWARMING_SERVER': ''}, clear=True): + self.assertFalse(sgp.local_pixel_tests) + with mock.patch.dict(os.environ, {'RUNNING_IN_SKYLAB': '1'}, clear=True): + self.assertFalse(sgp.local_pixel_tests) + + def testIsTryjobRun_noIssue(self) -> None: + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.assertFalse(sgp.IsTryjobRun()) + + def testIsTryjobRun_issue(self) -> None: + args = createSkiaGoldArgs(git_revision='a', + gerrit_issue=1, + gerrit_patchset=2, + buildbucket_id=3) + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.assertTrue(sgp.IsTryjobRun()) + + def testGetGitRevision_revisionSet(self) -> None: + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + self.assertEqual(sgp.git_revision, 'a') + + def testGetGitRevision_findValidRevision(self) -> None: + args = createSkiaGoldArgs(local_pixel_tests=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + with mock.patch.object(skia_gold_properties.SkiaGoldProperties, + '_GetGitOriginMainHeadSha1') as patched_head: + expected = 'a' * 40 + patched_head.return_value = expected + self.assertEqual(sgp.git_revision, expected) + # Should be cached. 
+ self.assertEqual(sgp._git_revision, expected) + + def testGetGitRevision_noExplicitOnBot(self) -> None: + args = createSkiaGoldArgs(local_pixel_tests=False) + sgp = skia_gold_properties.SkiaGoldProperties(args) + with self.assertRaises(RuntimeError): + _ = sgp.git_revision + + def testGetGitRevision_findEmptyRevision(self) -> None: + args = createSkiaGoldArgs(local_pixel_tests=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + with mock.patch.object(skia_gold_properties.SkiaGoldProperties, + '_GetGitOriginMainHeadSha1') as patched_head: + patched_head.return_value = '' + with self.assertRaises(RuntimeError): + _ = sgp.git_revision + + def testGetGitRevision_findMalformedRevision(self) -> None: + args = createSkiaGoldArgs(local_pixel_tests=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + with mock.patch.object(skia_gold_properties.SkiaGoldProperties, + '_GetGitOriginMainHeadSha1') as patched_head: + patched_head.return_value = 'a' * 39 + with self.assertRaises(RuntimeError): + _ = sgp.git_revision + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/skia_gold_common/skia_gold_session.py b/skia_gold_common/skia_gold_session.py new file mode 100644 index 000000000000..b0b54a2b3ee3 --- /dev/null +++ b/skia_gold_common/skia_gold_session.py @@ -0,0 +1,576 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Class for interacting with the Skia Gold image diffing service.""" + +import logging +import os +import platform +import shutil +import sys +import tempfile +import time +from typing import Any, Dict, List, Optional, Tuple + +from skia_gold_common import skia_gold_properties + +CHROMIUM_SRC = os.path.realpath( + os.path.join(os.path.dirname(__file__), '..', '..')) + +GOLDCTL_BINARY = os.path.join(CHROMIUM_SRC, 'tools', 'skia_goldctl') +if sys.platform == 'win32': + GOLDCTL_BINARY = os.path.join(GOLDCTL_BINARY, 'win', 'goldctl') + '.exe' +elif sys.platform == 'darwin': + machine = platform.machine().lower() + if any(machine.startswith(m) for m in ('arm64', 'aarch64')): + GOLDCTL_BINARY = os.path.join(GOLDCTL_BINARY, 'mac_arm64', 'goldctl') + else: + GOLDCTL_BINARY = os.path.join(GOLDCTL_BINARY, 'mac_amd64', 'goldctl') +else: + GOLDCTL_BINARY = os.path.join(GOLDCTL_BINARY, 'linux', 'goldctl') + + +StepRetVal = Tuple[int, Optional[str]] + + +class SkiaGoldSession(): + class StatusCodes(): + """Status codes for RunComparison.""" + SUCCESS = 0 + AUTH_FAILURE = 1 + INIT_FAILURE = 2 + COMPARISON_FAILURE_REMOTE = 3 + COMPARISON_FAILURE_LOCAL = 4 + LOCAL_DIFF_FAILURE = 5 + NO_OUTPUT_MANAGER = 6 + + class ComparisonResults(): + """Struct-like object for storing results of an image comparison.""" + + def __init__(self): + self.public_triage_link: Optional[str] = None + self.internal_triage_link: Optional[str] = None + self.triage_link_omission_reason: Optional[str] = None + self.local_diff_given_image: Optional[str] = None + self.local_diff_closest_image: Optional[str] = None + self.local_diff_diff_image: Optional[str] = None + + def __init__(self, + working_dir: str, + gold_properties: skia_gold_properties.SkiaGoldProperties, + keys_file: str, + corpus: str, + instance: str, + bucket: Optional[str] = None): + """Abstract class to handle all aspects of image comparison via Skia Gold. + + A single SkiaGoldSession is valid for a single instance/corpus/keys_file + combination. + + Args: + working_dir: The directory to store config files, etc. 
+      gold_properties: A skia_gold_properties.SkiaGoldProperties instance for
+        the current test run.
+      keys_file: A path to a JSON file containing various comparison config data
+        such as corpus and debug information like the hardware/software
+        configuration the images will be produced on.
+      corpus: The corpus that the images being compared belong to.
+      instance: The name of the Skia Gold instance to interact with.
+      bucket: Overrides the formulaic Google Storage bucket name generated by
+        goldctl.
+    """
+    self._working_dir = working_dir
+    self._gold_properties = gold_properties
+    self._corpus = corpus
+    self._instance = instance
+    self._bucket = bucket
+    self._local_png_directory = (self._gold_properties.local_png_directory
+                                 or tempfile.mkdtemp())
+    with tempfile.NamedTemporaryFile(suffix='.txt',
+                                     dir=working_dir,
+                                     delete=False) as triage_link_file:
+      self._triage_link_file = triage_link_file.name
+    # A map of image name (string) to ComparisonResults for that image.
+    self._comparison_results = {}
+    self._authenticated = False
+    self._initialized = False
+
+    # Copy the given keys file to the working directory in case it ends up
+    # getting deleted before we try to use it.
+    self._keys_file = os.path.join(working_dir, 'gold_keys.json')
+    shutil.copy(keys_file, self._keys_file)
+
+  def RunComparison(self,
+                    name: str,
+                    png_file: str,
+                    output_manager: Any,
+                    inexact_matching_args: Optional[List[str]] = None,
+                    use_luci: bool = True,
+                    service_account: Optional[str] = None,
+                    optional_keys: Optional[Dict[str, str]] = None,
+                    force_dryrun: bool = False) -> StepRetVal:
+    """Helper method to run all steps to compare a produced image.
+
+    Handles authentication, initialization, comparison, and, if necessary,
+    local diffing.
+
+    Args:
+      name: The name of the image being compared.
+      png_file: A path to a PNG file containing the image to be compared.
+      output_manager: An output manager to use to store diff links. The
+        argument's type depends on what type a subclass's _StoreDiffLinks
+        implementation expects. Can be None even if _StoreDiffLinks expects
+        a valid input, but will fail if it ever actually needs to be used.
+      inexact_matching_args: A list of strings containing extra command line
+        arguments to pass to Gold for inexact matching. Can be omitted to use
+        exact matching.
+      use_luci: If true, authentication will use the service account provided by
+        the LUCI context. If false, will attempt to use whatever is set up in
+        gsutil, which is only supported for local runs.
+      service_account: If set, uses the provided service account instead of
+        LUCI_CONTEXT or whatever is set in gsutil.
+      optional_keys: A dict containing optional key/value pairs to pass to Gold
+        for this comparison. Optional keys are keys unrelated to the
+        configuration the image was produced on, e.g. a comment or whether
+        Gold should treat the image as ignored.
+      force_dryrun: A boolean denoting whether dryrun should be forced on
+        regardless of whether this is a local comparison or not.
+
+    Returns:
+      A tuple (status, error). |status| is a value from
+      SkiaGoldSession.StatusCodes signifying the result of the comparison.
+      |error| is an error message describing the status if not successful.
+ """ + auth_rc, auth_stdout = self.Authenticate(use_luci=use_luci, + service_account=service_account) + if auth_rc: + return self.StatusCodes.AUTH_FAILURE, auth_stdout + + init_rc, init_stdout = self.Initialize() + if init_rc: + return self.StatusCodes.INIT_FAILURE, init_stdout + + compare_rc, compare_stdout = self.Compare( + name=name, + png_file=png_file, + inexact_matching_args=inexact_matching_args, + optional_keys=optional_keys, + force_dryrun=force_dryrun) + if not compare_rc: + return self.StatusCodes.SUCCESS, None + + logging.error('Gold comparison failed: %s', compare_stdout) + if not self._gold_properties.local_pixel_tests: + return self.StatusCodes.COMPARISON_FAILURE_REMOTE, compare_stdout + + if not output_manager: + return (self.StatusCodes.NO_OUTPUT_MANAGER, + 'No output manager for local diff images') + + diff_rc, diff_stdout = self.Diff(name=name, + png_file=png_file, + output_manager=output_manager) + if diff_rc: + return self.StatusCodes.LOCAL_DIFF_FAILURE, diff_stdout + return self.StatusCodes.COMPARISON_FAILURE_LOCAL, compare_stdout + + def Authenticate(self, + use_luci: bool = True, + service_account: Optional[str] = None) -> StepRetVal: + """Authenticates with Skia Gold for this session. + + Args: + use_luci: If true, authentication will use the service account provided + by the LUCI context. If false, will attempt to use whatever is set up + in gsutil, which is only supported for local runs. + service_account: If set, uses the provided service account instead of + LUCI_CONTEXT or whatever is set in gsutil. + + Returns: + A tuple (return_code, output). |return_code| is the return code of the + authentication process. |output| is the stdout + stderr of the + authentication process. + """ + if self._authenticated: + return 0, None + if self._gold_properties.bypass_skia_gold_functionality: + logging.warning('Not actually authenticating with Gold due to ' + '--bypass-skia-gold-functionality being present.') + return 0, None + assert not (use_luci and service_account) + + auth_cmd = [GOLDCTL_BINARY, 'auth', '--work-dir', self._working_dir] + if use_luci: + auth_cmd.append('--luci') + elif service_account: + auth_cmd.extend(['--service-account', service_account]) + elif not self._gold_properties.local_pixel_tests: + raise RuntimeError( + 'Cannot authenticate to Skia Gold with use_luci=False without a ' + 'service account unless running local pixel tests') + + rc, stdout = self._RunCmdForRcAndOutput(auth_cmd) + if rc == 0: + self._authenticated = True + return rc, stdout + + def Initialize(self) -> StepRetVal: + """Initializes the working directory if necessary. + + This can technically be skipped if the same information is passed to the + command used for image comparison, but that is less efficient under the + hood. Doing it that way effectively requires an initialization for every + comparison (~250 ms) instead of once at the beginning. + + Returns: + A tuple (return_code, output). |return_code| is the return code of the + initialization process. |output| is the stdout + stderr of the + initialization process. 
+ """ + if self._initialized: + return 0, None + if self._gold_properties.bypass_skia_gold_functionality: + logging.warning('Not actually initializing Gold due to ' + '--bypass-skia-gold-functionality being present.') + return 0, None + + init_cmd = [ + GOLDCTL_BINARY, + 'imgtest', + 'init', + '--passfail', + '--instance', + self._instance, + '--corpus', + self._corpus, + '--keys-file', + self._keys_file, + '--work-dir', + self._working_dir, + '--failure-file', + self._triage_link_file, + '--commit', + self._gold_properties.git_revision, + ] + if self._bucket: + init_cmd.extend(['--bucket', self._bucket]) + if self._gold_properties.IsTryjobRun(): + init_cmd.extend([ + '--issue', + str(self._gold_properties.issue), + '--patchset', + str(self._gold_properties.patchset), + '--jobid', + str(self._gold_properties.job_id), + '--crs', + str(self._gold_properties.code_review_system), + '--cis', + str(self._gold_properties.continuous_integration_system), + ]) + + rc, stdout = self._RunCmdForRcAndOutput(init_cmd) + if rc == 0: + self._initialized = True + return rc, stdout + + def Compare(self, + name: str, + png_file: str, + inexact_matching_args: Optional[List[str]] = None, + optional_keys: Optional[Dict[str, str]] = None, + force_dryrun: bool = False) -> StepRetVal: + """Compares the given image to images known to Gold. + + Triage links can later be retrieved using GetTriageLinks(). + + Args: + name: The name of the image being compared. + png_file: A path to a PNG file containing the image to be compared. + inexact_matching_args: A list of strings containing extra command line + arguments to pass to Gold for inexact matching. Can be omitted to use + exact matching. + optional_keys: A dict containing optional key/value pairs to pass to Gold + for this comparison. Optional keys are keys unrelated to the + configuration the image was produced on, e.g. a comment or whether + Gold should treat the image as ignored. + force_dryrun: A boolean denoting whether dryrun should be forced on + regardless of whether this is a local comparison or not. + + Returns: + A tuple (return_code, output). |return_code| is the return code of the + comparison process. |output| is the stdout + stderr of the comparison + process. 
+ """ + if self._gold_properties.bypass_skia_gold_functionality: + logging.warning('Not actually comparing with Gold due to ' + '--bypass-skia-gold-functionality being present.') + return 0, None + + compare_cmd = [ + GOLDCTL_BINARY, + 'imgtest', + 'add', + '--test-name', + name, + '--png-file', + png_file, + '--work-dir', + self._working_dir, + ] + if self._gold_properties.local_pixel_tests or force_dryrun: + compare_cmd.append('--dryrun') + if inexact_matching_args: + logging.info('Using inexact matching arguments for image %s: %s', name, + inexact_matching_args) + compare_cmd.extend(inexact_matching_args) + + optional_keys = optional_keys or {} + for k, v in optional_keys.items(): + compare_cmd.extend([ + '--add-test-optional-key', + '%s:%s' % (k, v), + ]) + + self._ClearTriageLinkFile() + rc, stdout = self._RunCmdForRcAndOutput(compare_cmd) + + self._comparison_results[name] = self.ComparisonResults() + if rc == 0: + self._comparison_results[name].triage_link_omission_reason = ( + 'Comparison succeeded, no triage link') + elif self._gold_properties.IsTryjobRun(): + cl_triage_link = ('https://{instance}-gold.skia.org/cl/{crs}/{issue}') + cl_triage_link = cl_triage_link.format( + instance=self._instance, + crs=self._gold_properties.code_review_system, + issue=self._gold_properties.issue) + self._comparison_results[name].internal_triage_link = cl_triage_link + self._comparison_results[name].public_triage_link =\ + self._GeneratePublicTriageLink(cl_triage_link) + else: + try: + with open(self._triage_link_file) as tlf: + triage_link = tlf.read().strip() + if not triage_link: + self._comparison_results[name].triage_link_omission_reason = ( + 'Gold did not provide a triage link. This is likely a bug on ' + "Gold's end.") + self._comparison_results[name].internal_triage_link = None + self._comparison_results[name].public_triage_link = None + else: + self._comparison_results[name].internal_triage_link = triage_link + self._comparison_results[name].public_triage_link =\ + self._GeneratePublicTriageLink(triage_link) + except IOError: + self._comparison_results[name].triage_link_omission_reason = ( + 'Failed to read triage link from file') + return rc, stdout + + def Diff(self, name: str, png_file: str, output_manager: Any) -> StepRetVal: + """Performs a local image diff against the closest known positive in Gold. + + This is used for running tests on a workstation, where uploading data to + Gold for ingestion is not allowed, and thus the web UI is not available. + + Image links can later be retrieved using Get*ImageLink(). + + Args: + name: The name of the image being compared. + png_file: The path to a PNG file containing the image to be diffed. + output_manager: An output manager to use to store diff links. The + argument's type depends on what type a subclasses' _StoreDiffLinks + implementation expects. + + Returns: + A tuple (return_code, output). |return_code| is the return code of the + diff process. |output| is the stdout + stderr of the diff process. + """ + # Instead of returning that everything is okay and putting in dummy links, + # just fail since this should only be called when running locally and + # --bypass-skia-gold-functionality is only meant for use on the bots. 
+ if self._gold_properties.bypass_skia_gold_functionality: + raise RuntimeError( + '--bypass-skia-gold-functionality is not supported when running ' + 'tests locally.') + + output_dir = self._CreateDiffOutputDir(name) + # TODO(skbug.com/10611): Remove this temporary work dir and instead just use + # self._working_dir once `goldctl diff` stops clobbering the auth files in + # the provided work directory. + temp_work_dir = tempfile.mkdtemp() + # shutil.copytree() fails if the destination already exists, so use a + # subdirectory of the temporary directory. + temp_work_dir = os.path.join(temp_work_dir, 'diff_work_dir') + try: + shutil.copytree(self._working_dir, temp_work_dir) + diff_cmd = [ + GOLDCTL_BINARY, + 'diff', + '--corpus', + self._corpus, + '--instance', + self._GetDiffGoldInstance(), + '--input', + png_file, + '--test', + name, + '--work-dir', + temp_work_dir, + '--out-dir', + output_dir, + ] + rc, stdout = self._RunCmdForRcAndOutput(diff_cmd) + self._StoreDiffLinks(name, output_manager, output_dir) + return rc, stdout + finally: + shutil.rmtree(os.path.realpath(os.path.join(temp_work_dir, '..'))) + + def GetTriageLinks(self, name: str) -> Tuple[str, str]: + """Gets the triage links for the given image. + + Args: + name: The name of the image to retrieve the triage link for. + + Returns: + A tuple (public, internal). |public| is a string containing the triage + link for the public Gold instance if it is available, or None if it is not + available for some reason. |internal| is the same as |public|, but + containing a link to the internal Gold instance. The reason for links not + being available can be retrieved using GetTriageLinkOmissionReason. + """ + comparison_results = self._comparison_results.get(name, + self.ComparisonResults()) + return (comparison_results.public_triage_link, + comparison_results.internal_triage_link) + + def GetTriageLinkOmissionReason(self, name: str) -> str: + """Gets the reason why a triage link is not available for an image. + + Args: + name: The name of the image whose triage link does not exist. + + Returns: + A string containing the reason why a triage link is not available. + """ + if name not in self._comparison_results: + return 'No image comparison performed for %s' % name + results = self._comparison_results[name] + # This method should not be called if there is a valid triage link. + assert results.public_triage_link is None + assert results.internal_triage_link is None + if results.triage_link_omission_reason: + return results.triage_link_omission_reason + if results.local_diff_given_image: + return 'Gold only used to do a local image diff' + raise RuntimeError( + 'Somehow have a ComparisonResults instance for %s that should not ' + 'exist' % name) + + def GetGivenImageLink(self, name: str) -> str: + """Gets the link to the given image used for local diffing. + + Args: + name: The name of the image that was diffed. + + Returns: + A string containing the link to where the image is saved, or None if it + does not exist. + """ + assert name in self._comparison_results + return self._comparison_results[name].local_diff_given_image + + def GetClosestImageLink(self, name: str) -> str: + """Gets the link to the closest known image used for local diffing. + + Args: + name: The name of the image that was diffed. + + Returns: + A string containing the link to where the image is saved, or None if it + does not exist. 
+    """
+    assert name in self._comparison_results
+    return self._comparison_results[name].local_diff_closest_image
+
+  def GetDiffImageLink(self, name: str) -> str:
+    """Gets the link to the diff between the given and closest images.
+
+    Args:
+      name: The name of the image that was diffed.
+
+    Returns:
+      A string containing the link to where the image is saved, or None if it
+      does not exist.
+    """
+    assert name in self._comparison_results
+    return self._comparison_results[name].local_diff_diff_image
+
+  def _GeneratePublicTriageLink(self, internal_link: str) -> str:
+    """Generates a public triage link given an internal one.
+
+    Args:
+      internal_link: A string containing a triage link pointing to an internal
+        Gold instance.
+
+    Returns:
+      A string containing a triage link pointing to the public mirror of the
+      link pointed to by |internal_link|.
+    """
+    return internal_link.replace('%s-gold' % self._instance,
+                                 '%s-public-gold' % self._instance)
+
+  def _ClearTriageLinkFile(self) -> None:
+    """Clears the contents of the triage link file.
+
+    This should be done before every comparison since goldctl appends to the
+    file instead of overwriting its contents, which results in multiple triage
+    links getting concatenated together if there are multiple failures.
+    """
+    open(self._triage_link_file, 'w').close()
+
+  def _CreateDiffOutputDir(self, _name: str) -> str:
+    # We don't use self._local_png_directory here since we want it to be
+    # automatically cleaned up with the working directory. Any subclasses that
+    # want to keep it around can override this method.
+    return tempfile.mkdtemp(dir=self._working_dir)
+
+  def _GetDiffGoldInstance(self) -> str:
+    """Gets the Skia Gold instance to use for the Diff step.
+
+    This can differ based on how a particular instance is set up, mainly
+    depending on whether it is set up for internal results or not.
+    """
+    # TODO(skbug.com/10610): Decide whether to use the public or
+    # non-public instance once authentication is fixed for the non-public
+    # instance.
+    return str(self._instance) + '-public'
+
+  def _StoreDiffLinks(self, image_name: str, output_manager: Any,
+                      output_dir: str) -> None:
+    """Stores the local diff files as links.
+
+    The ComparisonResults entry for |image_name| should have its *_image fields
+    filled after this unless corresponding images were not found on disk.
+
+    Args:
+      image_name: A string containing the name of the image that was diffed.
+      output_manager: An output manager used to surface links to users,
+        if necessary. The expected argument type depends on each subclass's
+        implementation of this method.
+      output_dir: A string containing the path to the directory where diff
+        output image files were saved.
+    """
+    raise NotImplementedError()
+
+  @staticmethod
+  def _RunCmdForRcAndOutput(cmd: List[str]) -> Tuple[int, str]:
+    """Runs |cmd| and returns its returncode and output.
+
+    Args:
+      cmd: A list containing the command line to run.
+
+    Returns:
+      A tuple (rc, output), where |rc| is the returncode of the command and
+      |output| is the stdout + stderr of the command.
+    """
+    raise NotImplementedError()
diff --git a/skia_gold_common/skia_gold_session_manager.py b/skia_gold_common/skia_gold_session_manager.py
new file mode 100644
index 000000000000..976a72ed5147
--- /dev/null
+++ b/skia_gold_common/skia_gold_session_manager.py
@@ -0,0 +1,130 @@
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
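+
+# Editorial sketch, not part of the original module: one plausible way a
+# harness consumes RunComparison() from the SkiaGoldSession defined in
+# skia_gold_session.py above. `session`, `name`, `png_file`, and
+# `output_manager` are assumed to be supplied by the harness; the handling
+# shown is illustrative only.
+def _example_handle_comparison(session, name, png_file, output_manager):
+  from skia_gold_common import skia_gold_session
+  codes = skia_gold_session.SkiaGoldSession.StatusCodes
+  status, error = session.RunComparison(name, png_file, output_manager)
+  if status == codes.SUCCESS:
+    return True
+  if status == codes.COMPARISON_FAILURE_REMOTE:
+    # On bots, surface the triage links that Compare() recorded.
+    public_link, internal_link = session.GetTriageLinks(name)
+    print('Comparison failed, triage at %s (public) or %s (internal): %s' %
+          (public_link, internal_link, error))
+  elif status == codes.COMPARISON_FAILURE_LOCAL:
+    # Locally, Diff() already stored file:// links to the diff images.
+    print('Comparison failed, local diff at %s: %s' %
+          (session.GetDiffImageLink(name), error))
+  else:
+    print('Gold step failed with status %d: %s' % (status, error))
+  return False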
+"""Class for managing multiple SkiaGoldSessions.""" + +import json +import tempfile +from typing import Optional, Type, Union + +from skia_gold_common import skia_gold_properties +from skia_gold_common import skia_gold_session + +KeysInputType = Union[dict, str] + + +class SkiaGoldSessionManager(): + def __init__(self, working_dir: str, + gold_properties: skia_gold_properties.SkiaGoldProperties): + """Abstract class to manage one or more skia_gold_session.SkiaGoldSessions. + + A separate session is required for each instance/corpus/keys_file + combination, so this class will lazily create them as necessary. + + Args: + working_dir: The working directory under which each individual + SkiaGoldSessions' working directory will be created. + gold_properties: A SkiaGoldProperties instance that will be used to create + any SkiaGoldSessions. + """ + self._working_dir = working_dir + self._gold_properties = gold_properties + self._sessions = {} + + def GetSkiaGoldSession(self, + keys_input: KeysInputType, + corpus: Optional[str] = None, + instance: Optional[str] = None, + bucket: Optional[str] = None): + """Gets a SkiaGoldSession for the given arguments. + + Lazily creates one if necessary. + + Args: + keys_input: A way of retrieving various comparison config data such as + corpus and debug information like the hardware/software configuration + the image was produced on. Can be either a dict or a filepath to a + file containing JSON to read. + corpus: A string containing the corpus the session is for. If None, the + corpus will be determined using available information. + instance: The name of the Skia Gold instance to interact with. If None, + will use whatever default the subclass sets. + bucket: Overrides the formulaic Google Storage bucket name generated by + goldctl + """ + instance = instance or self._GetDefaultInstance() + keys_dict = _GetKeysAsDict(keys_input) + keys_string = json.dumps(keys_dict, sort_keys=True) + if corpus is None: + corpus = keys_dict.get('source_type', instance) + # Use the string representation of the keys JSON as a proxy for a hash since + # dicts themselves are not hashable. + session = self._sessions.setdefault(instance, + {}).setdefault(corpus, {}).setdefault( + keys_string, None) + if not session: + working_dir = tempfile.mkdtemp(dir=self._working_dir) + keys_file = _GetKeysAsJson(keys_input, working_dir) + session = self.GetSessionClass()(working_dir, self._gold_properties, + keys_file, corpus, instance, bucket) + self._sessions[instance][corpus][keys_string] = session + return session + + @staticmethod + def _GetDefaultInstance() -> str: + """Gets the default Skia Gold instance. + + Returns: + A string containing the default instance. + """ + return 'chrome' + + @staticmethod + def GetSessionClass() -> Type[skia_gold_session.SkiaGoldSession]: + """Gets the SkiaGoldSession class to use for session creation. + + Returns: + A reference to a SkiaGoldSession class. + """ + raise NotImplementedError + + +def _GetKeysAsDict(keys_input: KeysInputType) -> dict: + """Converts |keys_input| into a dictionary. + + Args: + keys_input: A dictionary or a string pointing to a JSON file. The contents + of either should be Skia Gold config data. + + Returns: + A dictionary containing the Skia Gold config data. 
+  """
+  if isinstance(keys_input, dict):
+    return keys_input
+  assert isinstance(keys_input, str)
+  with open(keys_input) as f:
+    return json.load(f)
+
+
+def _GetKeysAsJson(keys_input: KeysInputType, session_work_dir: str) -> str:
+  """Converts |keys_input| into a JSON file on disk.
+
+  Args:
+    keys_input: A dictionary or a string pointing to a JSON file. The contents
+      of either should be Skia Gold config data.
+    session_work_dir: The working directory under which each individual
+      SkiaGoldSessions' working directory will be created.
+
+  Returns:
+    A string containing a filepath to a JSON file containing |keys_input|'s
+    data.
+  """
+  if isinstance(keys_input, str):
+    return keys_input
+  assert isinstance(keys_input, dict)
+  keys_file = tempfile.NamedTemporaryFile(suffix='.json',
+                                          dir=session_work_dir,
+                                          delete=False).name
+  with open(keys_file, 'w') as f:
+    json.dump(keys_input, f)
+  return keys_file
diff --git a/skia_gold_common/skia_gold_session_manager_unittest.py b/skia_gold_common/skia_gold_session_manager_unittest.py
new file mode 100755
index 000000000000..5752ab278a68
--- /dev/null
+++ b/skia_gold_common/skia_gold_session_manager_unittest.py
@@ -0,0 +1,184 @@
+#!/usr/bin/env vpython3
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#pylint: disable=protected-access
+
+import json
+import os
+import sys
+import tempfile
+import typing
+import unittest
+
+if sys.version_info[0] == 2:
+  import mock
+else:
+  import unittest.mock as mock
+
+from pyfakefs import fake_filesystem_unittest
+
+from skia_gold_common import skia_gold_properties
+from skia_gold_common import skia_gold_session
+from skia_gold_common import skia_gold_session_manager
+from skia_gold_common import unittest_utils
+
+createSkiaGoldArgs = unittest_utils.createSkiaGoldArgs
+
+
+class SkiaGoldSessionManagerGetSessionTest(fake_filesystem_unittest.TestCase):
+  """Tests the functionality of SkiaGoldSessionManager.GetSkiaGoldSession."""
+
+  def setUp(self) -> None:
+    self.setUpPyfakefs()
+    self._working_dir = tempfile.mkdtemp()
+    self._patcher = mock.patch.object(
+        skia_gold_session_manager.SkiaGoldSessionManager, 'GetSessionClass')
+    self._session_class_mock = self._patcher.start()
+    self._session_class_mock.return_value = skia_gold_session.SkiaGoldSession
+    self.addCleanup(self._patcher.stop)
+
+  def test_ArgsForwardedToSession(self) -> None:
+    args = createSkiaGoldArgs()
+    sgp = skia_gold_properties.SkiaGoldProperties(args)
+    sgsm = skia_gold_session_manager.SkiaGoldSessionManager(
+        self._working_dir, sgp)
+    session = sgsm.GetSkiaGoldSession({}, 'corpus', 'instance')
+    self.assertTrue(session._keys_file.startswith(self._working_dir))
+    self.assertEqual(session._corpus, 'corpus')
+    self.assertEqual(session._instance, 'instance')
+    # Make sure the session's working directory is a subdirectory of the
+    # manager's working directory.
+ self.assertEqual(os.path.dirname(session._working_dir), self._working_dir) + + def test_corpusFromJson(self) -> None: + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + sgsm = skia_gold_session_manager.SkiaGoldSessionManager( + self._working_dir, sgp) + session = sgsm.GetSkiaGoldSession({'source_type': 'foobar'}, None, + 'instance') + self.assertTrue(session._keys_file.startswith(self._working_dir)) + self.assertEqual(session._corpus, 'foobar') + self.assertEqual(session._instance, 'instance') + + def test_corpusDefaultsToInstance(self) -> None: + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + sgsm = skia_gold_session_manager.SkiaGoldSessionManager( + self._working_dir, sgp) + session = sgsm.GetSkiaGoldSession({}, None, 'instance') + self.assertTrue(session._keys_file.startswith(self._working_dir)) + self.assertEqual(session._corpus, 'instance') + self.assertEqual(session._instance, 'instance') + + @mock.patch.object(skia_gold_session_manager.SkiaGoldSessionManager, + '_GetDefaultInstance') + def test_getDefaultInstance(self, + default_instance_mock: mock.MagicMock) -> None: + default_instance_mock.return_value = 'default' + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + sgsm = skia_gold_session_manager.SkiaGoldSessionManager( + self._working_dir, sgp) + session = sgsm.GetSkiaGoldSession({}, None, None) + self.assertTrue(session._keys_file.startswith(self._working_dir)) + self.assertEqual(session._corpus, 'default') + self.assertEqual(session._instance, 'default') + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '__init__') + def test_matchingSessionReused(self, session_mock: mock.MagicMock) -> None: + session_mock.return_value = None + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + sgsm = skia_gold_session_manager.SkiaGoldSessionManager( + self._working_dir, sgp) + session1 = sgsm.GetSkiaGoldSession({}, 'corpus', 'instance') + session2 = sgsm.GetSkiaGoldSession({}, 'corpus', 'instance') + self.assertEqual(session1, session2) + # For some reason, session_mock.assert_called_once() always passes, + # so check the call count directly. 
+ self.assertEqual(session_mock.call_count, 1) + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '__init__') + def test_separateSessionsFromKeys(self, session_mock: mock.MagicMock) -> None: + session_mock.return_value = None + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + sgsm = skia_gold_session_manager.SkiaGoldSessionManager( + self._working_dir, sgp) + session1 = sgsm.GetSkiaGoldSession({}, 'corpus', 'instance') + session2 = sgsm.GetSkiaGoldSession({'something_different': 1}, 'corpus', + 'instance') + self.assertNotEqual(session1, session2) + self.assertEqual(session_mock.call_count, 2) + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '__init__') + def test_separateSessionsFromCorpus(self, + session_mock: mock.MagicMock) -> None: + session_mock.return_value = None + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + sgsm = skia_gold_session_manager.SkiaGoldSessionManager( + self._working_dir, sgp) + session1 = sgsm.GetSkiaGoldSession({}, 'corpus1', 'instance') + session2 = sgsm.GetSkiaGoldSession({}, 'corpus2', 'instance') + self.assertNotEqual(session1, session2) + self.assertEqual(session_mock.call_count, 2) + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '__init__') + def test_separateSessionsFromInstance(self, + session_mock: mock.MagicMock) -> None: + session_mock.return_value = None + args = createSkiaGoldArgs() + sgp = skia_gold_properties.SkiaGoldProperties(args) + self._working_dir = tempfile.mkdtemp() + sgsm = skia_gold_session_manager.SkiaGoldSessionManager( + self._working_dir, sgp) + session1 = sgsm.GetSkiaGoldSession({}, 'corpus', 'instance1') + session2 = sgsm.GetSkiaGoldSession({}, 'corpus', 'instance2') + self.assertNotEqual(session1, session2) + self.assertEqual(session_mock.call_count, 2) + + +class SkiaGoldSessionManagerKeyConversionTest(fake_filesystem_unittest.TestCase + ): + def setUp(self) -> None: + self.setUpPyfakefs() + self._working_dir = tempfile.mkdtemp() + + def test_getKeysAsDict(self) -> None: + keys_dict = {'foo': 'bar'} + keys_file_contents = {'bar': 'baz'} + keys_file = tempfile.NamedTemporaryFile(delete=False).name + with open(keys_file, 'w') as f: + json.dump(keys_file_contents, f) + + self.assertEqual(skia_gold_session_manager._GetKeysAsDict(keys_dict), + keys_dict) + self.assertEqual(skia_gold_session_manager._GetKeysAsDict(keys_file), + keys_file_contents) + with self.assertRaises(AssertionError): + skia_gold_session_manager._GetKeysAsDict(typing.cast(dict, 1)) + + def test_getKeysAsJson(self) -> None: + keys_dict = {'foo': 'bar'} + keys_file_contents = {'bar': 'baz'} + keys_file = tempfile.NamedTemporaryFile(delete=False).name + with open(keys_file, 'w') as f: + json.dump(keys_file_contents, f) + + self.assertEqual(skia_gold_session_manager._GetKeysAsJson(keys_file, ''), + keys_file) + keys_dict_as_json = skia_gold_session_manager._GetKeysAsJson( + keys_dict, self._working_dir) + self.assertTrue(keys_dict_as_json.startswith(self._working_dir)) + with open(keys_dict_as_json) as f: + self.assertEqual(json.load(f), keys_dict) + with self.assertRaises(AssertionError): + skia_gold_session_manager._GetKeysAsJson(typing.cast(dict, 1), '') + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/skia_gold_common/skia_gold_session_unittest.py b/skia_gold_common/skia_gold_session_unittest.py new file mode 100755 index 000000000000..de104f97ea02 --- /dev/null +++ b/skia_gold_common/skia_gold_session_unittest.py @@ -0,0 +1,815 @@ 
+#!/usr/bin/env vpython3 +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +#pylint: disable=protected-access + +import json +import os +import sys +import tempfile +from typing import Any +import unittest + +if sys.version_info[0] == 2: + import mock +else: + import unittest.mock as mock + +from pyfakefs import fake_filesystem_unittest + +from skia_gold_common import skia_gold_properties +from skia_gold_common import skia_gold_session +from skia_gold_common import unittest_utils + +createSkiaGoldArgs = unittest_utils.createSkiaGoldArgs + + +def assertArgWith(test: unittest.TestCase, arg_list: list, arg: Any, + value: Any) -> None: + i = arg_list.index(arg) + test.assertEqual(arg_list[i + 1], value) + + +class SkiaGoldSessionRunComparisonTest(fake_filesystem_unittest.TestCase): + """Tests the functionality of SkiaGoldSession.RunComparison.""" + + def setUp(self) -> None: + self.setUpPyfakefs() + self._working_dir = tempfile.mkdtemp() + self._json_keys = tempfile.NamedTemporaryFile(delete=False).name + with open(self._json_keys, 'w') as f: + json.dump({}, f) + + self.auth_patcher = mock.patch.object(skia_gold_session.SkiaGoldSession, + 'Authenticate') + self.init_patcher = mock.patch.object(skia_gold_session.SkiaGoldSession, + 'Initialize') + self.compare_patcher = mock.patch.object(skia_gold_session.SkiaGoldSession, + 'Compare') + self.diff_patcher = mock.patch.object(skia_gold_session.SkiaGoldSession, + 'Diff') + + self.auth_mock = self.auth_patcher.start() + self.init_mock = self.init_patcher.start() + self.compare_mock = self.compare_patcher.start() + self.diff_mock = self.diff_patcher.start() + + self.addCleanup(self.auth_patcher.stop) + self.addCleanup(self.init_patcher.stop) + self.addCleanup(self.compare_patcher.stop) + self.addCleanup(self.diff_patcher.stop) + + def test_comparisonSuccess(self) -> None: + self.auth_mock.return_value = (0, None) + self.init_mock.return_value = (0, None) + self.compare_mock.return_value = (0, None) + sgp = skia_gold_properties.SkiaGoldProperties(createSkiaGoldArgs()) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + status, _ = session.RunComparison('', '', None) + self.assertEqual(status, + skia_gold_session.SkiaGoldSession.StatusCodes.SUCCESS) + self.assertEqual(self.auth_mock.call_count, 1) + self.assertEqual(self.init_mock.call_count, 1) + self.assertEqual(self.compare_mock.call_count, 1) + self.assertEqual(self.diff_mock.call_count, 0) + + def test_authFailure(self) -> None: + self.auth_mock.return_value = (1, 'Auth failed') + sgp = skia_gold_properties.SkiaGoldProperties(createSkiaGoldArgs()) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + status, error = session.RunComparison('', '', None) + self.assertEqual(status, + skia_gold_session.SkiaGoldSession.StatusCodes.AUTH_FAILURE) + self.assertEqual(error, 'Auth failed') + self.assertEqual(self.auth_mock.call_count, 1) + self.assertEqual(self.init_mock.call_count, 0) + self.assertEqual(self.compare_mock.call_count, 0) + self.assertEqual(self.diff_mock.call_count, 0) + + def test_initFailure(self) -> None: + self.auth_mock.return_value = (0, None) + self.init_mock.return_value = (1, 'Init failed') + sgp = skia_gold_properties.SkiaGoldProperties(createSkiaGoldArgs()) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + status, error = session.RunComparison('', 
'', None) + self.assertEqual(status, + skia_gold_session.SkiaGoldSession.StatusCodes.INIT_FAILURE) + self.assertEqual(error, 'Init failed') + self.assertEqual(self.auth_mock.call_count, 1) + self.assertEqual(self.init_mock.call_count, 1) + self.assertEqual(self.compare_mock.call_count, 0) + self.assertEqual(self.diff_mock.call_count, 0) + + def test_compareFailureRemote(self) -> None: + self.auth_mock.return_value = (0, None) + self.init_mock.return_value = (0, None) + self.compare_mock.return_value = (1, 'Compare failed') + args = createSkiaGoldArgs(local_pixel_tests=False) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + status, error = session.RunComparison('', '', None) + self.assertEqual( + status, + skia_gold_session.SkiaGoldSession.StatusCodes.COMPARISON_FAILURE_REMOTE) + self.assertEqual(error, 'Compare failed') + self.assertEqual(self.auth_mock.call_count, 1) + self.assertEqual(self.init_mock.call_count, 1) + self.assertEqual(self.compare_mock.call_count, 1) + self.assertEqual(self.diff_mock.call_count, 0) + + def test_compareFailureLocal(self) -> None: + self.auth_mock.return_value = (0, None) + self.init_mock.return_value = (0, None) + self.compare_mock.return_value = (1, 'Compare failed') + self.diff_mock.return_value = (0, None) + args = createSkiaGoldArgs(local_pixel_tests=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + status, error = session.RunComparison('', '', + 'Definitely an output manager') + self.assertEqual( + status, + skia_gold_session.SkiaGoldSession.StatusCodes.COMPARISON_FAILURE_LOCAL) + self.assertEqual(error, 'Compare failed') + self.assertEqual(self.auth_mock.call_count, 1) + self.assertEqual(self.init_mock.call_count, 1) + self.assertEqual(self.compare_mock.call_count, 1) + self.assertEqual(self.diff_mock.call_count, 1) + + def test_compareInexactMatching(self) -> None: + self.auth_mock.return_value = (0, None) + self.init_mock.return_value = (0, None) + self.compare_mock.return_value = (0, None) + self.diff_mock.return_value = (0, None) + args = createSkiaGoldArgs(local_pixel_tests=False) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + status, _ = session.RunComparison('', + '', + None, + inexact_matching_args=['--inexact']) + self.assertEqual(status, + skia_gold_session.SkiaGoldSession.StatusCodes.SUCCESS) + self.assertEqual(self.auth_mock.call_count, 1) + self.assertEqual(self.init_mock.call_count, 1) + self.assertEqual(self.compare_mock.call_count, 1) + self.assertEqual(self.diff_mock.call_count, 0) + self.compare_mock.assert_called_with(name='', + png_file=mock.ANY, + inexact_matching_args=['--inexact'], + optional_keys=None, + force_dryrun=False) + + def test_compareOptionalKeys(self) -> None: + self.auth_mock.return_value = (0, None) + self.init_mock.return_value = (0, None) + self.compare_mock.return_value = (0, None) + self.diff_mock.return_value = (0, None) + args = createSkiaGoldArgs(local_pixel_tests=False) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + status, _ = session.RunComparison('', + '', + None, + optional_keys={'foo': 'bar'}) + self.assertEqual(status, + skia_gold_session.SkiaGoldSession.StatusCodes.SUCCESS) + 
self.assertEqual(self.auth_mock.call_count, 1) + self.assertEqual(self.init_mock.call_count, 1) + self.assertEqual(self.compare_mock.call_count, 1) + self.assertEqual(self.diff_mock.call_count, 0) + self.compare_mock.assert_called_with(name='', + png_file=mock.ANY, + inexact_matching_args=None, + optional_keys={'foo': 'bar'}, + force_dryrun=False) + + def test_compareForceDryrun(self) -> None: + self.auth_mock.return_value = (0, None) + self.init_mock.return_value = (0, None) + self.compare_mock.return_value = (0, None) + self.diff_mock.return_value = (0, None) + args = createSkiaGoldArgs(local_pixel_tests=False) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + status, _ = session.RunComparison('', '', None, force_dryrun=True) + self.assertEqual(status, + skia_gold_session.SkiaGoldSession.StatusCodes.SUCCESS) + self.assertEqual(self.auth_mock.call_count, 1) + self.assertEqual(self.init_mock.call_count, 1) + self.assertEqual(self.compare_mock.call_count, 1) + self.assertEqual(self.diff_mock.call_count, 0) + self.compare_mock.assert_called_with(name='', + png_file=mock.ANY, + inexact_matching_args=None, + optional_keys=None, + force_dryrun=True) + + def test_diffFailure(self) -> None: + self.auth_mock.return_value = (0, None) + self.init_mock.return_value = (0, None) + self.compare_mock.return_value = (1, 'Compare failed') + self.diff_mock.return_value = (1, 'Diff failed') + args = createSkiaGoldArgs(local_pixel_tests=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + status, error = session.RunComparison('', '', + 'Definitely an output manager') + self.assertEqual( + status, + skia_gold_session.SkiaGoldSession.StatusCodes.LOCAL_DIFF_FAILURE) + self.assertEqual(error, 'Diff failed') + self.assertEqual(self.auth_mock.call_count, 1) + self.assertEqual(self.init_mock.call_count, 1) + self.assertEqual(self.compare_mock.call_count, 1) + self.assertEqual(self.diff_mock.call_count, 1) + + def test_noOutputManagerLocal(self) -> None: + self.auth_mock.return_value = (0, None) + self.init_mock.return_value = (0, None) + self.compare_mock.return_value = (1, 'Compare failed') + self.diff_mock.return_value = (0, None) + args = createSkiaGoldArgs(local_pixel_tests=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + status, error = session.RunComparison('', '', None) + self.assertEqual( + status, skia_gold_session.SkiaGoldSession.StatusCodes.NO_OUTPUT_MANAGER) + self.assertEqual(error, 'No output manager for local diff images') + self.assertEqual(self.auth_mock.call_count, 1) + self.assertEqual(self.compare_mock.call_count, 1) + self.assertEqual(self.diff_mock.call_count, 0) + + +class SkiaGoldSessionAuthenticateTest(fake_filesystem_unittest.TestCase): + """Tests the functionality of SkiaGoldSession.Authenticate.""" + + def setUp(self) -> None: + self.setUpPyfakefs() + self._working_dir = tempfile.mkdtemp() + self._json_keys = tempfile.NamedTemporaryFile(delete=False).name + + self.cmd_patcher = mock.patch.object(skia_gold_session.SkiaGoldSession, + '_RunCmdForRcAndOutput') + self.cmd_mock = self.cmd_patcher.start() + self.addCleanup(self.cmd_patcher.stop) + + def test_commandOutputReturned(self) -> None: + self.cmd_mock.return_value = (1, 'Something bad :(') + args = 
createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + rc, stdout = session.Authenticate() + self.assertEqual(self.cmd_mock.call_count, 1) + self.assertEqual(rc, 1) + self.assertEqual(stdout, 'Something bad :(') + + def test_bypassSkiaGoldFunctionality(self) -> None: + self.cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a', + bypass_skia_gold_functionality=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + rc, _ = session.Authenticate() + self.assertEqual(rc, 0) + self.cmd_mock.assert_not_called() + + def test_shortCircuitAlreadyAuthenticated(self) -> None: + self.cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + session._authenticated = True + rc, _ = session.Authenticate() + self.assertEqual(rc, 0) + self.cmd_mock.assert_not_called() + + def test_successSetsShortCircuit(self) -> None: + self.cmd_mock.return_value = (0, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + self.assertFalse(session._authenticated) + rc, _ = session.Authenticate() + self.assertEqual(rc, 0) + self.assertTrue(session._authenticated) + self.cmd_mock.assert_called_once() + + def test_failureDoesNotSetShortCircuit(self) -> None: + self.cmd_mock.return_value = (1, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + self.assertFalse(session._authenticated) + rc, _ = session.Authenticate() + self.assertEqual(rc, 1) + self.assertFalse(session._authenticated) + self.cmd_mock.assert_called_once() + + def test_commandWithUseLuciTrue(self) -> None: + self.cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + session.Authenticate(use_luci=True) + self.assertIn('--luci', self.cmd_mock.call_args[0][0]) + + def test_commandWithUseLuciFalse(self) -> None: + self.cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + session.Authenticate(use_luci=False) + self.assertNotIn('--luci', self.cmd_mock.call_args[0][0]) + + def test_commandWithUseLuciFalseNotLocal(self) -> None: + self.cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + with self.assertRaises(RuntimeError): + session.Authenticate(use_luci=False) + + def test_commandWithUseLuciAndServiceAccount(self) -> None: + args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False) + sgp = skia_gold_properties.SkiaGoldProperties(args) + 
session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + with self.assertRaises(AssertionError): + session.Authenticate(use_luci=True, service_account='a') + + def test_commandWithServiceAccount(self) -> None: + self.cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + session.Authenticate(use_luci=False, service_account='service_account') + call_args = self.cmd_mock.call_args[0][0] + self.assertNotIn('--luci', call_args) + assertArgWith(self, call_args, '--service-account', 'service_account') + + def test_commandCommonArgs(self) -> None: + self.cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + session.Authenticate() + call_args = self.cmd_mock.call_args[0][0] + self.assertIn('auth', call_args) + assertArgWith(self, call_args, '--work-dir', self._working_dir) + + +class SkiaGoldSessionInitializeTest(fake_filesystem_unittest.TestCase): + """Tests the functionality of SkiaGoldSession.Initialize.""" + + def setUp(self) -> None: + self.setUpPyfakefs() + self._working_dir = tempfile.mkdtemp() + self._json_keys = tempfile.NamedTemporaryFile(delete=False).name + + self.cmd_patcher = mock.patch.object(skia_gold_session.SkiaGoldSession, + '_RunCmdForRcAndOutput') + self.cmd_mock = self.cmd_patcher.start() + self.addCleanup(self.cmd_patcher.stop) + + def test_bypassSkiaGoldFunctionality(self) -> None: + self.cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a', + bypass_skia_gold_functionality=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + rc, _ = session.Initialize() + self.assertEqual(rc, 0) + self.cmd_mock.assert_not_called() + + def test_shortCircuitAlreadyInitialized(self) -> None: + self.cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + session._initialized = True + rc, _ = session.Initialize() + self.assertEqual(rc, 0) + self.cmd_mock.assert_not_called() + + def test_successSetsShortCircuit(self) -> None: + self.cmd_mock.return_value = (0, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + self.assertFalse(session._initialized) + rc, _ = session.Initialize() + self.assertEqual(rc, 0) + self.assertTrue(session._initialized) + self.cmd_mock.assert_called_once() + + def test_failureDoesNotSetShortCircuit(self) -> None: + self.cmd_mock.return_value = (1, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + self.assertFalse(session._initialized) + rc, _ = session.Initialize() + self.assertEqual(rc, 1) + self.assertFalse(session._initialized) + self.cmd_mock.assert_called_once() + + def test_commandCommonArgs(self) -> None: + self.cmd_mock.return_value 
= (None, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, + sgp, + self._json_keys, + 'corpus', + instance='instance', + bucket='bucket') + session.Initialize() + call_args = self.cmd_mock.call_args[0][0] + self.assertIn('imgtest', call_args) + self.assertIn('init', call_args) + self.assertIn('--passfail', call_args) + assertArgWith(self, call_args, '--instance', 'instance') + assertArgWith(self, call_args, '--bucket', 'bucket') + assertArgWith(self, call_args, '--corpus', 'corpus') + # The keys file should have been copied to the working directory. + assertArgWith(self, call_args, '--keys-file', + os.path.join(self._working_dir, 'gold_keys.json')) + assertArgWith(self, call_args, '--work-dir', self._working_dir) + assertArgWith(self, call_args, '--failure-file', session._triage_link_file) + assertArgWith(self, call_args, '--commit', 'a') + + def test_commandTryjobArgs(self) -> None: + self.cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a', + gerrit_issue=1, + gerrit_patchset=2, + buildbucket_id=3) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + session.Initialize() + call_args = self.cmd_mock.call_args[0][0] + assertArgWith(self, call_args, '--issue', '1') + assertArgWith(self, call_args, '--patchset', '2') + assertArgWith(self, call_args, '--jobid', '3') + assertArgWith(self, call_args, '--crs', 'gerrit') + assertArgWith(self, call_args, '--cis', 'buildbucket') + + def test_commandTryjobArgsNonDefaultCrs(self) -> None: + self.cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(code_review_system='foo', + git_revision='a', + gerrit_issue=1, + gerrit_patchset=2, + buildbucket_id=3) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + session.Initialize() + call_args = self.cmd_mock.call_args[0][0] + assertArgWith(self, call_args, '--issue', '1') + assertArgWith(self, call_args, '--patchset', '2') + assertArgWith(self, call_args, '--jobid', '3') + assertArgWith(self, call_args, '--crs', 'foo') + assertArgWith(self, call_args, '--cis', 'buildbucket') + + def test_commandTryjobArgsMissing(self) -> None: + self.cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + session.Initialize() + call_args = self.cmd_mock.call_args[0][0] + self.assertNotIn('--issue', call_args) + self.assertNotIn('--patchset', call_args) + self.assertNotIn('--jobid', call_args) + self.assertNotIn('--crs', call_args) + self.assertNotIn('--cis', call_args) + + +class SkiaGoldSessionCompareTest(fake_filesystem_unittest.TestCase): + """Tests the functionality of SkiaGoldSession.Compare.""" + + def setUp(self) -> None: + self.setUpPyfakefs() + self._working_dir = tempfile.mkdtemp() + self._json_keys = tempfile.NamedTemporaryFile(delete=False).name + + self.cmd_patcher = mock.patch.object(skia_gold_session.SkiaGoldSession, + '_RunCmdForRcAndOutput') + self.cmd_mock = self.cmd_patcher.start() + self.addCleanup(self.cmd_patcher.stop) + + def test_commandOutputReturned(self) -> None: + self.cmd_mock.return_value = (1, 'Something bad :(') + args = createSkiaGoldArgs(git_revision='a') + 
sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + rc, stdout = session.Compare('', '') + self.assertEqual(self.cmd_mock.call_count, 1) + self.assertEqual(rc, 1) + self.assertEqual(stdout, 'Something bad :(') + + def test_bypassSkiaGoldFunctionality(self) -> None: + self.cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a', + bypass_skia_gold_functionality=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + rc, _ = session.Compare('', '') + self.assertEqual(rc, 0) + self.cmd_mock.assert_not_called() + + def test_commandWithLocalPixelTestsTrue(self) -> None: + self.cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + session.Compare('', '') + self.assertIn('--dryrun', self.cmd_mock.call_args[0][0]) + + def test_commandWithForceDryrunTrue(self) -> None: + self.cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + session.Compare('', '', force_dryrun=True) + self.assertIn('--dryrun', self.cmd_mock.call_args[0][0]) + + def test_commandWithLocalPixelTestsFalse(self) -> None: + self.cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + session.Compare('', '') + self.assertNotIn('--dryrun', self.cmd_mock.call_args[0][0]) + + def test_commandWithInexactArgs(self) -> None: + self.cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + session.Compare('', '', inexact_matching_args=['--inexact', 'foobar']) + self.assertIn('--inexact', self.cmd_mock.call_args[0][0]) + self.assertIn('foobar', self.cmd_mock.call_args[0][0]) + + def test_commandCommonArgs(self) -> None: + self.cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, + sgp, + self._json_keys, + 'corpus', + instance='instance') + session.Compare('name', 'png_file') + call_args = self.cmd_mock.call_args[0][0] + self.assertIn('imgtest', call_args) + self.assertIn('add', call_args) + assertArgWith(self, call_args, '--test-name', 'name') + assertArgWith(self, call_args, '--png-file', 'png_file') + assertArgWith(self, call_args, '--work-dir', self._working_dir) + + def test_noLinkOnSuccess(self) -> None: + self.cmd_mock.return_value = (0, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + rc, _ = session.Compare('name', 'png_file') + self.assertEqual(rc, 0) + comparison_result = session._comparison_results['name'] + 
self.assertEqual(comparison_result.public_triage_link, None) + self.assertEqual(comparison_result.internal_triage_link, None) + self.assertNotEqual(comparison_result.triage_link_omission_reason, None) + + def test_clLinkOnTrybot(self) -> None: + self.cmd_mock.return_value = (1, None) + args = createSkiaGoldArgs(git_revision='a', + gerrit_issue=1, + gerrit_patchset=2, + buildbucket_id=3) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, + sgp, + self._json_keys, + '', + instance='instance') + rc, _ = session.Compare('name', 'png_file') + self.assertEqual(rc, 1) + comparison_result = session._comparison_results['name'] + self.assertNotEqual(comparison_result.public_triage_link, None) + self.assertNotEqual(comparison_result.internal_triage_link, None) + internal_link = 'https://instance-gold.skia.org/cl/gerrit/1' + public_link = 'https://instance-public-gold.skia.org/cl/gerrit/1' + self.assertEqual(comparison_result.internal_triage_link, internal_link) + self.assertEqual(comparison_result.public_triage_link, public_link) + self.assertEqual(comparison_result.triage_link_omission_reason, None) + self.assertEqual(session.GetTriageLinks('name'), + (public_link, internal_link)) + + def test_individualLinkOnCi(self) -> None: + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, + sgp, + self._json_keys, + '', + instance='foobar') + + internal_link = 'foobar-gold.skia.org' + public_link = 'foobar-public-gold.skia.org' + + def WriteTriageLinkFile(_): + with open(session._triage_link_file, 'w') as f: + f.write(internal_link) + return (1, None) + + self.cmd_mock.side_effect = WriteTriageLinkFile + rc, _ = session.Compare('name', 'png_file') + self.assertEqual(rc, 1) + comparison_result = session._comparison_results['name'] + self.assertNotEqual(comparison_result.public_triage_link, None) + self.assertNotEqual(comparison_result.internal_triage_link, None) + self.assertEqual(comparison_result.internal_triage_link, internal_link) + self.assertEqual(comparison_result.public_triage_link, public_link) + self.assertEqual(comparison_result.triage_link_omission_reason, None) + self.assertEqual(session.GetTriageLinks('name'), + (public_link, internal_link)) + + def test_validOmissionOnMissingLink(self) -> None: + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + + def WriteTriageLinkFile(_): + with open(session._triage_link_file, 'w'): + pass + return (1, None) + + self.cmd_mock.side_effect = WriteTriageLinkFile + rc, _ = session.Compare('name', 'png_file') + self.assertEqual(rc, 1) + comparison_result = session._comparison_results['name'] + self.assertEqual(comparison_result.public_triage_link, None) + self.assertEqual(comparison_result.internal_triage_link, None) + self.assertIn('Gold did not provide a triage link', + comparison_result.triage_link_omission_reason) + + def test_validOmissionOnIoError(self) -> None: + self.cmd_mock.return_value = (1, None) + args = createSkiaGoldArgs(git_revision='a') + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + + def DeleteTriageLinkFile(_): + os.remove(session._triage_link_file) + return (1, None) + + self.cmd_mock.side_effect = DeleteTriageLinkFile 
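+    # The side effect stands in for goldctl and deletes the triage link file,
+    # so reading it back fails; Compare() is expected to record that failure
+    # as the triage link omission reason instead of raising.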
+ rc, _ = session.Compare('name', 'png_file') + self.assertEqual(rc, 1) + comparison_result = session._comparison_results['name'] + self.assertEqual(comparison_result.public_triage_link, None) + self.assertEqual(comparison_result.internal_triage_link, None) + self.assertNotEqual(comparison_result.triage_link_omission_reason, None) + self.assertIn('Failed to read', + comparison_result.triage_link_omission_reason) + + def test_optionalKeysPassedToGoldctl(self) -> None: + self.cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + session.Compare('', '', optional_keys={'foo': 'bar'}) + assertArgWith(self, self.cmd_mock.call_args[0][0], + '--add-test-optional-key', 'foo:bar') + + +class SkiaGoldSessionDiffTest(fake_filesystem_unittest.TestCase): + """Tests the functionality of SkiaGoldSession.Diff.""" + + def setUp(self) -> None: + self.setUpPyfakefs() + self._working_dir = tempfile.mkdtemp() + self._json_keys = tempfile.NamedTemporaryFile(delete=False).name + + self.cmd_patcher = mock.patch.object(skia_gold_session.SkiaGoldSession, + '_RunCmdForRcAndOutput') + self.cmd_mock = self.cmd_patcher.start() + self.addCleanup(self.cmd_patcher.stop) + + @mock.patch.object(skia_gold_session.SkiaGoldSession, '_StoreDiffLinks') + def test_commandOutputReturned(self, _) -> None: + self.cmd_mock.return_value = (1, 'Something bad :(') + args = createSkiaGoldArgs(git_revision='a', local_pixel_tests=False) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + rc, stdout = session.Diff('', '', None) + self.assertEqual(self.cmd_mock.call_count, 1) + self.assertEqual(rc, 1) + self.assertEqual(stdout, 'Something bad :(') + + def test_bypassSkiaGoldFunctionality(self) -> None: + self.cmd_mock.return_value = (None, None) + args = createSkiaGoldArgs(git_revision='a', + bypass_skia_gold_functionality=True) + sgp = skia_gold_properties.SkiaGoldProperties(args) + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + self._json_keys, '', '') + with self.assertRaises(RuntimeError): + session.Diff('', '', None) + + +class SkiaGoldSessionTriageLinkOmissionTest(fake_filesystem_unittest.TestCase): + """Tests the functionality of SkiaGoldSession.GetTriageLinkOmissionReason.""" + + def setUp(self) -> None: + self.setUpPyfakefs() + self._working_dir = tempfile.mkdtemp() + + def _CreateSession(self) -> skia_gold_session.SkiaGoldSession: + sgp = skia_gold_properties.SkiaGoldProperties(createSkiaGoldArgs()) + json_keys = tempfile.NamedTemporaryFile(delete=False).name + session = skia_gold_session.SkiaGoldSession(self._working_dir, sgp, + json_keys, '', '') + session._comparison_results = { + 'foo': skia_gold_session.SkiaGoldSession.ComparisonResults(), + } + return session + + def test_noComparison(self) -> None: + session = self._CreateSession() + session._comparison_results = {} + reason = session.GetTriageLinkOmissionReason('foo') + self.assertEqual(reason, 'No image comparison performed for foo') + + def test_validReason(self) -> None: + session = self._CreateSession() + session._comparison_results['foo'].triage_link_omission_reason = 'bar' + reason = session.GetTriageLinkOmissionReason('foo') + self.assertEqual(reason, 'bar') + + def test_onlyLocal(self) -> None: + session = self._CreateSession() + 
session._comparison_results['foo'].local_diff_given_image = 'bar' + reason = session.GetTriageLinkOmissionReason('foo') + self.assertEqual(reason, 'Gold only used to do a local image diff') + + def test_onlyWithoutTriageLink(self) -> None: + session = self._CreateSession() + comparison_result = session._comparison_results['foo'] + comparison_result.public_triage_link = 'bar' + with self.assertRaises(AssertionError): + session.GetTriageLinkOmissionReason('foo') + comparison_result.public_triage_link = None + comparison_result.internal_triage_link = 'bar' + with self.assertRaises(AssertionError): + session.GetTriageLinkOmissionReason('foo') + + def test_resultsShouldNotExist(self) -> None: + session = self._CreateSession() + with self.assertRaises(RuntimeError): + session.GetTriageLinkOmissionReason('foo') + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/skia_gold_common/unittest_utils.py b/skia_gold_common/unittest_utils.py new file mode 100644 index 000000000000..4fe23a9bf84b --- /dev/null +++ b/skia_gold_common/unittest_utils.py @@ -0,0 +1,44 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Utility methods for Skia Gold functionality unittests.""" + +import argparse +import collections +import typing +from typing import Optional + +_SkiaGoldArgs = collections.namedtuple('_SkiaGoldArgs', [ + 'local_pixel_tests', + 'no_luci_auth', + 'service_account', + 'code_review_system', + 'continuous_integration_system', + 'git_revision', + 'gerrit_issue', + 'gerrit_patchset', + 'buildbucket_id', + 'bypass_skia_gold_functionality', + 'skia_gold_local_png_write_directory', +]) + + +def createSkiaGoldArgs(local_pixel_tests: Optional[bool] = None, + no_luci_auth: Optional[bool] = None, + service_account: Optional[str] = None, + code_review_system: Optional[str] = None, + continuous_integration_system: Optional[str] = None, + git_revision: Optional[str] = None, + gerrit_issue: Optional[int] = None, + gerrit_patchset: Optional[int] = None, + buildbucket_id: Optional[int] = None, + bypass_skia_gold_functionality: Optional[bool] = None, + skia_gold_local_png_write_directory: Optional[str] = None + ) -> argparse.Namespace: + return typing.cast( + argparse.Namespace, + _SkiaGoldArgs(local_pixel_tests, no_luci_auth, service_account, + code_review_system, continuous_integration_system, + git_revision, gerrit_issue, gerrit_patchset, buildbucket_id, + bypass_skia_gold_functionality, + skia_gold_local_png_write_directory)) diff --git a/symlink.gni b/symlink.gni new file mode 100644 index 000000000000..e71128643410 --- /dev/null +++ b/symlink.gni @@ -0,0 +1,82 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Creates a symlink. +# Args: +# source: Path to link to. +# output: Where to create the symlink. +template("symlink") { + action(target_name) { + forward_variables_from(invoker, + [ + "data_deps", + "deps", + "testonly", + "visibility", + ]) + outputs = [ invoker.output ] + script = "//build/symlink.py" + args = [ + "-f", + rebase_path(invoker.source, get_path_info(invoker.output, "dir")), + rebase_path(invoker.output, root_build_dir), + ] + if (defined(invoker.touch) && invoker.touch) { + args += [ "--touch=" + rebase_path(invoker.source, root_build_dir) ] + } + } +} + +# Creates a symlink from root_build_dir/target_name to |binary_label|. 
This rule +# is meant to be used within if (current_toolchain == default_toolchain) blocks +# and point to targets in the non-default toolchain. +# Note that for executables, using a copy (as opposed to a symlink) does not +# work when is_component_build=true, since dependent libraries are found via +# relative location. +# +# Args: +# binary_label: Target that builds the file to symlink to. e.g.: +# ":$target_name($host_toolchain)". +# binary_output_name: The output_name set by the binary_label target +# (if applicable). +# output_name: Where to create the symlink +# (default="$root_out_dir/$binary_output_name"). +# +# Example: +# if (current_toolchain == host_toolchain) { +# executable("foo") { ... } +# } else if (current_toolchain == default_toolchain) { +# binary_symlink("foo") { +# binary_label = ":foo($host_toolchain)" +# } +# } +template("binary_symlink") { + symlink(target_name) { + forward_variables_from(invoker, + [ + "output", + "testonly", + "visibility", + ]) + deps = [ invoker.binary_label ] + data_deps = [ invoker.binary_label ] + if (defined(invoker.data_deps)) { + data_deps += invoker.data_deps + } + + _out_dir = get_label_info(invoker.binary_label, "root_out_dir") + if (defined(invoker.binary_output_name)) { + _name = invoker.binary_output_name + } else { + _name = get_label_info(invoker.binary_label, "name") + } + source = "$_out_dir/$_name" + + _output_name = _name + if (defined(invoker.output_name)) { + _output_name = invoker.output_name + } + output = "$root_out_dir/$_output_name" + } +} diff --git a/symlink.py b/symlink.py new file mode 100755 index 000000000000..ad938072d59e --- /dev/null +++ b/symlink.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python3 +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +description = """ +Make a symlink and optionally touch a file (to handle dependencies). +""" +usage = "%prog [options] source[ source ...] linkname" +epilog = """\ +A symlink to source is created at linkname. If multiple sources are specified, +then linkname is assumed to be a directory, and will contain all the links to +the sources (basenames identical to their source). + +On Windows, this will use hard links (mklink /H) to avoid requiring elevation. +This means that if the original is deleted and replaced, the link will still +have the old contents. +""" + +import errno +import optparse +import os.path +import shutil +import subprocess +import sys + + +def Main(argv): + parser = optparse.OptionParser(usage=usage, description=description, + epilog=epilog) + parser.add_option('-f', '--force', action='store_true') + parser.add_option('--touch') + + options, args = parser.parse_args(argv[1:]) + if len(args) < 2: + parser.error('at least two arguments required.') + + target = args[-1] + sources = args[:-1] + for s in sources: + t = os.path.join(target, os.path.basename(s)) + if len(sources) == 1 and not os.path.isdir(target): + t = target + t = os.path.expanduser(t) + if os.path.realpath(t) == os.path.realpath(s): + continue + try: + # N.B. Python 2.x does not have os.symlink for Windows. + # Python 3 has os.symlink for Windows, but requires either the admin- + # granted privilege SeCreateSymbolicLinkPrivilege or, as of Windows 10 + # 1703, that Developer Mode be enabled. Hard links and junctions do not + # require any extra privileges to create. + if os.name == 'nt': + # mklink does not tolerate /-delimited path names. + t = t.replace('/', '\\') + s = s.replace('/', '\\') + # N.B. 
This tool only handles file hardlinks, not directory junctions. + subprocess.check_output(['cmd.exe', '/c', 'mklink', '/H', t, s], + stderr=subprocess.STDOUT) + else: + os.symlink(s, t) + except OSError as e: + if e.errno == errno.EEXIST and options.force: + if os.path.isdir(t): + shutil.rmtree(t, ignore_errors=True) + else: + os.remove(t) + os.symlink(s, t) + else: + raise + except subprocess.CalledProcessError as e: + # Since subprocess.check_output does not return an easily checked error + # number, in the 'force' case always assume it is 'file already exists' + # and retry. + if options.force: + if os.path.isdir(t): + shutil.rmtree(t, ignore_errors=True) + else: + os.remove(t) + subprocess.check_output(e.cmd, stderr=subprocess.STDOUT) + else: + raise + + + if options.touch: + os.makedirs(os.path.dirname(options.touch), exist_ok=True) + with open(options.touch, 'w'): + pass + + +if __name__ == '__main__': + sys.exit(Main(sys.argv)) diff --git a/timestamp.gni b/timestamp.gni new file mode 100644 index 000000000000..b9b57d9dd0cf --- /dev/null +++ b/timestamp.gni @@ -0,0 +1,34 @@ +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Defines the build_timestamp variable. + +import("//build/util/lastchange.gni") + +declare_args() { + # This should be the filename of a script that prints a single line + # containing an integer that's a unix timestamp in UTC. + # This timestamp is used as build time and will be compiled into + # other code. + # + # This argument may look unused. Before removing please check with the + # chromecast team to see if they still use it internally. + compute_build_timestamp = "compute_build_timestamp.py" +} + +if (is_official_build) { + official_name = "official" +} else { + official_name = "default" +} + +# This will return a timestamp that's different each day (official builds) +# or each month (regular builds). Just rely on gn rerunning due to other +# changes to keep this up to date. (Bots run gn on each build, and for devs +# the timestamp being 100% accurate doesn't matter.) +# See compute_build_timestamp.py for tradeoffs for picking the timestamp. +build_timestamp = exec_script(compute_build_timestamp, + [ official_name ], + "trim string", + [ lastchange_file ]) diff --git a/toolchain/BUILD.gn b/toolchain/BUILD.gn new file mode 100644 index 000000000000..a3bd8c58cc70 --- /dev/null +++ b/toolchain/BUILD.gn @@ -0,0 +1,30 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/toolchain/concurrent_links.gni") +import("//build/toolchain/goma.gni") +import("//build/toolchain/rbe.gni") + +declare_args() { + # Pool for non goma tasks. + action_pool_depth = -1 +} + +if (current_toolchain == default_toolchain) { + if (action_pool_depth == -1 || (use_goma || use_remoteexec)) { + action_pool_depth = exec_script("get_cpu_count.py", [], "value") + } + + pool("link_pool") { + depth = concurrent_links + } + + pool("action_pool") { + depth = action_pool_depth + } + + pool("remote_action_pool") { + depth = 1000 + } +} diff --git a/toolchain/OWNERS b/toolchain/OWNERS new file mode 100644 index 000000000000..90229ac68e78 --- /dev/null +++ b/toolchain/OWNERS @@ -0,0 +1,2 @@ +# Code Coverage. 
+per-file *code_coverage*=pasthana@google.com diff --git a/toolchain/aix/BUILD.gn b/toolchain/aix/BUILD.gn new file mode 100644 index 000000000000..71e4de29befc --- /dev/null +++ b/toolchain/aix/BUILD.gn @@ -0,0 +1,24 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/toolchain/gcc_toolchain.gni") + +gcc_toolchain("ppc64") { + cc = "gcc" + cxx = "g++" + + readelf = "readelf" + nm = "nm" + ar = "ar" + ld = cxx + + toolchain_args = { + current_cpu = "ppc64" + current_os = "aix" + + # reclient does not support gcc. + use_remoteexec = false + is_clang = false + } +} diff --git a/toolchain/android/BUILD.gn b/toolchain/android/BUILD.gn new file mode 100644 index 000000000000..20257d9ace19 --- /dev/null +++ b/toolchain/android/BUILD.gn @@ -0,0 +1,168 @@ +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/android/config.gni") +import("//build/config/clang/clang.gni") +import("//build/config/compiler/compiler.gni") +import("//build/config/ozone.gni") +import("//build/config/sysroot.gni") # Imports android/config.gni. +import("//build/toolchain/gcc_toolchain.gni") + +declare_args() { + # Whether unstripped binaries, i.e. compiled with debug symbols, should be + # considered runtime_deps rather than stripped ones. + android_unstripped_runtime_outputs = true +} + +template("android_clang_toolchain") { + clang_toolchain(target_name) { + assert(defined(invoker.toolchain_args), + "toolchain_args must be defined for android_clang_toolchain()") + + # Android toolchains need to declare .dwp files as outputs, so need to know + # the value of "use_debug_fission" when defining them. + # The derived value of "use_debug_fission" varies based on current_os, but + # toolchain definitions are evaluated under the default toolchain. + # Rather than computing the value under current_os="android", just disable + # it if target_os != "android". + _use_debug_fission = use_debug_fission && target_os == "android" + + toolchain_args = { + forward_variables_from(invoker.toolchain_args, "*") + current_os = "android" + use_debug_fission = _use_debug_fission + } + + # Output linker map files for binary size analysis. + enable_linker_map = true + + strip = rebase_path("$clang_base_path/bin/llvm-strip", root_build_dir) + if (_use_debug_fission) { + # llvm-dwp does not work with thin lto, so use binutils one. + # https://crbug.com/1264130 + if (toolchain_args.current_cpu == "arm") { + _dwp = "arm-linux-androideabi-4.9/prebuilt/linux-x86_64/bin/arm-linux-androideabi-dwp" + } else if (toolchain_args.current_cpu == "arm64") { + _dwp = "aarch64-linux-android-4.9/prebuilt/linux-x86_64/bin/aarch64-linux-android-dwp" + } else if (toolchain_args.current_cpu == "x86") { + _dwp = "x86-4.9/prebuilt/linux-x86_64/bin/i686-linux-android-dwp" + } else if (toolchain_args.current_cpu == "x64") { + _dwp = "x86_64-4.9/prebuilt/linux-x86_64/bin/x86_64-linux-android-dwp" + } else { + _dwp = "llvm/prebuilt/linux-x86_64/bin/llvm-dwp" + } + + dwp = rebase_path("$android_ndk_root/toolchains/$_dwp", root_build_dir) + } + + use_unstripped_as_runtime_outputs = android_unstripped_runtime_outputs + + # Don't use .cr.so for loadable_modules since they are always loaded via + # absolute path. + loadable_module_extension = ".so" + + # We propagate configs to allow cross-toolchain JNI include directories to + # work. 
This flag does not otherwise affect our build, but if applied to + # non-android toolchains, it causes unwanted configs from perfetto to + # propagate from host_toolchain deps. + propagates_configs = true + } +} + +android_clang_toolchain("android_clang_x86") { + toolchain_args = { + current_cpu = "x86" + + # We lack the libclang_rt.profile library for x86 and x86_64, so we cannot + # link any binaries that are generated with coverage instrumentation. + # Therefore we need to turn off 'use_clang_coverage' for this toolchain. + # TODO(crbug.com/865376) + use_clang_coverage = false + + # This turns off all of the LaCrOS-specific flags. A LaCrOS related build + # may use |ash_clang_x64| or |lacros_clang_x64| toolchain, which are + # chromeos toolchains, to build Ash-Chrome or Lacros-Chrome in a + # subdirectory, and because chromeos toolchain uses android toolchain, which + # eventually resulted in that android toolchains being used inside a LaCrOS + # build. + also_build_ash_chrome = false + also_build_lacros_chrome = false + chromeos_is_browser_only = false + ozone_platform = "" + ozone_platform_wayland = false + } +} + +android_clang_toolchain("android_clang_arm") { + toolchain_args = { + current_cpu = "arm" + } +} + +android_clang_toolchain("android_clang_mipsel") { + toolchain_args = { + current_cpu = "mipsel" + } +} + +android_clang_toolchain("android_clang_x64") { + toolchain_args = { + current_cpu = "x64" + + # We lack the libclang_rt.profile library for x86 and x86_64, so we cannot + # link any binaries that are generated with coverage instrumentation. + # Therefore we need to turn off 'use_clang_coverage' for this toolchain. + # TODO(crbug.com/865376) + use_clang_coverage = false + + # This turns off all of the LaCrOS-specific flags. A LaCrOS related build + # may use |ash_clang_x64| or |lacros_clang_x64| toolchain, which are + # chromeos toolchains, to build Ash-Chrome or Lacros-Chrome in a + # subdirectory, and because chromeos toolchain uses android toolchain, which + # eventually resulted in that android toolchains being used inside a LaCrOS + # build. + also_build_ash_chrome = false + also_build_lacros_chrome = false + chromeos_is_browser_only = false + ozone_platform = "" + ozone_platform_wayland = false + } +} + +android_clang_toolchain("android_clang_arm64") { + toolchain_args = { + current_cpu = "arm64" + } +} + +android_clang_toolchain("android_clang_arm64_hwasan") { + toolchain_args = { + current_cpu = "arm64" + is_hwasan = true + android64_ndk_api_level = 29 + } +} + +android_clang_toolchain("android_clang_mips64el") { + toolchain_args = { + current_cpu = "mips64el" + } +} + +# Toolchain for creating native libraries that can be used by +# robolectric_binary targets. It does not emulate NDK APIs nor make available +# NDK header files. +# Targets that opt into defining JNI entrypoints should use the +# //third_party/jdk:jdk config to make jni.h available. 
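+# As a rough sketch, a dependent target could look like this (target and
+# source names below are illustrative, not taken from this file):
+#   shared_library("foo_robolectric_natives") {
+#     sources = [ "foo_jni.cc" ]
+#     configs += [ "//third_party/jdk:jdk" ]
+#   }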
+# This toolchain will set:
+#   is_linux = true
+#   is_android = false
+#   is_robolectric = true
+clang_toolchain("robolectric_$host_cpu") {
+  toolchain_args = {
+    current_os = host_os
+    current_cpu = host_cpu
+    is_robolectric = true
+  }
+}
diff --git a/toolchain/android/DIR_METADATA b/toolchain/android/DIR_METADATA
new file mode 100644
index 000000000000..cdc2d6fb6eb6
--- /dev/null
+++ b/toolchain/android/DIR_METADATA
@@ -0,0 +1 @@
+mixins: "//build/android/COMMON_METADATA"
diff --git a/toolchain/android/OWNERS b/toolchain/android/OWNERS
new file mode 100644
index 000000000000..a74cfbe228b5
--- /dev/null
+++ b/toolchain/android/OWNERS
@@ -0,0 +1 @@
+file://build/android/OWNERS
diff --git a/toolchain/apple/.style.yapf b/toolchain/apple/.style.yapf
new file mode 100644
index 000000000000..557fa7bf84c0
--- /dev/null
+++ b/toolchain/apple/.style.yapf
@@ -0,0 +1,2 @@
+[style]
+based_on_style = pep8
diff --git a/toolchain/apple/BUILD.gn b/toolchain/apple/BUILD.gn
new file mode 100644
index 000000000000..ce5a7059eb5e
--- /dev/null
+++ b/toolchain/apple/BUILD.gn
@@ -0,0 +1,22 @@
+# Copyright 2021 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/concurrent_links.gni")
+
+declare_args() {
+  # Reduce the number of tasks using the copy_bundle_data and compile_xcassets
+  # tools as they can cause lots of I/O contention when invoking ninja with a
+  # large number of parallel jobs (e.g. when using distributed build like goma).
+  bundle_pool_depth = -1
+}
+
+if (current_toolchain == default_toolchain) {
+  pool("bundle_pool") {
+    if (bundle_pool_depth == -1) {
+      depth = concurrent_links
+    } else {
+      depth = bundle_pool_depth
+    }
+  }
+}
diff --git a/toolchain/apple/OWNERS b/toolchain/apple/OWNERS
new file mode 100644
index 000000000000..6f3324f07c8f
--- /dev/null
+++ b/toolchain/apple/OWNERS
@@ -0,0 +1 @@
+file://build/apple/OWNERS
diff --git a/toolchain/apple/filter_libtool.py b/toolchain/apple/filter_libtool.py
new file mode 100644
index 000000000000..269093bbbf5e
--- /dev/null
+++ b/toolchain/apple/filter_libtool.py
@@ -0,0 +1,51 @@
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import os
+import re
+import subprocess
+import sys
+
+# This script executes libtool and filters out logspam lines like:
+#   '/path/to/libtool: file: foo.o has no symbols'
+
+SUPPRESSED_PATTERNS = [
+    re.compile(v) for v in [
+        r'^.*libtool: (?:for architecture: \S* )?file: .* has no symbols$',
+        # Xcode 11 spelling of the "empty archive" warning.
+        # TODO(thakis): Remove once we require Xcode 12.
+        r'^.*libtool: warning for library: .* the table of contents is empty ' \
+        r'\(no object file members in the library define global symbols\)$',
+        # Xcode 12 spelling of the "empty archive" warning.
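+        # A matching line looks roughly like the following (illustrative;
+        # 'libfoo.a' is a made-up archive name):
+        #   warning: .../libtool: archive library: libfoo.a the table of
+        #   contents is empty (no object file members in the library define
+        #   global symbols)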
+ r'^warning: .*libtool: archive library: .* ' \ + r'the table of contents is empty ', + r'\(no object file members in the library define global symbols\)$', + r'^.*libtool: warning same member name \(\S*\) in output file used ' \ + r'for input files: \S* and: \S* \(due to use of basename, ' \ + r'truncation, blank padding or duplicate input files\)$', + ] +] + + +def ShouldSuppressLine(line): + """Returns whether the line should be filtered out.""" + for pattern in SUPPRESSED_PATTERNS: + if pattern.match(line): + return True + return False + + +def Main(cmd_list): + env = os.environ.copy() + libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) + _, err = libtoolout.communicate() + for line in err.decode('UTF-8').splitlines(): + if not ShouldSuppressLine(line): + print(line, file=sys.stderr) + return libtoolout.returncode + + +if __name__ == '__main__': + sys.exit(Main(sys.argv[1:])) diff --git a/toolchain/apple/get_tool_mtime.py b/toolchain/apple/get_tool_mtime.py new file mode 100644 index 000000000000..4ce19e1cc73a --- /dev/null +++ b/toolchain/apple/get_tool_mtime.py @@ -0,0 +1,18 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +import os +import sys + +# Usage: python get_tool_mtime.py path/to/file1.py path/to/file2.py +# +# Prints a GN scope with the variable name being the basename sans-extension +# and the value being the file modification time. A variable is emitted for +# each file argument on the command line. + +if __name__ == '__main__': + for f in sys.argv[1:]: + variable = os.path.splitext(os.path.basename(f))[0] + print('%s = %d' % (variable, os.path.getmtime(f))) diff --git a/toolchain/apple/linker_driver.py b/toolchain/apple/linker_driver.py new file mode 100755 index 000000000000..415a9fd21ec9 --- /dev/null +++ b/toolchain/apple/linker_driver.py @@ -0,0 +1,368 @@ +#!/usr/bin/env python3 + +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import os +import os.path +import shutil +import subprocess +import sys +import tempfile + +# The path to `whole_archive`. +sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..')) + +import whole_archive + +# Prefix for all custom linker driver arguments. +LINKER_DRIVER_ARG_PREFIX = '-Wcrl,' +# Linker action to create a directory and pass it to the linker as +# `-object_path_lto`. Special-cased since it has to run before the link. +OBJECT_PATH_LTO = 'object_path_lto' + +# The linker_driver.py is responsible for forwarding a linker invocation to +# the compiler driver, while processing special arguments itself. +# +# Usage: linker_driver.py clang++ main.o -L. -llib -o prog -Wcrl,dsym,out +# +# On Mac, the logical step of linking is handled by three discrete tools to +# perform the image link, debug info link, and strip. The linker_driver.py +# combines these three steps into a single tool. +# +# The command passed to the linker_driver.py should be the compiler driver +# invocation for the linker. It is first invoked unaltered (except for the +# removal of the special driver arguments, described below). Then the driver +# performs additional actions, based on these arguments: +# +# -Wcrl,installnametoolpath, +# Sets the path to the `install_name_tool` to run with +# -Wcrl,installnametool, in which case `xcrun` is not used to invoke it. 
+#
+# -Wcrl,installnametool,<arguments>
+#    After invoking the linker, this will run install_name_tool on the linker's
+#    output. |arguments| are comma-separated arguments to be passed to the
+#    install_name_tool command.
+#
+# -Wcrl,dsym,<dsym_path_prefix>
+#    After invoking the linker, this will run `dsymutil` on the linker's
+#    output, producing a dSYM bundle, stored at dsym_path_prefix. As an
+#    example, if the linker driver were invoked with:
+#      "... -o out/gn/obj/foo/libbar.dylib ... -Wcrl,dsym,out/gn ..."
+#    The resulting dSYM would be out/gn/libbar.dylib.dSYM/.
+#
+# -Wcrl,dsymutilpath,<dsymutil_path>
+#    Sets the path to the dsymutil to run with -Wcrl,dsym, in which case
+#    `xcrun` is not used to invoke it.
+#
+# -Wcrl,unstripped,<unstripped_path_prefix>
+#    After invoking the linker, and before strip, this will save a copy of
+#    the unstripped linker output in the directory unstripped_path_prefix.
+#
+# -Wcrl,strip,<strip_arguments>
+#    After invoking the linker, and optionally dsymutil, this will run
+#    the strip command on the linker's output. strip_arguments are
+#    comma-separated arguments to be passed to the strip command.
+#
+# -Wcrl,strippath,<strip_path>
+#    Sets the path to the strip to run with -Wcrl,strip, in which case
+#    `xcrun` is not used to invoke it.
+# -Wcrl,object_path_lto
+#    Creates temporary directory for LTO object files.
+
+
+class LinkerDriver(object):
+    def __init__(self, args):
+        """Creates a new linker driver.
+
+        Args:
+            args: list of string, Arguments to the script.
+        """
+        if len(args) < 2:
+            raise RuntimeError("Usage: linker_driver.py [linker-invocation]")
+        self._args = args
+
+        # List of linker driver actions. **The sort order of this list affects
+        # the order in which the actions are invoked.**
+        # The first item in the tuple is the argument's -Wcrl,<sub_argument>
+        # and the second is the function to invoke.
+        self._actions = [
+            ('installnametoolpath,', self.set_install_name_tool_path),
+            ('installnametool,', self.run_install_name_tool),
+            ('dsymutilpath,', self.set_dsymutil_path),
+            ('dsym,', self.run_dsymutil),
+            ('unstripped,', self.run_save_unstripped),
+            ('strippath,', self.set_strip_path),
+            ('strip,', self.run_strip),
+        ]
+
+        # Linker driver actions can modify these values.
+        self._install_name_tool_cmd = ['xcrun', 'install_name_tool']
+        self._dsymutil_cmd = ['xcrun', 'dsymutil']
+        self._strip_cmd = ['xcrun', 'strip']
+
+        # The linker output file, lazily computed in self._get_linker_output().
+        self._linker_output = None
+        # The temporary directory for intermediate LTO object files. If it
+        # exists, it will clean itself up on script exit.
+        self._object_path_lto = None
+
+    def run(self):
+        """Runs the linker driver, separating out the main compiler driver's
+        arguments from the ones handled by this class. It then invokes the
+        required tools, starting with the compiler driver to produce the linker
+        output.
+        """
+        # Collect arguments to the linker driver (this script) and remove them
+        # from the arguments being passed to the compiler driver.
+        linker_driver_actions = {}
+        compiler_driver_args = []
+        for index, arg in enumerate(self._args[1:]):
+            if arg.startswith(LINKER_DRIVER_ARG_PREFIX):
+                # Convert driver actions into a map of name => lambda to invoke.
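+                # For example, '-Wcrl,dsym,out/gn' becomes the pair
+                # ('dsym,', <lambda that calls run_dsymutil('out/gn')>).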
+                driver_action = self._process_driver_arg(arg)
+                assert driver_action[0] not in linker_driver_actions
+                linker_driver_actions[driver_action[0]] = driver_action[1]
+            else:
+                compiler_driver_args.append(arg)
+
+        if self._object_path_lto is not None:
+            compiler_driver_args.append('-Wl,-object_path_lto,{}'.format(
+                self._object_path_lto.name))
+        if self._get_linker_output() is None:
+            raise ValueError(
+                'Could not find path to linker output (-o or --output)')
+
+        # We want to link rlibs as --whole-archive if they are part of a
+        # unit test target. This is determined by switch
+        # `-LinkWrapper,add-whole-archive`.
+        compiler_driver_args = whole_archive.wrap_with_whole_archive(
+            compiler_driver_args)
+
+        linker_driver_outputs = [self._get_linker_output()]
+
+        try:
+            # Zero the mtime in OSO fields for deterministic builds.
+            # https://crbug.com/330262.
+            env = os.environ.copy()
+            env['ZERO_AR_DATE'] = '1'
+            # Run the linker by invoking the compiler driver.
+            subprocess.check_call(compiler_driver_args, env=env)
+
+            # Run the linker driver actions, in the order specified by the
+            # actions list.
+            for action in self._actions:
+                name = action[0]
+                if name in linker_driver_actions:
+                    linker_driver_outputs += linker_driver_actions[name]()
+        except:
+            # If a linker driver action failed, remove all the outputs to
+            # make the build step atomic. (A lazy `map()` here would be a
+            # no-op in Python 3 because the iterator is never consumed, so
+            # iterate explicitly.)
+            for output in linker_driver_outputs:
+                _remove_path(output)
+
+            # Re-report the original failure.
+            raise
+
+    def _get_linker_output(self):
+        """Returns the value of the output argument to the linker."""
+        if not self._linker_output:
+            for index, arg in enumerate(self._args):
+                if arg in ('-o', '-output', '--output'):
+                    self._linker_output = self._args[index + 1]
+                    break
+        return self._linker_output
+
+    def _process_driver_arg(self, arg):
+        """Processes a linker driver argument and returns a tuple containing
+        the name and a no-argument lambda to invoke for that linker driver
+        action.
+
+        Args:
+            arg: string, The linker driver argument.
+
+        Returns:
+            A 2-tuple:
+                0: The driver action name, as in |self._actions|.
+                1: A lambda that calls the linker driver action with its
+                   direct argument and returns a list of outputs from the
+                   action.
+        """
+        if not arg.startswith(LINKER_DRIVER_ARG_PREFIX):
+            raise ValueError('%s is not a linker driver argument' % (arg, ))
+
+        sub_arg = arg[len(LINKER_DRIVER_ARG_PREFIX):]
+        # Special-cased, since it needs to run before the link.
+        # TODO(lgrey): Remove if/when we start running `dsymutil`
+        # through the clang driver. See https://crbug.com/1324104
+        if sub_arg == OBJECT_PATH_LTO:
+            self._object_path_lto = tempfile.TemporaryDirectory(
+                dir=os.getcwd())
+            return (OBJECT_PATH_LTO, lambda: [])
+
+        for driver_action in self._actions:
+            (name, action) = driver_action
+            if sub_arg.startswith(name):
+                return (name, lambda: action(sub_arg[len(name):]))
+
+        raise ValueError('Unknown linker driver argument: %s' % (arg, ))
+
+    def set_install_name_tool_path(self, install_name_tool_path):
+        """Linker driver action for
+        -Wcrl,installnametoolpath,<install_name_tool_path>.
+
+        Sets the invocation command for install_name_tool, which allows the
+        caller to specify an alternate path. This action is always
+        processed before the run_install_name_tool action.
+
+        Args:
+            install_name_tool_path: string, The path to the
+                install_name_tool binary to run
+
+        Returns:
+            No output - this step is run purely for its side-effect.
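+
+        Example (the path is illustrative):
+            -Wcrl,installnametoolpath,/opt/llvm/bin/install_name_tool
+            sets self._install_name_tool_cmd to
+            ['/opt/llvm/bin/install_name_tool'].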
+ """ + self._install_name_tool_cmd = [install_name_tool_path] + return [] + + def run_install_name_tool(self, args_string): + """Linker driver action for -Wcrl,installnametool,. Invokes + install_name_tool on the linker's output. + + Args: + args_string: string, Comma-separated arguments for + `install_name_tool`. + + Returns: + No output - this step is run purely for its side-effect. + """ + command = list(self._install_name_tool_cmd) + command.extend(args_string.split(',')) + command.append(self._get_linker_output()) + subprocess.check_call(command) + return [] + + def run_dsymutil(self, dsym_path_prefix): + """Linker driver action for -Wcrl,dsym,. Invokes + dsymutil on the linker's output and produces a dsym file at |dsym_file| + path. + + Args: + dsym_path_prefix: string, The path at which the dsymutil output + should be located. + + Returns: + list of string, Build step outputs. + """ + if not len(dsym_path_prefix): + raise ValueError('Unspecified dSYM output file') + + linker_output = self._get_linker_output() + base = os.path.basename(linker_output) + dsym_out = os.path.join(dsym_path_prefix, base + '.dSYM') + + # Remove old dSYMs before invoking dsymutil. + _remove_path(dsym_out) + + tools_paths = _find_tools_paths(self._args) + if os.environ.get('PATH'): + tools_paths.append(os.environ['PATH']) + dsymutil_env = os.environ.copy() + dsymutil_env['PATH'] = ':'.join(tools_paths) + subprocess.check_call(self._dsymutil_cmd + + ['-o', dsym_out, linker_output], + env=dsymutil_env) + return [dsym_out] + + def set_dsymutil_path(self, dsymutil_path): + """Linker driver action for -Wcrl,dsymutilpath,. + + Sets the invocation command for dsymutil, which allows the caller to + specify an alternate dsymutil. This action is always processed before + the RunDsymUtil action. + + Args: + dsymutil_path: string, The path to the dsymutil binary to run + + Returns: + No output - this step is run purely for its side-effect. + """ + self._dsymutil_cmd = [dsymutil_path] + return [] + + def run_save_unstripped(self, unstripped_path_prefix): + """Linker driver action for -Wcrl,unstripped,. + Copies the linker output to |unstripped_path_prefix| before stripping. + + Args: + unstripped_path_prefix: string, The path at which the unstripped + output should be located. + + Returns: + list of string, Build step outputs. + """ + if not len(unstripped_path_prefix): + raise ValueError('Unspecified unstripped output file') + + base = os.path.basename(self._get_linker_output()) + unstripped_out = os.path.join(unstripped_path_prefix, + base + '.unstripped') + + shutil.copyfile(self._get_linker_output(), unstripped_out) + return [unstripped_out] + + def run_strip(self, strip_args_string): + """Linker driver action for -Wcrl,strip,. + + Args: + strip_args_string: string, Comma-separated arguments for `strip`. + + Returns: + list of string, Build step outputs. + """ + strip_command = list(self._strip_cmd) + if len(strip_args_string) > 0: + strip_command += strip_args_string.split(',') + strip_command.append(self._get_linker_output()) + subprocess.check_call(strip_command) + return [] + + def set_strip_path(self, strip_path): + """Linker driver action for -Wcrl,strippath,. + + Sets the invocation command for strip, which allows the caller to + specify an alternate strip. This action is always processed before the + RunStrip action. + + Args: + strip_path: string, The path to the strip binary to run + + Returns: + No output - this step is run purely for its side-effect. 
+ """ + self._strip_cmd = [strip_path] + return [] + + +def _find_tools_paths(full_args): + """Finds all paths where the script should look for additional tools.""" + paths = [] + for idx, arg in enumerate(full_args): + if arg in ['-B', '--prefix']: + paths.append(full_args[idx + 1]) + elif arg.startswith('-B'): + paths.append(arg[2:]) + elif arg.startswith('--prefix='): + paths.append(arg[9:]) + return paths + + +def _remove_path(path): + """Removes the file or directory at |path| if it exists.""" + if os.path.exists(path): + if os.path.isdir(path): + shutil.rmtree(path) + else: + os.unlink(path) + + +if __name__ == '__main__': + LinkerDriver(sys.argv).run() + sys.exit(0) diff --git a/toolchain/apple/toolchain.gni b/toolchain/apple/toolchain.gni new file mode 100644 index 000000000000..70d7c036392a --- /dev/null +++ b/toolchain/apple/toolchain.gni @@ -0,0 +1,831 @@ +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# TODO(brettw) Use "gcc_toolchain.gni" like the Linux toolchains. This requires +# some enhancements since the commands on Mac are slightly different than on +# Linux. + +import("//build/config/apple/symbols.gni") +import("//build/config/clang/clang.gni") +import("//build/config/compiler/compiler.gni") +import("//build/config/coverage/coverage.gni") +import("//build/config/rust.gni") +import("//build/toolchain/cc_wrapper.gni") +import("//build/toolchain/goma.gni") +import("//build/toolchain/rbe.gni") +import("//build/toolchain/toolchain.gni") +import("//build_overrides/build.gni") + +# TODO(crbug.com/1370527): This import is required to detect whether the +# build is for the catalyst environment in order to disable the hermetic +# swift compiler (as it does not include support for catalyst). Remove it +# once the support is available. +if (is_ios) { + import("//build/config/ios/config.gni") +} + +assert((target_os == "ios" && host_os == "mac") || host_os != "win") + +declare_args() { + # This controls whether whole module optimization is enabled when building + # Swift modules. If enabled, the compiler will compile the module as one + # unit, generating just one single object file. Otherwise, it will generate + # one object file per .swift file. If unspecified, will default to "true" + # for official builds, and "false" for all other builds. + swift_whole_module_optimization = -1 + + # If unspecified, will use the toolchain downloaded via deps. + swift_toolchain_path = -1 +} + +# TODO(crbug.com/1370527): Remove this and replace with `build_with_chromium` +# once the support for catalyst is available in the hermetic swift compiler. +_can_use_hermetic_swift = + build_with_chromium && is_ios && target_environment != "catalyst" + +if (swift_toolchain_path == -1) { + if (_can_use_hermetic_swift) { + # Version of the hermetic compiler. Needs to be updated when a new version of + # the compiler is rolled to ensure that all outputs are regenerated. It must + # be kept in sync with the `version` of `third_party/swift-toolchain` in + # //DEPS. + swiftc_version = "swift-5.7-release" + + # Use the hermetic swift toolchain. + swift_toolchain_path = "//third_party/swift-toolchain/" + } else { + swift_toolchain_path = "" + } +} + +if (swift_whole_module_optimization == -1) { + swift_whole_module_optimization = is_official_build +} + +# When implementing tools using Python scripts, a TOOL_VERSION=N env +# variable is placed in front of the command. 
+# The N should be incremented whenever the script is changed, so that the
+# build system rebuilds all edges that utilize the script. Ideally this
+# should be changed to use proper input-dirty checking, but that could be
+# expensive. Instead, use a script to get the tool scripts' modification
+# time to use as the version. This won't cause a re-generation of GN files
+# when the tool script changes but it will cause edges to be marked as
+# dirty if the ninja files are regenerated. See https://crbug.com/619083
+# for details. A proper fix would be to have inputs to tools
+# (https://crbug.com/621119).
+tool_versions =
+    exec_script("get_tool_mtime.py",
+                rebase_path([
+                              "//build/toolchain/apple/filter_libtool.py",
+                              "//build/toolchain/apple/linker_driver.py",
+                              "//build/toolchain/ios/compile_xcassets.py",
+                              "//build/toolchain/ios/swiftc.py",
+                            ],
+                            root_build_dir),
+                "trim scope")
+
+# Shared toolchain definition. Invocations should set current_os to set the
+# build args in this definition. This is titled "single_apple_toolchain"
+# because it makes exactly one toolchain. Callers will normally want to
+# invoke instead "apple_toolchain" which may make an additional toolchain
+# without sanitizers.
+template("single_apple_toolchain") {
+  toolchain(target_name) {
+    # When invoking this toolchain not as the default one, these args will
+    # be passed to the build. They are ignored when this is the default
+    # toolchain.
+    assert(defined(invoker.toolchain_args),
+           "Toolchains must declare toolchain_args")
+    toolchain_args = {
+      # Populate toolchain args from the invoker.
+      forward_variables_from(invoker.toolchain_args, "*")
+
+      # The host toolchain value computed by the default toolchain's setup
+      # needs to be passed through unchanged to all secondary toolchains to
+      # ensure that it's always the same, regardless of the values that may
+      # be set on those toolchains.
+      host_toolchain = host_toolchain
+
+      # Similarly for the sanitizer-free host toolchain, which can be used
+      # to make .dylibs that will successfully load into prebuilt tools.
+      host_toolchain_no_sanitizers = host_toolchain_no_sanitizers
+    }
+
+    # When the invoker has explicitly overridden use_remoteexec, use_goma,
+    # or cc_wrapper in the toolchain args, use those values; otherwise,
+    # default to the global ones. This works because the only reasonable
+    # override that toolchains might supply for these values is to
+    # force-disable them.
+    if (defined(toolchain_args.use_remoteexec)) {
+      toolchain_uses_remoteexec = toolchain_args.use_remoteexec
+    } else {
+      toolchain_uses_remoteexec = use_remoteexec
+    }
+    if (defined(toolchain_args.use_goma)) {
+      toolchain_uses_goma = toolchain_args.use_goma
+    } else {
+      toolchain_uses_goma = use_goma
+    }
+    if (defined(toolchain_args.cc_wrapper)) {
+      toolchain_cc_wrapper = toolchain_args.cc_wrapper
+    } else {
+      toolchain_cc_wrapper = cc_wrapper
+    }
+    assert(!(toolchain_uses_remoteexec && toolchain_uses_goma),
+           "Goma and re-client can't be used together.")
+    assert(!(toolchain_cc_wrapper != "" && toolchain_uses_remoteexec),
+           "re-client and cc_wrapper can't be used together.")
+    assert(!(toolchain_cc_wrapper != "" && toolchain_uses_goma),
+           "Goma and cc_wrapper can't be used together.")
+
+    if (defined(toolchain_args.use_lld)) {
+      toolchain_uses_lld = toolchain_args.use_lld
+    } else {
+      toolchain_uses_lld = use_lld
+    }
+
+    # The value of all global variables (such as `is_component_build`) is
+    # the one from the default toolchain when evaluating a secondary
+    # toolchain (see https://crbug.com/gn/286).
+    # This means that the value may change when evaluating target/configs
+    # in the new toolchain if the variable's default value depends on a
+    # variable set in `toolchain_args`.
+    #
+    # For this reason, "ios" needs to override `is_component_build` as its
+    # default value depends on `current_os`. Use the overridden value if it
+    # is set in `toolchain_args`.
+    if (defined(toolchain_args.is_component_build)) {
+      toolchain_is_component_build = toolchain_args.is_component_build
+    } else {
+      toolchain_is_component_build = is_component_build
+    }
+
+    prefix = rebase_path("$clang_base_path/bin/", root_build_dir)
+    _cc = "${prefix}clang"
+    _cxx = "${prefix}clang++"
+
+    swiftmodule_switch = "-Wl,-add_ast_path,"
+
+    # Compute the compiler prefix.
+    if (toolchain_uses_remoteexec) {
+      if (defined(toolchain_args.rbe_cc_cfg_file)) {
+        toolchain_rbe_cc_cfg_file = toolchain_args.rbe_cc_cfg_file
+      } else {
+        toolchain_rbe_cc_cfg_file = rbe_cc_cfg_file
+      }
+
+      # C/C++ (clang) rewrapper prefix to use when use_remoteexec is true.
+      compiler_prefix = "${rbe_bin_dir}/rewrapper -cfg=${toolchain_rbe_cc_cfg_file} -exec_root=${rbe_exec_root} "
+    } else if (toolchain_uses_goma) {
+      assert(toolchain_cc_wrapper == "",
+             "Goma and cc_wrapper can't be used together.")
+      compiler_prefix = "$goma_dir/gomacc "
+      if (use_goma_rust) {
+        rust_compiler_prefix = compiler_prefix
+      }
+    } else if (toolchain_cc_wrapper != "") {
+      compiler_prefix = toolchain_cc_wrapper + " "
+    } else {
+      compiler_prefix = ""
+    }
+
+    cc = compiler_prefix + _cc
+    cxx = compiler_prefix + _cxx
+    ld = _cxx
+
+    # Set the explicit search path for clang++ so it uses the right linker
+    # binary.
+    if (!toolchain_uses_lld) {
+      ld += " -B " + invoker.bin_path
+    }
+
+    if (defined(toolchain_args.coverage_instrumentation_input_file)) {
+      toolchain_coverage_instrumentation_input_file =
+          toolchain_args.coverage_instrumentation_input_file
+    } else {
+      toolchain_coverage_instrumentation_input_file =
+          coverage_instrumentation_input_file
+    }
+    _use_clang_coverage_wrapper =
+        toolchain_coverage_instrumentation_input_file != ""
+    if (_use_clang_coverage_wrapper) {
+      _coverage_wrapper =
+          rebase_path("//build/toolchain/clang_code_coverage_wrapper.py",
+                      root_build_dir) + " --files-to-instrument=" +
+          rebase_path(toolchain_coverage_instrumentation_input_file,
+                      root_build_dir) + " --target-os=" + target_os
+      cc = "$python_path $_coverage_wrapper ${cc}"
+      cxx = "$python_path $_coverage_wrapper ${cxx}"
+    }
+
+    linker_driver =
+        "TOOL_VERSION=${tool_versions.linker_driver} " +
+        rebase_path("//build/toolchain/apple/linker_driver.py",
+                    root_build_dir)
+
+    # Specify an explicit path for the strip binary.
+    _strippath = invoker.bin_path + "strip"
+    _installnametoolpath = invoker.bin_path + "install_name_tool"
+    linker_driver += " -Wcrl,strippath,${_strippath} -Wcrl,installnametoolpath,${_installnametoolpath}"
+    _enable_dsyms = enable_dsyms
+    _save_unstripped_output = save_unstripped_output
+
+    # Make these apply to all tools below.
+    lib_switch = "-l"
+    lib_dir_switch = "-L"
+
+    # Object files go in this directory. Use label_name instead of
+    # target_output_name since labels will generally have no spaces and
+    # will be unique in the directory.
+    object_subdir = "{{target_out_dir}}/{{label_name}}"
+
+    # If dSYMs are enabled, this flag will be added to the link tools.
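+    # As an illustration, for the default toolchain this expands to roughly
+    #   -Wcrl,dsym,. -Wcrl,dsymutilpath,../../tools/clang/dsymutil/bin/dsymutil
+    # (both values are resolved relative to the build directory).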
+ if (_enable_dsyms) { + dsym_switch = " -Wcrl,dsym,{{root_out_dir}} " + dsym_switch += "-Wcrl,dsymutilpath," + + rebase_path("//tools/clang/dsymutil/bin/dsymutil", + root_build_dir) + " " + + dsym_output_dir = + "{{root_out_dir}}/{{target_output_name}}{{output_extension}}.dSYM" + dsym_output = [ + "$dsym_output_dir/Contents/Info.plist", + "$dsym_output_dir/Contents/Resources/DWARF/" + + "{{target_output_name}}{{output_extension}}", + ] + } else { + dsym_switch = "" + } + + if (_save_unstripped_output) { + _unstripped_output = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}.unstripped" + } + + if (toolchain_has_rust) { + if (!defined(rust_compiler_prefix)) { + rust_compiler_prefix = "" + } + rustc_bin = rebase_path("${rust_sysroot}/bin/rustc", root_build_dir) + rustc = "$rust_compiler_prefix${rustc_bin}" + rust_sysroot_relative_to_out = rebase_path(rust_sysroot, root_out_dir) + rustc_wrapper = rebase_path("//build/rust/rustc_wrapper.py") + + tool("rust_staticlib") { + libname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + rspfile = "$libname.rsp" + depfile = "$libname.d" + + default_output_extension = ".a" + output_prefix = "lib" + default_output_dir = "{{root_out_dir}}" + description = "RUST(STATICLIB) {{output}}" + outputs = [ libname ] + + # TODO(danakj): When `!toolchain_uses_lld` do we need to specify a path + # to libtool like the "alink" rule? + + rspfile_content = "{{rustdeps}} {{externs}}" + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"$_cxx\" $rustc_common_args --emit=dep-info=$depfile,link -o $libname LDFLAGS RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + + tool("rust_rlib") { + # We must always prefix with `lib` even if the library already starts + # with that prefix or else our stdlib is unable to find libc.rlib (or + # actually liblibc.rlib). + rlibname = + "{{output_dir}}/lib{{target_output_name}}{{output_extension}}" + depfile = "$rlibname.d" + + # Do not use rsp files in this (common) case because they occupy the + # ninja main thread, and {{rlibs}} have shorter command lines than + # fully linked targets. + + default_output_extension = ".rlib" + + # This is prefixed unconditionally in `rlibname`. + # output_prefix = "lib" + default_output_dir = "{{root_out_dir}}" + description = "RUST {{output}}" + outputs = [ rlibname ] + + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile -- -Clinker=\"$_cxx\" $rustc_common_args {{rustdeps}} {{externs}} --emit=dep-info=$depfile,link -o $rlibname LDFLAGS RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + + tool("rust_bin") { + exename = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + rspfile = "$exename.rsp" + depfile = "$exename.d" + pool = "//build/toolchain:link_pool($default_toolchain)" + + # TODO(danakj): solink can generate TOC files for re-exporting library + # symbols, and we should do the same here. + + default_output_dir = "{{root_out_dir}}" + description = "RUST(BIN) {{output}}" + outputs = [ exename ] + + # TODO(danakj): Support dsym_switch like C++ targets. 
+ # link_command += dsym_switch + # if (_enable_dsyms) { + # outputs += dsym_output + # } + # if (_save_unstripped_output) { + # outputs += [ _unstripped_output ] + # } + + rspfile_content = "{{rustdeps}} {{externs}}" + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${_cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $exename LDFLAGS {{ldflags}} RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + + tool("rust_cdylib") { + dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + rspfile = "$dllname.rsp" + depfile = "$dllname.d" + pool = "//build/toolchain:link_pool($default_toolchain)" + + # TODO(danakj): solink can generate TOC files for re-exporting library + # symbols, and we should do the same here. + + default_output_extension = ".dylib" + output_prefix = "lib" + default_output_dir = "{{root_out_dir}}" + description = "RUST(CDYLIB) {{output}}" + outputs = [ dllname ] + + # TODO(danakj): Support dsym_switch like C++ targets. + # link_command += dsym_switch + # if (_enable_dsyms) { + # outputs += dsym_output + # } + # if (_save_unstripped_output) { + # outputs += [ _unstripped_output ] + # } + + rspfile_content = "{{rustdeps}} {{externs}}" + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${_cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $dllname LDFLAGS {{ldflags}} RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + + tool("rust_macro") { + dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + rspfile = "$dllname.rsp" + depfile = "$dllname.d" + pool = "//build/toolchain:link_pool($default_toolchain)" + + # TODO(danakj): solink can generate TOC files for re-exporting library + # symbols, and we should do the same here. + + default_output_extension = ".dylib" + output_prefix = "lib" + default_output_dir = "{{root_out_dir}}" + description = "RUST(MACRO) {{output}}" + outputs = [ dllname ] + + # TODO(danakj): Support dsym_switch like C++ targets. + # link_command += dsym_switch + # if (_enable_dsyms) { + # outputs += dsym_output + # } + # if (_save_unstripped_output) { + # outputs += [ _unstripped_output ] + # } + + rspfile_content = "{{rustdeps}} {{externs}}" + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${_cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $dllname LDFLAGS {{ldflags}} RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + } + + tool("cc") { + depfile = "{{output}}.d" + precompiled_header_type = "gcc" + command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}" + depsformat = "gcc" + description = "CC {{output}}" + outputs = [ "$object_subdir/{{source_name_part}}.o" ] + } + + tool("cxx") { + depfile = "{{output}}.d" + precompiled_header_type = "gcc" + command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} -c {{source}} -o {{output}}" + depsformat = "gcc" + description = "CXX {{output}}" + outputs = [ "$object_subdir/{{source_name_part}}.o" ] + } + + tool("asm") { + # For GCC we can just use the C compiler to compile assembly. 
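+      # A fully expanded command then looks roughly like (illustrative
+      # paths, elided flags):
+      #   clang -MMD -MF obj/foo/bar.o.d ... -c ../../foo/bar.S -o obj/foo/bar.o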
+ depfile = "{{output}}.d" + command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{asmflags}} -c {{source}} -o {{output}}" + depsformat = "gcc" + description = "ASM {{output}}" + outputs = [ "$object_subdir/{{source_name_part}}.o" ] + } + + tool("objc") { + depfile = "{{output}}.d" + precompiled_header_type = "gcc" + command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{framework_dirs}} {{cflags}} {{cflags_objc}} -c {{source}} -o {{output}}" + depsformat = "gcc" + description = "OBJC {{output}}" + outputs = [ "$object_subdir/{{source_name_part}}.o" ] + } + + tool("objcxx") { + depfile = "{{output}}.d" + precompiled_header_type = "gcc" + command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{framework_dirs}} {{cflags}} {{cflags_objcc}} -c {{source}} -o {{output}}" + depsformat = "gcc" + description = "OBJCXX {{output}}" + outputs = [ "$object_subdir/{{source_name_part}}.o" ] + } + + tool("alink") { + rspfile = "{{output}}.rsp" + rspfile_content = "{{inputs}}" + + if (!toolchain_uses_lld) { + script = rebase_path("//build/toolchain/apple/filter_libtool.py", + root_build_dir) + + # Specify explicit path for libtool. + libtool = invoker.bin_path + "libtool" + command = "rm -f {{output}} && TOOL_VERSION=${tool_versions.filter_libtool} $python_path $script $libtool -static -D {{arflags}} -o {{output}} @$rspfile" + description = "LIBTOOL-STATIC {{output}}" + } else { + ar = "${prefix}llvm-ar" + command = "\"$ar\" {{arflags}} -r -c -s -D {{output}} @$rspfile" + + # Remove the output file first so that ar doesn't try to modify the + # existing file. + command = "rm -f {{output}} && $command" + description = "AR {{output}}" + } + outputs = [ "{{output_dir}}/{{target_output_name}}{{output_extension}}" ] + default_output_dir = "{{target_out_dir}}" + default_output_extension = ".a" + output_prefix = "lib" + } + + tool("solink") { + # E.g. "./libfoo.dylib": + dylib = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + rspfile = dylib + ".rsp" + pool = "//build/toolchain:link_pool($default_toolchain)" + + # These variables are not built into GN but are helpers that implement + # (1) linking to produce a .dylib, (2) extracting the symbols from that + # file to a temporary file, (3) if the temporary file has differences from + # the existing .TOC file, overwrite it, otherwise, don't change it. + # + # As a special case, if the library reexports symbols from other dynamic + # libraries, we always update the .TOC and skip the temporary file and + # diffing steps, since that library always needs to be re-linked. + tocname = dylib + ".TOC" + temporary_tocname = dylib + ".tmp" + + # Use explicit paths to binaries. The binaries present on the default + # search path in /usr/bin are thin wrappers around xcrun, which requires a + # full CommandLineTools or Xcode install, and still may not choose the + # appropriate binary if there are multiple installs. + if (host_os == "mac") { + nm = invoker.bin_path + "nm" + otool = invoker.bin_path + "otool" + } else { + nm = "${prefix}llvm-nm" + otool = "${prefix}llvm-otool" + } + + does_reexport_command = "[ ! -e \"$dylib\" -o ! -e \"$tocname\" ] || $otool -l \"$dylib\" | grep -q LC_REEXPORT_DYLIB" + + link_command = "$linker_driver $ld -shared " + if (toolchain_is_component_build) { + link_command += " -Wl,-install_name,@rpath/\"{{target_output_name}}{{output_extension}}\" " + } + link_command += dsym_switch + link_command += "{{ldflags}} -o \"$dylib\" \"@$rspfile\"" + + replace_command = "if ! 
cmp -s \"$temporary_tocname\" \"$tocname\"; then mv \"$temporary_tocname\" \"$tocname\"" + extract_toc_command = "{ $otool -l \"$dylib\" | grep LC_ID_DYLIB -A 5; $nm -gPp \"$dylib\" | cut -f1-2 -d' ' | grep -v U\$\$; true; }" + + command = "if $does_reexport_command ; then $link_command && $extract_toc_command > \"$tocname\"; else $link_command && $extract_toc_command > \"$temporary_tocname\" && $replace_command ; fi; fi" + + rspfile_content = "{{inputs}} {{frameworks}} {{swiftmodules}} {{solibs}} {{libs}} {{rlibs}}" + + description = "SOLINK {{output}}" + + # Use this for {{output_extension}} expansions unless a target manually + # overrides it (in which case {{output_extension}} will be what the target + # specifies). + default_output_dir = "{{root_out_dir}}" + default_output_extension = ".dylib" + + output_prefix = "lib" + + # Since the above commands only updates the .TOC file when it changes, ask + # Ninja to check if the timestamp actually changed to know if downstream + # dependencies should be recompiled. + restat = true + + # Tell GN about the output files. It will link to the dylib but use the + # tocname for dependency management. + outputs = [ + dylib, + tocname, + ] + link_output = dylib + depend_output = tocname + + if (_enable_dsyms) { + outputs += dsym_output + } + if (_save_unstripped_output) { + outputs += [ _unstripped_output ] + } + } + + tool("solink_module") { + # E.g. "./libfoo.so": + sofile = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + rspfile = sofile + ".rsp" + pool = "//build/toolchain:link_pool($default_toolchain)" + + link_command = + "$linker_driver $ld -bundle {{ldflags}} -o \"$sofile\" \"@$rspfile\"" + link_command += dsym_switch + command = link_command + + rspfile_content = "{{inputs}} {{frameworks}} {{swiftmodules}} {{solibs}} {{libs}} {{rlibs}}" + + description = "SOLINK_MODULE {{output}}" + + # Use this for {{output_extension}} expansions unless a target manually + # overrides it (in which case {{output_extension}} will be what the target + # specifies). + default_output_dir = "{{root_out_dir}}" + default_output_extension = ".so" + + outputs = [ sofile ] + + if (_enable_dsyms) { + outputs += dsym_output + } + if (_save_unstripped_output) { + outputs += [ _unstripped_output ] + } + } + + tool("link") { + outfile = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + rspfile = "$outfile.rsp" + pool = "//build/toolchain:link_pool($default_toolchain)" + + command = "$linker_driver $ld $dsym_switch {{ldflags}} -o \"$outfile\" \"@$rspfile\"" + description = "LINK $outfile" + rspfile_content = "{{inputs}} {{frameworks}} {{swiftmodules}} {{solibs}} {{libs}} {{rlibs}}" + outputs = [ outfile ] + + if (_enable_dsyms) { + outputs += dsym_output + } + if (_save_unstripped_output) { + outputs += [ _unstripped_output ] + } + + default_output_dir = "{{root_out_dir}}" + } + + # These two are really entirely generic, but have to be repeated in + # each toolchain because GN doesn't allow a template to be used here. + # See //build/toolchain/toolchain.gni for details. + tool("stamp") { + command = stamp_command + description = stamp_description + } + tool("copy") { + command = copy_command + description = copy_description + } + + tool("copy_bundle_data") { + # copy_command use hardlink if possible but this does not work with + # directories. Also when running EG2 tests from Xcode, Xcode tries to + # copy some files into the application bundle which fails if source + # and destination are hardlinked together. 
+      #
+      # Instead, use clonefile to copy the files, which is as efficient as
+      # hardlinking but ensures the files have distinct metadata (thus
+      # avoiding the error with ditto; see https://crbug.com/1042182).
+      if (host_os == "mac") {
+        command = "rm -rf {{output}} && /bin/cp -Rc {{source}} {{output}}"
+      } else {
+        command = "rm -rf {{output}} && /bin/cp -Rld {{source}} {{output}}"
+      }
+      description = "COPY_BUNDLE_DATA {{source}} {{output}}"
+      pool = "//build/toolchain/apple:bundle_pool($default_toolchain)"
+    }
+
+    # Swift is only used on iOS, not macOS. We want to minimize the number
+    # of Xcode-based tools used by the macOS toolchain, so we intentionally
+    # disallow future uses of Swift on macOS. https://crbug.com/965663.
+    if (toolchain_args.current_os == "ios") {
+      tool("swift") {
+        _tool = rebase_path("//build/toolchain/ios/swiftc.py",
+                            root_build_dir)
+
+        depfile = "{{target_out_dir}}/{{module_name}}.d"
+        depsformat = "gcc"
+
+        outputs = [
+          # The module needs to be the first output listed. The blank line
+          # after the module is required to prevent `gn format` from
+          # changing the file order.
+          "{{target_gen_dir}}/{{module_name}}.swiftmodule",
+
+          "{{target_gen_dir}}/{{target_output_name}}.h",
+          "{{target_gen_dir}}/{{module_name}}.swiftdoc",
+          "{{target_gen_dir}}/{{module_name}}.swiftsourceinfo",
+        ]
+
+        # Additional flags passed to the wrapper script but that are only
+        # set conditionally.
+        _extra_flags = ""
+
+        if (swift_whole_module_optimization) {
+          _extra_flags += " -whole-module-optimization"
+          _objects_dir = "{{target_out_dir}}"
+
+          outputs += [ "$_objects_dir/{{module_name}}.o" ]
+        } else {
+          _objects_dir = "{{target_out_dir}}/{{label_name}}"
+
+          partial_outputs = [ "$_objects_dir/{{source_name_part}}.o" ]
+        }
+
+        _env_vars = "TOOL_VERSION=${tool_versions.swiftc}"
+        if (invoker.sdk_developer_dir != "") {
+          _env_vars += " DEVELOPER_DIR=${toolchain_args.sdk_developer_dir}"
+        }
+
+        # Starting with version 5.6, the Swift compiler always generates
+        # precompiled headers. In earlier versions, they were only used
+        # when bridging headers and whole module optimization were enabled,
+        # and they could be disabled with the parameter
+        # `-disable-bridging-pch`.
+        #
+        # The precompiled headers are binary files (i.e. they are not
+        # regular Objective-C header files and cannot be loaded as such).
+        #
+        # There is a hidden requirement that the compiler needs to
+        # be told where to save those .pch files (via the parameter
+        # `-pch-output-dir $dir`). If this parameter is not passed, the
+        # compiler will silently write them at an incorrect location,
+        # leading later passes to try to load those .pch files as either
+        # regular header files (.h) or object files (.o), causing
+        # compilation failures.
+        #
+        # List the directory where the precompiled header is generated
+        # as an output, but do not list the .pch file itself. This is
+        # because the names include two hashes (one corresponding to
+        # the compiler revision, and the other probably derived from
+        # the module itself) that are difficult to generate.
+        #
+        # Still, we want to avoid creating a directory that has the same
+        # name as a file generated by another rule, so explicitly list
+        # the directory in `outputs` so that gn can warn if it conflicts
+        # with another output file.
+
+        _pch_output_dir = "{{target_out_dir}}/{{module_name}}:pch/"
+        outputs += [ _pch_output_dir ]
+
+        # Include the version of the compiler on the command-line.
+        # This causes `ninja` to consider all the compilation output to be
+        # dirty when the version changes.
+        if (defined(swiftc_version)) {
+          _extra_flags += " -swiftc-version $swiftc_version"
+        }
+
+        # Include the version of Xcode on the command-line (if specified
+        # via toolchain_args). This causes `ninja` to consider all the
+        # compilation outputs to be dirty when the version changes.
+        #
+        # This is required because module dependencies sometimes change
+        # between different versions of Xcode (e.g. when moving from Xcode
+        # 14 beta 6 to Xcode 14 RC). If the swiftmodules are not rebuilt
+        # when the version changes, they may encode dependencies on now
+        # non-existent frameworks, ultimately causing linker failures.
+        if (defined(toolchain_args.xcode_build)) {
+          _extra_flags += " -xcode-version ${toolchain_args.xcode_build}"
+        }
+
+        if (swift_toolchain_path != "") {
+          _extra_flags += " -swift-toolchain-path " +
+                          rebase_path(swift_toolchain_path, root_build_dir)
+        }
+
+        # The Swift compiler assumes that the generated header will be used
+        # by Objective-C code compiled with module support enabled
+        # (-fmodules).
+        #
+        # The import looks like this in the generated header:
+        #
+        #   #if __has_feature(modules)
+        #   @import UIKit;
+        #   #endif
+        #
+        # As Chromium code is compiled without support for modules (i.e.
+        # the code is compiled without `-fmodules`), the dependent modules
+        # are not imported from the generated header, which causes
+        # compilation failure if the client code does not first import the
+        # required modules (see https://crbug.com/1316061 for details).
+        #
+        # Secondly, clang ToT always returns `1` when
+        # `__has_feature(modules)` is evaluated, even when building with
+        # `-fno-modules` and `-std=c++20` (see https://crbug.com/1284275
+        # for details). This causes the `@import` lines to be reached and
+        # the build to fail (since the support for modules is not enabled).
+        #
+        # Instruct swiftc.py to rewrite the generated header to use the old
+        # import pre-processor instructions (#import <UIKit/UIKit.h>) to
+        # work around those two issues.
+        _extra_flags += " -fix-module-imports"
+
+        command =
+            "$_env_vars $python_path $_tool -module-name {{module_name}} " +
+            "-root-dir " + rebase_path("//", root_build_dir) + " " +
+            "-object-dir $_objects_dir -pch-output-dir $_pch_output_dir " +
+            "-module-path {{target_gen_dir}}/{{module_name}}.swiftmodule " +
+            "-header-path {{target_gen_dir}}/{{target_output_name}}.h " +
+            "-depfile {{target_out_dir}}/{{module_name}}.d " +
+            "-bridge-header {{bridge_header}} $_extra_flags " +
+            "{{swiftflags}} {{include_dirs}} {{module_dirs}} {{inputs}}"
+      }
+    }
+
+    # xcassets are only used on iOS, not macOS. We want to minimize the
+    # number of Xcode-based tools used by the macOS toolchain, so we
+    # intentionally disallow future uses of xcassets on macOS.
+    # https://crbug.com/965663.
+ if (toolchain_args.current_os == "ios") { + tool("compile_xcassets") { + _tool = rebase_path("//build/toolchain/ios/compile_xcassets.py", + root_build_dir) + + _env_vars = "TOOL_VERSION=${tool_versions.compile_xcassets}" + if (invoker.sdk_developer_dir != "") { + _env_vars += " DEVELOPER_DIR=${toolchain_args.sdk_developer_dir}" + } + + command = + "$_env_vars $python_path $_tool " + + "-p '${toolchain_args.current_os}' " + + "-e '${invoker.target_environment}' " + + "-t '${invoker.deployment_target}' " + + "-T '{{bundle_product_type}}' " + + "-P '{{bundle_partial_info_plist}}' " + "-o {{output}} {{inputs}}" + + description = "COMPILE_XCASSETS {{output}}" + pool = "//build/toolchain/apple:bundle_pool($default_toolchain)" + } + } + + tool("action") { + pool = "//build/toolchain:action_pool($default_toolchain)" + } + } +} + +# Makes a single Apple toolchain, or possibly two if we need a +# sanitizer-free equivalent. +template("apple_toolchain") { + single_apple_toolchain(target_name) { + assert(defined(invoker.toolchain_args), + "Toolchains must declare toolchain_args") + forward_variables_from(invoker, + "*", + [ + "visibility", + "test_only", + ]) + + # No need to forward visibility and test_only as they apply to targets not + # toolchains, but presubmit checks require that we explicitly exclude them + } + + if (using_sanitizer) { + # Make an additional toolchain with no sanitizers. + single_apple_toolchain("${target_name}_no_sanitizers") { + assert(defined(invoker.toolchain_args), + "Toolchains must declare toolchain_args") + forward_variables_from(invoker, + "*", + [ + "toolchain_args", + "visibility", + "test_only", + ]) + toolchain_args = { + # Populate toolchain args from the invoker. + forward_variables_from(invoker.toolchain_args, "*") + toolchain_disables_sanitizers = true + } + } + } +} diff --git a/toolchain/cc_wrapper.gni b/toolchain/cc_wrapper.gni new file mode 100644 index 000000000000..d70fa7f23427 --- /dev/null +++ b/toolchain/cc_wrapper.gni @@ -0,0 +1,43 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/toolchain/goma.gni") +import("//build/toolchain/rbe.gni") + +# Defines the configuration of cc wrapper +# ccache: a c/c++ compiler cache which can greatly reduce recompilation times. +# icecc, distcc: it takes compile jobs from a build and distributes them among +# remote machines allowing a parallel build. +# +# TIPS +# +# 1) ccache +# Set clang_use_chrome_plugins=false if using ccache 3.1.9 or earlier, since +# these versions don't support -Xclang. (3.1.10 and later will silently +# ignore -Xclang, so it doesn't matter if you disable clang_use_chrome_plugins +# or not). +# +# Use ccache 3.2 or later to avoid clang unused argument warnings: +# https://bugzilla.samba.org/show_bug.cgi?id=8118 +# +# To avoid -Wparentheses-equality clang warnings, at some cost in terms of +# speed, you can do: +# export CCACHE_CPP2=yes +# +# 2) icecc +# Set clang_use_chrome_plugins=false because icecc cannot distribute custom +# clang libraries. +# +# To use icecc and ccache together, set cc_wrapper = "ccache" with +# export CCACHE_PREFIX=icecc + +declare_args() { + # Set to "ccache", "icecc" or "distcc". Probably doesn't work on windows. 
+ cc_wrapper = "" +} + +assert(!use_goma || cc_wrapper == "", + "use_goma and cc_wrapper can not be used together.") +assert(!use_remoteexec || cc_wrapper == "", + "use_remoteexec and cc_wrapper can not be used together.") diff --git a/toolchain/clang_code_coverage_wrapper.py b/toolchain/clang_code_coverage_wrapper.py new file mode 100755 index 000000000000..5c9090114013 --- /dev/null +++ b/toolchain/clang_code_coverage_wrapper.py @@ -0,0 +1,240 @@ +#!/usr/bin/env python3 +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Removes code coverage flags from invocations of the Clang C/C++ compiler. + +If the GN arg `use_clang_coverage=true`, this script will be invoked by default. +GN will add coverage instrumentation flags to almost all source files. + +This script is used to remove instrumentation flags from a subset of the source +files. By default, it will not remove flags from any files. If the option +--files-to-instrument is passed, this script will remove flags from all files +except the ones listed in --files-to-instrument. + +This script also contains hard-coded exclusion lists of files to never +instrument, indexed by target operating system. Files in these lists have their +flags removed in both modes. The OS can be selected with --target-os. + +This script also contains hard-coded force lists of files to always instrument, +indexed by target operating system. Files in these lists never have their flags +removed in either mode. The OS can be selected with --target-os. + +The order of precedence is: force list, exclusion list, --files-to-instrument. + +The path to the coverage instrumentation input file should be relative to the +root build directory, and the file consists of multiple lines where each line +represents a path to a source file, and the specified paths must be relative to +the root build directory. e.g. ../../base/task/post_task.cc for build +directory 'out/Release'. The paths should be written using OS-native path +separators for the current platform. + +One caveat with this compiler wrapper is that it may introduce unexpected +behaviors in incremental builds when the file path to the coverage +instrumentation input file changes between consecutive runs, so callers of this +script are strongly advised to always use the same path such as +"${root_build_dir}/coverage_instrumentation_input.txt". + +It's worth noting on try job builders, if the contents of the instrumentation +file changes so that a file doesn't need to be instrumented any longer, it will +be recompiled automatically because if try job B runs after try job A, the files +that were instrumented in A will be updated (i.e., reverted to the checked in +version) in B, and so they'll be considered out of date by ninja and recompiled. + +Example usage: + clang_code_coverage_wrapper.py \\ + --files-to-instrument=coverage_instrumentation_input.txt +""" + + +import argparse +import os +import subprocess +import sys + +# Flags used to enable coverage instrumentation. +# Flags should be listed in the same order that they are added in +# build/config/coverage/BUILD.gn +_COVERAGE_FLAGS = [ + '-fprofile-instr-generate', + '-fcoverage-mapping', + # Following experimental flags remove unused header functions from the + # coverage mapping data embedded in the test binaries, and the reduction + # of binary size enables building Chrome's large unit test targets on + # MacOS. Please refer to crbug.com/796290 for more details. 
+ '-mllvm', + '-limited-coverage-experimental=true', +] + +# Files that should not be built with coverage flags by default. +_DEFAULT_COVERAGE_EXCLUSION_LIST = [ + # TODO(crbug.com/1051561): angle_unittests affected by coverage. + '../../base/message_loop/message_pump_default.cc', + '../../base/message_loop/message_pump_libevent.cc', + '../../base/message_loop/message_pump_win.cc', + '../../base/task/sequence_manager/thread_controller_with_message_pump_impl.cc', #pylint: disable=line-too-long +] + +# Map of exclusion lists indexed by target OS. +# If no target OS is defined, or one is defined that doesn't have a specific +# entry, use _DEFAULT_COVERAGE_EXCLUSION_LIST. +_COVERAGE_EXCLUSION_LIST_MAP = { + 'android': [ + # This file caused webview native library failed on arm64. + '../../device/gamepad/dualshock4_controller.cc', + ], + 'fuchsia': [ + # TODO(crbug.com/1174725): These files caused clang to crash while + # compiling them. + '../../base/allocator/partition_allocator/pcscan.cc', + '../../third_party/skia/src/core/SkOpts.cpp', + '../../third_party/skia/src/opts/SkOpts_hsw.cpp', + '../../third_party/skia/third_party/skcms/skcms.cc', + ], + 'linux': [ + # These files caused a static initializer to be generated, which + # shouldn't. + # TODO(crbug.com/990948): Remove when the bug is fixed. + '../../chrome/browser/media/router/providers/cast/cast_internal_message_util.cc', #pylint: disable=line-too-long + '../../components/media_router/common/providers/cast/channel/cast_channel_enum.cc', #pylint: disable=line-too-long + '../../components/media_router/common/providers/cast/channel/cast_message_util.cc', #pylint: disable=line-too-long + '../../components/media_router/common/providers/cast/cast_media_source.cc', #pylint: disable=line-too-long + '../../ui/events/keycodes/dom/keycode_converter.cc', + # TODO(crbug.com/1051561): angle_unittests affected by coverage. + '../../base/message_loop/message_pump_default.cc', + '../../base/message_loop/message_pump_libevent.cc', + '../../base/message_loop/message_pump_win.cc', + '../../base/task/sequence_manager/thread_controller_with_message_pump_impl.cc', #pylint: disable=line-too-long + ], + 'chromeos': [ + # These files caused clang to crash while compiling them. They are + # excluded pending an investigation into the underlying compiler bug. + '../../third_party/webrtc/p2p/base/p2p_transport_channel.cc', + '../../third_party/icu/source/common/uts46.cpp', + '../../third_party/icu/source/common/ucnvmbcs.cpp', + '../../base/android/android_image_reader_compat.cc', + # TODO(crbug.com/1051561): angle_unittests affected by coverage. + '../../base/message_loop/message_pump_default.cc', + '../../base/message_loop/message_pump_libevent.cc', + '../../base/message_loop/message_pump_win.cc', + '../../base/task/sequence_manager/thread_controller_with_message_pump_impl.cc', #pylint: disable=line-too-long + ], + 'win': [ + # TODO(crbug.com/1051561): angle_unittests affected by coverage. + '../../base/message_loop/message_pump_default.cc', + '../../base/message_loop/message_pump_libevent.cc', + '../../base/message_loop/message_pump_win.cc', + '../../base/task/sequence_manager/thread_controller_with_message_pump_impl.cc', #pylint: disable=line-too-long + ], +} + +# Map of force lists indexed by target OS. +_COVERAGE_FORCE_LIST_MAP = { + # clang_profiling.cc refers to the symbol `__llvm_profile_dump` from the + # profiling runtime. 
In a partial coverage build, it is possible for a + # binary to include clang_profiling.cc but have no instrumented files, thus + # causing an unresolved symbol error because the profiling runtime will not + # be linked in. Therefore we force coverage for this file to ensure that + # any target that includes it will also get the profiling runtime. + 'win': [r'..\..\base\test\clang_profiling.cc'], + # TODO(crbug.com/1141727) We're seeing runtime LLVM errors in mac-rel when + # no files are changed, so we suspect that this is similar to the other + # problem with clang_profiling.cc on Windows. The TODO here is to force + # coverage for this specific file on ALL platforms, if it turns out to fix + # this issue on Mac as well. It's the only file that directly calls + # `__llvm_profile_dump` so it warrants some special treatment. + 'mac': ['../../base/test/clang_profiling.cc'], +} + + +def _remove_flags_from_command(command): + # We need to remove the coverage flags for this file, but we only want to + # remove them if we see the exact sequence defined in _COVERAGE_FLAGS. + # That ensures that we only remove the flags added by GN when + # "use_clang_coverage" is true. Otherwise, we would remove flags set by + # other parts of the build system. + start_flag = _COVERAGE_FLAGS[0] + num_flags = len(_COVERAGE_FLAGS) + start_idx = 0 + try: + while True: + idx = command.index(start_flag, start_idx) + if command[idx:idx + num_flags] == _COVERAGE_FLAGS: + del command[idx:idx + num_flags] + # There can be multiple sets of _COVERAGE_FLAGS. All of these need to be + # removed. + start_idx = idx + else: + start_idx = idx + 1 + except ValueError: + pass + + +def main(): + arg_parser = argparse.ArgumentParser() + arg_parser.usage = __doc__ + arg_parser.add_argument( + '--files-to-instrument', + type=str, + help='Path to a file that contains a list of file names to instrument.') + arg_parser.add_argument( + '--target-os', required=False, help='The OS to compile for.') + arg_parser.add_argument('args', nargs=argparse.REMAINDER) + parsed_args = arg_parser.parse_args() + + if (parsed_args.files_to_instrument and + not os.path.isfile(parsed_args.files_to_instrument)): + raise Exception('Path to the coverage instrumentation file: "%s" doesn\'t ' + 'exist.' % parsed_args.files_to_instrument) + + compile_command = parsed_args.args + if not any('clang' in s for s in compile_command): + return subprocess.call(compile_command) + + target_os = parsed_args.target_os + + try: + # The command is assumed to use Clang as the compiler, and the path to the + # source file is behind the -c argument, and the path to the source path is + # relative to the root build directory. For example: + # clang++ -fvisibility=hidden -c ../../base/files/file_path.cc -o \ + # obj/base/base/file_path.o + # On Windows, clang-cl.exe uses /c instead of -c. + source_flag = '/c' if target_os == 'win' else '-c' + source_flag_index = compile_command.index(source_flag) + except ValueError: + print('%s argument is not found in the compile command.' % source_flag) + raise + + if source_flag_index + 1 >= len(compile_command): + raise Exception('Source file to be compiled is missing from the command.') + + # On Windows, filesystem paths should use '\', but GN creates build commands + # that use '/'. We invoke os.path.normpath to ensure that the path uses the + # correct separator for the current platform (i.e. '\' on Windows and '/' + # otherwise). 
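+  # E.g. '../../base/files/file_path.cc' is left unchanged on POSIX but
+  # would become '..\..\base\files\file_path.cc' on Windows.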
+ compile_source_file = os.path.normpath(compile_command[source_flag_index + 1]) + extension = os.path.splitext(compile_source_file)[1] + if not extension in ['.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.S']: + raise Exception('Invalid source file %s found' % compile_source_file) + exclusion_list = _COVERAGE_EXCLUSION_LIST_MAP.get( + target_os, _DEFAULT_COVERAGE_EXCLUSION_LIST) + force_list = _COVERAGE_FORCE_LIST_MAP.get(target_os, []) + + should_remove_flags = False + if compile_source_file not in force_list: + if compile_source_file in exclusion_list: + should_remove_flags = True + elif parsed_args.files_to_instrument: + with open(parsed_args.files_to_instrument) as f: + if compile_source_file not in f.read(): + should_remove_flags = True + + if should_remove_flags: + _remove_flags_from_command(compile_command) + + return subprocess.call(compile_command) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/toolchain/concurrent_links.gni b/toolchain/concurrent_links.gni new file mode 100644 index 000000000000..e3590228e347 --- /dev/null +++ b/toolchain/concurrent_links.gni @@ -0,0 +1,117 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file should only be imported from files that define toolchains. +# There's no way to enforce this exactly, but all toolchains are processed +# in the context of the default_toolchain, so we can at least check for that. +assert(current_toolchain == default_toolchain) + +import("//build/config/android/config.gni") +import("//build/config/apple/symbols.gni") +import("//build/config/chromeos/ui_mode.gni") +import("//build/config/compiler/compiler.gni") +import("//build/config/coverage/coverage.gni") +import("//build/config/sanitizers/sanitizers.gni") +import("//build/toolchain/toolchain.gni") + +declare_args() { + # Limit the number of concurrent links; we often want to run fewer + # links at once than we do compiles, because linking is memory-intensive. + # The default to use varies by platform and by the amount of memory + # available, so we call out to a script to get the right value. + concurrent_links = -1 +} + +if (concurrent_links == -1) { + if (use_thin_lto) { + _args = [ "--reserve_mem_gb=10" ] + if (use_goma_thin_lto) { + _args += [ "--thin-lto=goma" ] + } else { + _args += [ "--thin-lto=local" ] + } + if (is_win) { + # Based on measurements of linking chrome.dll and chrome_child.dll, plus + # a little padding to account for future growth. + _args += [ "--mem_per_link_gb=45" ] + } else { + _args += [ "--mem_per_link_gb=20" ] + } + } else if ((use_clang_coverage && + # When coverage_instrumentation_input_file is not empty it means + # we're only instrumenting changed files and not using a lot of + # memory. Likewise, when it's empty we're building everything with + # coverage, which requires more memory. + coverage_instrumentation_input_file == "") || + use_sanitizer_coverage || use_fuzzing_engine) { + # Full sanitizer coverage instrumentation increases linker memory consumption + # significantly. 
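+    # As a rough illustration, a machine with 64GB of available memory
+    # would then run about 64 / 16 = 4 links concurrently (the actual
+    # computation happens in get_concurrent_links.py below).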
+ _args = [ "--mem_per_link_gb=16" ] + } else if (is_win && symbol_level == 1 && !is_debug && is_component_build) { + _args = [ "--mem_per_link_gb=3" ] + } else if (is_win) { + _args = [ "--mem_per_link_gb=6" ] + } else if (is_mac) { + if (enable_dsyms) { + _args = [ "--mem_per_link_gb=12" ] + } else { + _args = [ "--mem_per_link_gb=4" ] + } + } else if (is_android && !is_component_build && symbol_level == 2) { + # Full debug symbols require large memory for link. + _args = [ "--mem_per_link_gb=25" ] + } else if (is_android && !is_debug && !using_sanitizer && symbol_level < 2) { + if (symbol_level == 1) { + _args = [ "--mem_per_link_gb=6" ] + } else { + _args = [ "--mem_per_link_gb=4" ] + } + } else if ((is_linux || is_chromeos_lacros) && symbol_level == 0) { + # Memory consumption on link without debug symbols is low on linux. + _args = [ "--mem_per_link_gb=3" ] + } else if (current_os == "zos") { + _args = [ "--mem_per_link_gb=1" ] + } else if (is_fuchsia) { + # TODO(crbug.com/1347159): This was defaulting to 8GB. The number of + # linker instances to run in parallel is calculated by diviging + # the available memory by this value. On a 32GB machine with + # roughly 29GB of available memory, this would cause three instances + # to run. This started running out of memory and thrashing. This change + # addresses that issue to get the SDk rollers running again but + # could be optimized (maybe to 12GB or for different configs like + # component build). + _args = [ "--mem_per_link_gb=16" ] + } else { + _args = [] + } + + # For Android builds, we also need to be wary of: + # * ProGuard / R8 + # * Android Lint + # These both have a peak usage of < 2GB, but that is still large enough for + # them to need to use a pool since they both typically happen at the + # same time as linking. + if (is_android) { + _args += [ "--secondary_mem_per_link=2" ] + } + + # TODO(crbug.com/617429) Pass more build configuration info to the script + # so that we can compute better values. + _command_dict = exec_script("get_concurrent_links.py", _args, "scope") + + concurrent_links = _command_dict.primary_pool_size + concurrent_links_logs = _command_dict.explanation + + if (_command_dict.secondary_pool_size >= concurrent_links) { + # Have R8 / Lint share the link pool unless we would safely get more + # concurrency out of using a separate one. + # On low-RAM machines, this allows an apk's native library to link at the + # same time as its java is optimized with R8. + java_cmd_pool_size = _command_dict.secondary_pool_size + } +} else { + assert(!use_thin_lto, "can't explicitly set concurrent_links with thinlto") + concurrent_links_logs = + [ "concurrent_links set by GN arg (value=$concurrent_links)" ] +} diff --git a/toolchain/cros/BUILD.gn b/toolchain/cros/BUILD.gn new file mode 100644 index 000000000000..c815e1ab2844 --- /dev/null +++ b/toolchain/cros/BUILD.gn @@ -0,0 +1,305 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/compiler/compiler.gni") +import("//build/config/sysroot.gni") +import("//build/toolchain/cros_toolchain.gni") +import("//build/toolchain/gcc_toolchain.gni") + +declare_args() { + # If set, build lacros with Chromium's toolchain instead of with Chrome OS's. + # TODO(thakis): Set this to `= chromeos_is_browser_only` once that works. + lacros_use_chromium_toolchain = false +} + +# This is mostly identical to gcc_toolchain, but handles relativizing toolchain +# paths. 
+# environment. For example, cxx is a relative path picked up on $PATH in the
+# chroot. But in Simple Chrome, cxx is a system-absolute path.
+template("cros_toolchain") {
+  if (lacros_use_chromium_toolchain) {
+    clang_toolchain(target_name) {
+      forward_variables_from(invoker, "*")
+    }
+  } else {
+    gcc_toolchain(target_name) {
+      forward_variables_from(invoker, "*")
+
+      toolchain_args.cc_wrapper = ""
+      toolchain_args.clang_use_chrome_plugins = false
+
+      # CrOS's target toolchain wrapper prefers to invoke gomacc itself, so pass
+      # it the gomacc path via cmd-line arg. Otherwise, for both CrOS's host
+      # wrapper (used in the ebuild) and Chrome's clang (used in Simple Chrome),
+      # prepend gomacc like normal.
+      if (use_goma && toolchain_args.needs_gomacc_path_arg) {
+        extra_cppflags += " --gomacc-path $goma_dir/gomacc"
+      }
+      if (use_remoteexec && toolchain_args.needs_gomacc_path_arg) {
+        extra_cppflags += " --rewrapper-path $rbe_cros_cc_wrapper --rewrapper-cfg ${rbe_cc_cfg_file}"
+      }
+
+      # Relativize the path if a compiler is specified, so that it is not
+      # looked up from $PATH, and only when cc/cxx carries no additional
+      # flags (i.e. contains no embedded spaces).
+      if (cc != get_path_info(cc, "file") &&
+          string_replace(cc, " ", "") == cc) {
+        cc = rebase_path(cc, root_build_dir)
+      }
+      if (cxx != get_path_info(cxx, "file") &&
+          string_replace(cxx, " ", "") == cxx) {
+        cxx = rebase_path(cxx, root_build_dir)
+      }
+      if (ar != get_path_info(ar, "file") &&
+          string_replace(ar, " ", "") == ar) {
+        ar = rebase_path(ar, root_build_dir)
+      }
+      if (ld != get_path_info(ld, "file") &&
+          string_replace(ld, " ", "") == ld) {
+        ld = rebase_path(ld, root_build_dir)
+      }
+    }
+  }
+}
+
+# This is the normal toolchain for most targets.
+cros_toolchain("target") {
+  toolchain_args = {
+    current_cpu = target_cpu
+    current_os = "chromeos"
+    sysroot = target_sysroot
+  }
+
+  if (!lacros_use_chromium_toolchain) {
+    ar = cros_target_ar
+    cc = cros_target_cc
+    cxx = cros_target_cxx
+    ld = cros_target_ld
+
+    if (cros_target_nm != "") {
+      nm = cros_target_nm
+    }
+    if (cros_target_readelf != "") {
+      readelf = cros_target_readelf
+    }
+    extra_cflags = cros_target_extra_cflags
+    extra_cppflags = cros_target_extra_cppflags
+    extra_cxxflags = cros_target_extra_cxxflags
+    extra_ldflags = cros_target_extra_ldflags
+
+    toolchain_args.needs_gomacc_path_arg = true
+  }
+}
+
+# This is a special toolchain needed just for the nacl_bootstrap target in
+# //native_client/src/trusted/service_runtime/linux. It is identical
+# to ":target" except that it forces use_debug_fission, use_gold, and
+# use_sysroot off, and allows the user to set different sets of extra flags.
+cros_toolchain("nacl_bootstrap") {
+  toolchain_args = {
+    if (target_cpu == "arm64") {
+      current_cpu = "arm"
+    } else {
+      current_cpu = target_cpu
+    }
+    current_os = "chromeos"
+    use_debug_fission = false
+    use_gold = false
+    use_sysroot = false
+  }
+
+  if (!lacros_use_chromium_toolchain) {
+    ar = cros_target_ar
+    cc = cros_target_cc
+    cxx = cros_target_cxx
+    ld = cros_target_ld
+
+    if (cros_target_nm != "") {
+      nm = cros_target_nm
+    }
+    if (cros_target_readelf != "") {
+      readelf = cros_target_readelf
+    }
+    extra_cflags = cros_nacl_bootstrap_extra_cflags
+    extra_cppflags = cros_nacl_bootstrap_extra_cppflags
+    extra_cxxflags = cros_nacl_bootstrap_extra_cxxflags
+    extra_ldflags = cros_nacl_bootstrap_extra_ldflags
+
+    toolchain_args.needs_gomacc_path_arg = true
+  }
+
+  # We build for ARM32, even when the rest of the build targets ARM64; the
+  # overrides below (see the sketch that follows) swap in the 32-bit tools
+  # and sysroot.
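
A rough Python rendering of the relativization predicate used in the template above, for intuition only: GN's get_path_info(x, "file") corresponds to a basename check, and the string_replace comparison is a no-spaces test. This is a sketch with hypothetical paths, not part of the build.

    import os

    def maybe_relativize(tool, build_dir):
        # Only rebase values that are real paths (not bare names looked up on
        # $PATH) and that carry no extra flags (no embedded spaces).
        if tool != os.path.basename(tool) and ' ' not in tool:
            return os.path.relpath(tool, build_dir)
        return tool

    assert maybe_relativize('gcc', '/out') == 'gcc'            # bare name: keep
    assert maybe_relativize('g++ -B/x', '/out') == 'g++ -B/x'  # has flags: keep
    assert maybe_relativize('/usr/bin/gcc', '/out') == '../usr/bin/gcc'

The ARM32-on-ARM64 overrides that the preceding comment describes follow.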
+ if (target_cpu == "arm64") { + ar = cros_nacl_helper_arm32_ar + cc = cros_nacl_helper_arm32_cc + cxx = cros_nacl_helper_arm32_cxx + ld = cros_nacl_helper_arm32_ld + # Avoid accidental use of Arm64 sysroot because of SYSROOT + # env variable set in ChromeOS builds. + toolchain_args.sysroot = cros_nacl_helper_arm32_sysroot + } +} + +# This is a special toolchain needed just for the nacl_helper target for +# building an Arm32 nacl_helper binary on Arm64 ChromeOS targets. +cros_toolchain("nacl_helper_arm32") { + toolchain_args = { + current_cpu = "arm" + current_os = "chromeos" + use_debug_fission = false + use_gold = false + sysroot = cros_nacl_helper_arm32_sysroot + + # Disable some uses of libraries that this build does not require. The + # sysroot for this build does not provide them, and they would be pulled in + # by indirect dependencies of nacl_helper otherwise. + use_cras = false + use_nss_certs = false + use_system_libdrm = false + use_system_libsync = false + } + ar = cros_nacl_helper_arm32_ar + cc = cros_nacl_helper_arm32_cc + cxx = cros_nacl_helper_arm32_cxx + ld = cros_nacl_helper_arm32_ld + readelf = cros_nacl_helper_arm32_readelf + + extra_cflags = "" + extra_cppflags = "" + extra_cxxflags = "" + extra_ldflags = "" + + if (!lacros_use_chromium_toolchain) { + toolchain_args.needs_gomacc_path_arg = true + } +} + +cros_toolchain("host") { + toolchain_args = { + current_cpu = host_cpu + current_os = "linux" + sysroot = cros_host_sysroot + } + + if (!lacros_use_chromium_toolchain) { + # These are args for the template. + ar = cros_host_ar + cc = cros_host_cc + cxx = cros_host_cxx + ld = cros_host_ld + + if (cros_host_nm != "") { + nm = cros_host_nm + } + if (cros_host_readelf != "") { + readelf = cros_host_readelf + } + extra_cflags = cros_host_extra_cflags + extra_cppflags = cros_host_extra_cppflags + extra_cxxflags = cros_host_extra_cxxflags + extra_ldflags = cros_host_extra_ldflags + + toolchain_args.needs_gomacc_path_arg = false + } +} + +cros_toolchain("v8_snapshot") { + toolchain_args = { + if (target_cpu == "x86" || target_cpu == "arm" || target_cpu == "mipsel") { + current_cpu = "x86" + } else { + current_cpu = "x64" + } + v8_current_cpu = v8_target_cpu + current_os = "linux" + sysroot = cros_v8_snapshot_sysroot + } + + if (!lacros_use_chromium_toolchain) { + # These are args for the template. + ar = cros_v8_snapshot_ar + cc = cros_v8_snapshot_cc + cxx = cros_v8_snapshot_cxx + ld = cros_v8_snapshot_ld + + if (cros_v8_snapshot_nm != "") { + nm = cros_v8_snapshot_nm + } + if (cros_v8_snapshot_readelf != "") { + readelf = cros_v8_snapshot_readelf + } + extra_cflags = cros_v8_snapshot_extra_cflags + extra_cppflags = cros_v8_snapshot_extra_cppflags + extra_cxxflags = cros_v8_snapshot_extra_cxxflags + extra_ldflags = cros_v8_snapshot_extra_ldflags + + toolchain_args.needs_gomacc_path_arg = false + } +} + +# This toolchain is used when we want to build Lacros using alternate toolchain. +# To use this, you need to set gn arg 'also_build_lacros_chrome_for_architecture'. 
+# See build/config/chromeos/ui_mode.gni +if (also_build_lacros_chrome_for_architecture != "") { + cros_toolchain("lacros_clang") { + if (also_build_lacros_chrome_for_architecture == "amd64") { + lacros_args = + read_file("//build/args/chromeos/amd64-generic-crostoolchain.gni", + "scope") + } else if (also_build_lacros_chrome_for_architecture == "arm") { + lacros_args = + read_file("//build/args/chromeos/arm-generic-crostoolchain.gni", + "scope") + } else { + assert(false, + "also_build_lacros_chrome_for_architecture is not " + + "one of the supported architectures.") + } + + toolchain_args = { + forward_variables_from(lacros_args, "*") + + # TODO(crbug.com/1298821) Change to a better way to set gn args. + # The following gn args are present in ash config like + # //build/args/chromeos/atlas.gni but not in + # //build/args/chromeos/amd64-generic-crostoolchain.gni. + # So we need to reset them to the default value where Lacros needs. + # Starts from here. + ozone_auto_platforms = true + ozone_platform = "" + ozone_platform_gbm = -1 + ozone_platform_headless = false + + # Ends here. + + current_os = "chromeos" + target_os = "chromeos" + current_cpu = current_cpu + also_build_lacros_chrome_for_architecture = "" + chromeos_is_browser_only = true + use_clang_coverage = false + } + if (!lacros_use_chromium_toolchain) { + # These are args for the template. + ar = lacros_args.cros_target_ar + cc = lacros_args.cros_target_cc + cxx = lacros_args.cros_target_cxx + ld = lacros_args.cros_target_ld + + if (defined(lacros_args.cros_target_nm) && + lacros_args.cros_target_nm != "") { + nm = lacros_args.cros_target_nm + } + if (defined(lacros_args.cros_target_readelf) && + lacros_args.cros_target_readelf != "") { + readelf = lacros_args.cros_target_readelf + } + extra_cflags = lacros_args.cros_target_extra_cflags + extra_cppflags = lacros_args.cros_target_extra_cppflags + extra_cxxflags = lacros_args.cros_target_extra_cxxflags + extra_ldflags = lacros_args.cros_target_extra_ldflags + + toolchain_args.needs_gomacc_path_arg = true + } + } +} diff --git a/toolchain/cros_toolchain.gni b/toolchain/cros_toolchain.gni new file mode 100644 index 000000000000..a2696bd3f4b6 --- /dev/null +++ b/toolchain/cros_toolchain.gni @@ -0,0 +1,92 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# CrOS builds must cross-compile on a Linux host for the actual CrOS +# device target. There are many different CrOS devices so the build +# system provides configuration variables that permit a CrOS build to +# control the cross-compilation tool chain. However, requiring such +# fine-grain specification is tedious for build-bots and developers. +# Consequently, the CrOS build system defaults to a convenience +# compilation mode where the compilation host is also the build target. +# +# Chrome can be compiled in this way with the gn variable: +# +# target_os = "chromeos" +# +# To perform a board-specific build, first obtain the correct system +# root (http://goo.gl/aFB4XH) for the board. Then configure GN to use it +# by setting appropriate cross-compilation variables. +# +# For example, to compile a Chrome source tree in /g/src for an +# auron_paine CrOS device with the system root cached in /g/.cros_cache, +# the following GN arguments must be provided to configure +# cross-compilation with Goma acceleration. (NB: additional variables +# will be necessary to successfully compile a working CrOS Chrome. 
See
+# the definition of GYP_DEFINES inside a sysroot shell.)
+#
+# goma_dir = "/g/.cros_cache/common/goma+2"
+# target_sysroot = "/g/.cros_cache/chrome-sdk/tarballs/auron_paine+7644.0.0+sysroot_chromeos-base_chromeos-chrome.tar.xz"
+# cros_target_cc = "x86_64-cros-linux-gnu-gcc -B/g/.cros_cache/chrome-sdk/tarballs/auron_paine+7657.0.0+target_toolchain/usr/x86_64-pc-linux-gnu/x86_64-cros-linux-gnu/binutils-bin/2.25.51-gold"
+# cros_target_cxx = "x86_64-cros-linux-gnu-g++ -B/g/.cros_cache/chrome-sdk/tarballs/auron_paine+7657.0.0+target_toolchain/usr/x86_64-pc-linux-gnu/x86_64-cros-linux-gnu/binutils-bin/2.25.51-gold"
+# cros_target_ar = "x86_64-cros-linux-gnu-gcc-ar"
+# target_cpu = "x64"
+
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+
+declare_args() {
+  # These must be specified for a board-specific build.
+  cros_target_ar = "ar"
+  cros_target_cc = "gcc"
+  cros_target_cxx = "g++"
+  cros_target_nm = ""
+  cros_target_readelf = ""
+
+  # These can be optionally set. The "_cppflags" will be applied to *both*
+  # C and C++ files; use "_cxxflags" for C++-only flags.
+  cros_target_extra_cflags = ""
+  cros_target_extra_cppflags = ""
+  cros_target_extra_cxxflags = ""
+  cros_target_extra_ldflags = ""
+
+  cros_host_ar = "${clang_base_path}/bin/llvm-ar"
+  cros_host_cc = "${clang_base_path}/bin/clang"
+  cros_host_cxx = "${clang_base_path}/bin/clang++"
+  cros_host_nm = ""
+  cros_host_readelf = ""
+  cros_host_extra_cflags = ""
+  cros_host_extra_cppflags = ""
+  cros_host_extra_cxxflags = ""
+  cros_host_extra_ldflags = ""
+  cros_host_sysroot = ""
+
+  cros_v8_snapshot_ar = "${clang_base_path}/bin/llvm-ar"
+  cros_v8_snapshot_cc = "${clang_base_path}/bin/clang"
+  cros_v8_snapshot_cxx = "${clang_base_path}/bin/clang++"
+  cros_v8_snapshot_nm = ""
+  cros_v8_snapshot_readelf = ""
+  cros_v8_snapshot_extra_cflags = ""
+  cros_v8_snapshot_extra_cppflags = ""
+  cros_v8_snapshot_extra_cxxflags = ""
+  cros_v8_snapshot_extra_ldflags = ""
+  cros_v8_snapshot_sysroot = ""
+
+  cros_nacl_bootstrap_extra_cflags = ""
+  cros_nacl_bootstrap_extra_cppflags = ""
+  cros_nacl_bootstrap_extra_cxxflags = ""
+  cros_nacl_bootstrap_extra_ldflags = ""
+
+  cros_nacl_helper_arm32_ar = "ar"
+  cros_nacl_helper_arm32_cc = "gcc"
+  cros_nacl_helper_arm32_cxx = "g++"
+  cros_nacl_helper_arm32_readelf = ""
+  cros_nacl_helper_arm32_sysroot = ""
+}
+
+declare_args() {
+  cros_target_ld = cros_target_cxx
+  cros_host_ld = cros_host_cxx
+  cros_v8_snapshot_ld = cros_v8_snapshot_cxx
+  cros_nacl_helper_arm32_ld = cros_nacl_helper_arm32_cxx
+}
diff --git a/toolchain/fuchsia/BUILD.gn b/toolchain/fuchsia/BUILD.gn
new file mode 100644
index 000000000000..63504ea70a00
--- /dev/null
+++ b/toolchain/fuchsia/BUILD.gn
@@ -0,0 +1,38 @@
+# Copyright 2017 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/gcc_toolchain.gni")
+
+# Fuchsia builds using the Clang toolchain, with most parameters common across
+# the different target architectures.
+template("fuchsia_clang_toolchain") {
+  clang_toolchain(target_name) {
+    assert(host_os == "linux" || host_os == "mac")
+    assert(defined(invoker.toolchain_args),
+           "toolchain_args must be defined for fuchsia_clang_toolchain()")
+
+    # While we want to use stripped binaries on the device, we need to retain
+    # the unstripped binaries in runtime_deps to make them available for the
+    # test isolates to enable symbolizing on bots.
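
A condensed sketch of the strip-but-keep flow the comment above describes. The tool path and file names here are placeholders for illustration; the pattern mirrors the wrapper scripts later in this patch.

    import subprocess

    def strip_binary(strip_tool, unstripped, stripped):
        # Ship the stripped copy; keep the unstripped one around so bots can
        # symbolize crashes (use_unstripped_as_runtime_outputs is what lists
        # it as a runtime dep instead of the stripped file).
        subprocess.check_call([strip_tool, '-o', stripped, unstripped])

    # e.g. strip_binary('llvm-strip', 'exe.unstripped/foo', 'foo')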
+ strip = rebase_path("${clang_base_path}/bin/llvm-strip", root_build_dir) + use_unstripped_as_runtime_outputs = true + + default_shlib_subdir = "/lib" + + toolchain_args = invoker.toolchain_args + toolchain_args.current_os = "fuchsia" + } +} + +fuchsia_clang_toolchain("x64") { + toolchain_args = { + current_cpu = "x64" + } +} + +fuchsia_clang_toolchain("arm64") { + toolchain_args = { + current_cpu = "arm64" + } +} diff --git a/toolchain/fuchsia/DIR_METADATA b/toolchain/fuchsia/DIR_METADATA new file mode 100644 index 000000000000..210aa6a954b8 --- /dev/null +++ b/toolchain/fuchsia/DIR_METADATA @@ -0,0 +1 @@ +mixins: "//build/fuchsia/COMMON_METADATA" diff --git a/toolchain/fuchsia/OWNERS b/toolchain/fuchsia/OWNERS new file mode 100644 index 000000000000..e7034eabb1e9 --- /dev/null +++ b/toolchain/fuchsia/OWNERS @@ -0,0 +1 @@ +file://build/fuchsia/OWNERS diff --git a/toolchain/gcc_link_wrapper.py b/toolchain/gcc_link_wrapper.py new file mode 100755 index 000000000000..5c08a7e4a00c --- /dev/null +++ b/toolchain/gcc_link_wrapper.py @@ -0,0 +1,91 @@ +#!/usr/bin/env python3 +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Runs a linking command and optionally a strip command. + +This script exists to avoid using complex shell commands in +gcc_toolchain.gni's tool("link"), in case the host running the compiler +does not have a POSIX-like shell (e.g. Windows). +""" + +import argparse +import os +import subprocess +import sys + +import wrapper_utils + + +# When running on a Windows host and using a toolchain whose tools are +# actually wrapper scripts (i.e. .bat files on Windows) rather than binary +# executables, the "command" to run has to be prefixed with this magic. +# The GN toolchain definitions take care of that for when GN/Ninja is +# running the tool directly. When that command is passed in to this +# script, it appears as a unitary string but needs to be split up so that +# just 'cmd' is the actual command given to Python's subprocess module. +BAT_PREFIX = 'cmd /c call ' + +def CommandToRun(command): + if command[0].startswith(BAT_PREFIX): + command = command[0].split(None, 3) + command[1:] + return command + + +def main(): + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument('--strip', + help='The strip binary to run', + metavar='PATH') + parser.add_argument('--unstripped-file', + help='Executable file produced by linking command', + metavar='FILE') + parser.add_argument('--map-file', + help=('Use --Wl,-Map to generate a map file. Will be ' + 'gzipped if extension ends with .gz'), + metavar='FILE') + parser.add_argument('--dwp', help=('The dwp binary to run'), metavar='FILE') + parser.add_argument('--output', + required=True, + help='Final output executable file', + metavar='FILE') + parser.add_argument('command', nargs='+', + help='Linking command') + args = parser.parse_args() + + # Work-around for gold being slow-by-default. http://crbug.com/632230 + fast_env = dict(os.environ) + fast_env['LC_ALL'] = 'C' + result = wrapper_utils.RunLinkWithOptionalMapFile(args.command, env=fast_env, + map_file=args.map_file) + if result != 0: + return result + + # If dwp is set, then package debug info for this exe. 
+ dwp_proc = None + if args.dwp: + exe_file = args.output + if args.unstripped_file: + exe_file = args.unstripped_file + # Suppress warnings about duplicate CU entries (https://crbug.com/1264130) + dwp_proc = subprocess.Popen(wrapper_utils.CommandToRun( + [args.dwp, '-e', exe_file, '-o', exe_file + '.dwp']), + stderr=subprocess.DEVNULL) + + # Finally, strip the linked executable (if desired). + if args.strip: + result = subprocess.call( + CommandToRun([args.strip, '-o', args.output, args.unstripped_file])) + + if dwp_proc: + dwp_result = dwp_proc.wait() + if dwp_result != 0: + sys.stderr.write('dwp failed with error code {}\n'.format(dwp_result)) + return dwp_result + + return result + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/toolchain/gcc_solink_wrapper.py b/toolchain/gcc_solink_wrapper.py new file mode 100755 index 000000000000..03ef042618f5 --- /dev/null +++ b/toolchain/gcc_solink_wrapper.py @@ -0,0 +1,211 @@ +#!/usr/bin/env python3 +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Runs 'ld -shared' and generates a .TOC file that's untouched when unchanged. + +This script exists to avoid using complex shell commands in +gcc_toolchain.gni's tool("solink"), in case the host running the compiler +does not have a POSIX-like shell (e.g. Windows). +""" + +import argparse +import os +import shlex +import subprocess +import sys + +import wrapper_utils + + +def CollectSONAME(args): + """Replaces: readelf -d $sofile | grep SONAME""" + # TODO(crbug.com/1259067): Come up with a way to get this info without having + # to bundle readelf in the toolchain package. + toc = '' + readelf = subprocess.Popen(wrapper_utils.CommandToRun( + [args.readelf, '-d', args.sofile]), + stdout=subprocess.PIPE, + bufsize=-1, + universal_newlines=True) + for line in readelf.stdout: + if 'SONAME' in line: + toc += line + return readelf.wait(), toc + + +def CollectDynSym(args): + """Replaces: nm --format=posix -g -D -p $sofile | cut -f1-2 -d' '""" + toc = '' + nm = subprocess.Popen(wrapper_utils.CommandToRun( + [args.nm, '--format=posix', '-g', '-D', '-p', args.sofile]), + stdout=subprocess.PIPE, + bufsize=-1, + universal_newlines=True) + for line in nm.stdout: + toc += ' '.join(line.split(' ', 2)[:2]) + '\n' + return nm.wait(), toc + + +def CollectTOC(args): + result, toc = CollectSONAME(args) + if result == 0: + result, dynsym = CollectDynSym(args) + toc += dynsym + return result, toc + + +def UpdateTOC(tocfile, toc): + if os.path.exists(tocfile): + old_toc = open(tocfile, 'r').read() + else: + old_toc = None + if toc != old_toc: + open(tocfile, 'w').write(toc) + + +def CollectInputs(out, args): + for x in args: + if x.startswith('@'): + with open(x[1:]) as rsp: + CollectInputs(out, shlex.split(rsp.read())) + elif not x.startswith('-') and (x.endswith('.o') or x.endswith('.a')): + out.write(x) + out.write('\n') + + +def InterceptFlag(flag, command): + ret = flag in command + if ret: + command.remove(flag) + return ret + + +def SafeDelete(path): + try: + os.unlink(path) + except OSError: + pass + + +def main(): + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument('--readelf', + required=True, + help='The readelf binary to run', + metavar='PATH') + parser.add_argument('--nm', + required=True, + help='The nm binary to run', + metavar='PATH') + parser.add_argument('--strip', + help='The strip binary to run', + metavar='PATH') + parser.add_argument('--dwp', help='The dwp binary to 
run', metavar='PATH') + parser.add_argument('--sofile', + required=True, + help='Shared object file produced by linking command', + metavar='FILE') + parser.add_argument('--tocfile', + required=True, + help='Output table-of-contents file', + metavar='FILE') + parser.add_argument('--map-file', + help=('Use --Wl,-Map to generate a map file. Will be ' + 'gzipped if extension ends with .gz'), + metavar='FILE') + parser.add_argument('--output', + required=True, + help='Final output shared object file', + metavar='FILE') + parser.add_argument('command', nargs='+', + help='Linking command') + args = parser.parse_args() + + # Work-around for gold being slow-by-default. http://crbug.com/632230 + fast_env = dict(os.environ) + fast_env['LC_ALL'] = 'C' + + # Extract flags passed through ldflags but meant for this script. + # https://crbug.com/954311 tracks finding a better way to plumb these. + partitioned_library = InterceptFlag('--partitioned-library', args.command) + collect_inputs_only = InterceptFlag('--collect-inputs-only', args.command) + + # Partitioned .so libraries are used only for splitting apart in a subsequent + # step. + # + # - The TOC file optimization isn't useful, because the partition libraries + # must always be re-extracted if the combined library changes (and nothing + # should be depending on the combined library's dynamic symbol table). + # - Stripping isn't necessary, because the combined library is not used in + # production or published. + # + # Both of these operations could still be done, they're needless work, and + # tools would need to be updated to handle and/or not complain about + # partitioned libraries. Instead, to keep Ninja happy, simply create dummy + # files for the TOC and stripped lib. + if collect_inputs_only or partitioned_library: + open(args.output, 'w').close() + open(args.tocfile, 'w').close() + + # Instead of linking, records all inputs to a file. This is used by + # enable_resource_allowlist_generation in order to avoid needing to + # link (which is slow) to build the resources allowlist. + if collect_inputs_only: + if args.map_file: + open(args.map_file, 'w').close() + if args.dwp: + open(args.sofile + '.dwp', 'w').close() + + with open(args.sofile, 'w') as f: + CollectInputs(f, args.command) + return 0 + + # First, run the actual link. + command = wrapper_utils.CommandToRun(args.command) + result = wrapper_utils.RunLinkWithOptionalMapFile(command, + env=fast_env, + map_file=args.map_file) + + if result != 0: + return result + + # If dwp is set, then package debug info for this SO. + dwp_proc = None + if args.dwp: + # Explicit delete to account for symlinks (when toggling between + # debug/release). + SafeDelete(args.sofile + '.dwp') + # Suppress warnings about duplicate CU entries (https://crbug.com/1264130) + dwp_proc = subprocess.Popen(wrapper_utils.CommandToRun( + [args.dwp, '-e', args.sofile, '-o', args.sofile + '.dwp']), + stderr=subprocess.DEVNULL) + + if not partitioned_library: + # Next, generate the contents of the TOC file. + result, toc = CollectTOC(args) + if result != 0: + return result + + # If there is an existing TOC file with identical contents, leave it alone. + # Otherwise, write out the TOC file. + UpdateTOC(args.tocfile, toc) + + # Finally, strip the linked shared object file (if desired). 
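
The UpdateTOC call above relies on write-if-changed semantics: when the TOC is byte-identical, the file's mtime is untouched, so Ninja's restat pruning (configured on the solink tool later in this patch) can skip downstream relinks. A minimal standalone sketch of that idiom:

    import os

    def update_if_changed(path, contents):
        if os.path.exists(path):
            with open(path) as f:
                if f.read() == contents:
                    return False  # mtime preserved; restat prunes the edge
        with open(path, 'w') as f:
            f.write(contents)
        return True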
+ if args.strip: + result = subprocess.call( + wrapper_utils.CommandToRun( + [args.strip, '-o', args.output, args.sofile])) + + if dwp_proc: + dwp_result = dwp_proc.wait() + if dwp_result != 0: + sys.stderr.write('dwp failed with error code {}\n'.format(dwp_result)) + return dwp_result + + return result + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/toolchain/gcc_toolchain.gni b/toolchain/gcc_toolchain.gni new file mode 100644 index 000000000000..ad994319cd6f --- /dev/null +++ b/toolchain/gcc_toolchain.gni @@ -0,0 +1,897 @@ +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/clang/clang.gni") +import("//build/config/compiler/compiler.gni") +import("//build/config/coverage/coverage.gni") +import("//build/config/rust.gni") +import("//build/config/sanitizers/sanitizers.gni") +import("//build/config/v8_target_cpu.gni") +import("//build/toolchain/cc_wrapper.gni") +import("//build/toolchain/goma.gni") +import("//build/toolchain/rbe.gni") +import("//build/toolchain/toolchain.gni") + +if (is_nacl) { + # To keep NaCl variables out of builds that don't include NaCl, all + # variables defined in nacl/config.gni referenced here should be protected by + # is_nacl conditions. + import("//build/config/nacl/config.gni") +} + +declare_args() { + # Enables allowlist generation for IDR_ grit defines seen by the compiler. + # Currently works only on some platforms and enabled by default for official + # builds. Requires debug info. + enable_resource_allowlist_generation = + is_official_build && + # Don't enable for Android-on-Chrome OS. + (target_os == "android" || target_os == "win") + + # Use -MD instead of -MMD for compiler commands. This is useful for tracking + # the comprehensive set of dependencies. + system_headers_in_deps = false +} + +# When the arg is set via args.gn, it applies to all toolchains. In order to not +# hit the assert in grit_rule.gni, explicitly disable for host toolchains. +if ((is_linux || is_chromeos) && target_os == "android") { + enable_resource_allowlist_generation = false +} + +# Ensure enable_resource_allowlist_generation is enabled only when it will work. +if (enable_resource_allowlist_generation) { + assert( + !strip_debug_info, + "enable_resource_allowlist_generation=true requires strip_debug_info=false") + assert( + !is_component_build, + "enable_resource_allowlist_generation=true requires is_component_build=false") + assert( + target_os == "android" || target_os == "win", + "enable_resource_allowlist_generation=true does not work for target_os=$target_os") +} + +# This template defines a toolchain for something that works like gcc +# (including clang). +# +# It requires the following variables specifying the executables to run: +# - ar +# - cc +# - cxx +# - ld +# +# Optional parameters that control the tools: +# +# - extra_cflags +# Extra flags to be appended when compiling C files (but not C++ files). +# - extra_cppflags +# Extra flags to be appended when compiling both C and C++ files. "CPP" +# stands for "C PreProcessor" in this context, although it can be +# used for non-preprocessor flags as well. Not to be confused with +# "CXX" (which follows). +# - extra_cxxflags +# Extra flags to be appended when compiling C++ files (but not C files). +# - extra_asmflags +# Extra flags to be appended when compiling assembly. 
+# - extra_ldflags
+#     Extra flags to be appended when linking.
+#
+# - link_outputs
+#     The content of this array, if specified, will be added to the list of
+#     outputs from the link command. This can be useful in conjunction with
+#     the post_link parameter.
+# - use_unstripped_as_runtime_outputs
+#     When |strip| is set, mark unstripped executables as runtime deps rather
+#     than stripped ones.
+# - post_link
+#     The content of this string, if specified, will be run as a separate
+#     command following the link command.
+# - deps
+#     Just forwarded to the toolchain definition.
+# - executable_extension
+#     If this string is specified it will be used for the file extension
+#     for an executable, rather than using no extension; targets will
+#     still be able to override the extension using the output_extension
+#     variable.
+# - rebuild_define
+#     The contents of this string, if specified, will be passed as a #define
+#     to the toolchain. It can be used to force recompiles whenever a
+#     toolchain is updated.
+# - shlib_extension
+#     If this string is specified it will be used for the file extension
+#     for a shared library, rather than the default value specified in
+#     toolchain.gni.
+# - strip
+#     Location of the strip executable. When specified, strip will be run on
+#     all shared libraries and executables as they are built. The pre-stripped
+#     artifacts will be put in lib.unstripped/ and exe.unstripped/.
+#
+# Callers will normally want to invoke "gcc_toolchain" instead, which makes
+# a toolchain just like this one but may additionally create an extra
+# toolchain without sanitizers for host-side tools.
+template("single_gcc_toolchain") {
+  toolchain(target_name) {
+    assert(defined(invoker.ar), "gcc_toolchain() must specify a \"ar\" value")
+    assert(defined(invoker.cc), "gcc_toolchain() must specify a \"cc\" value")
+    assert(defined(invoker.cxx), "gcc_toolchain() must specify a \"cxx\" value")
+    assert(defined(invoker.ld), "gcc_toolchain() must specify a \"ld\" value")
+
+    # This define changes when the toolchain changes, forcing a rebuild.
+    # Nothing should ever use this define.
+    if (defined(invoker.rebuild_define)) {
+      rebuild_string = "-D" + invoker.rebuild_define + " "
+    } else {
+      rebuild_string = ""
+    }
+
+    # GN's syntax can't handle more than one scope dereference at once, like
+    # "invoker.toolchain_args.foo", so make a temporary to hold the toolchain
+    # args so we can do "invoker_toolchain_args.foo".
+    assert(defined(invoker.toolchain_args),
+           "Toolchains must specify toolchain_args")
+    invoker_toolchain_args = invoker.toolchain_args
+    assert(defined(invoker_toolchain_args.current_cpu),
+           "toolchain_args must specify a current_cpu")
+    assert(defined(invoker_toolchain_args.current_os),
+           "toolchain_args must specify a current_os")
+
+    # When invoking this toolchain not as the default one, these args will be
+    # passed to the build. They are ignored when this is the default toolchain.
+    toolchain_args = {
+      # Populate toolchain args from the invoker.
+      forward_variables_from(invoker_toolchain_args, "*")
+
+      # The host toolchain value computed by the default toolchain's setup
+      # needs to be passed through unchanged to all secondary toolchains to
+      # ensure that it's always the same, regardless of the values that may be
+      # set on those toolchains.
+      host_toolchain = host_toolchain
+
+      # The same applies to the toolchain we use to build Rust procedural
+      # macros, which is probably the same but might have sanitizers disabled.
+ host_toolchain_no_sanitizers = host_toolchain_no_sanitizers + + if (!defined(invoker_toolchain_args.v8_current_cpu)) { + v8_current_cpu = invoker_toolchain_args.current_cpu + } + } + + # When the invoker has explicitly overridden use_remoteexec, use_goma or + # cc_wrapper in the toolchain args, use those values, otherwise default + # to the global one. This works because the only reasonable override + # that toolchains might supply for these values are to force-disable them. + if (defined(toolchain_args.use_remoteexec)) { + toolchain_uses_remoteexec = toolchain_args.use_remoteexec + } else { + toolchain_uses_remoteexec = use_remoteexec + } + if (defined(toolchain_args.use_remoteexec_links)) { + toolchain_uses_remoteexec_links = toolchain_args.use_remoteexec_links + } else { + toolchain_uses_remoteexec_links = use_remoteexec_links + } + if (defined(toolchain_args.use_goma)) { + toolchain_uses_goma = toolchain_args.use_goma + } else { + toolchain_uses_goma = use_goma + } + + # x86_64-nacl-* is ELF-32 and Goma/RBE won't support ELF-32. + if (toolchain_uses_goma && + get_path_info(invoker.cc, "name") == "x86_64-nacl-gcc") { + # it will also disable x86_64-nacl-g++ since these are in + # the same toolchain. + toolchain_uses_goma = false + } + if (defined(toolchain_args.cc_wrapper)) { + toolchain_cc_wrapper = toolchain_args.cc_wrapper + } else { + toolchain_cc_wrapper = cc_wrapper + } + assert(!(toolchain_uses_remoteexec && toolchain_uses_goma), + "Goma and re-client can't be used together.") + assert(!(toolchain_cc_wrapper != "" && toolchain_uses_remoteexec), + "re-client and cc_wrapper can't be used together.") + assert(!(toolchain_cc_wrapper != "" && toolchain_uses_goma), + "Goma and cc_wrapper can't be used together.") + + # When the invoker has explicitly overridden use_goma or cc_wrapper in the + # toolchain args, use those values, otherwise default to the global one. + # This works because the only reasonable override that toolchains might + # supply for these values are to force-disable them. + # But if needs_gomacc_path_arg is set in a Chrome OS build, the toolchain + # wrapper will have picked up gomacc via cmd-line arg. So need to prepend + # gomacc in that case. + goma_path = "$goma_dir/gomacc" + if (toolchain_uses_remoteexec && + (!defined(invoker_toolchain_args.needs_gomacc_path_arg) || + !invoker_toolchain_args.needs_gomacc_path_arg)) { + if (defined(toolchain_args.rbe_cc_cfg_file)) { + toolchain_rbe_cc_cfg_file = toolchain_args.rbe_cc_cfg_file + } else { + toolchain_rbe_cc_cfg_file = rbe_cc_cfg_file + } + + # C/C++ (clang) rewrapper prefix to use when use_remoteexec is true. 
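
To make the string concatenation concrete: the rewrapper prefix built just below ends with a trailing space so that prepending it to the compiler yields one well-formed command line. A Python sketch, with every path a placeholder:

    rbe_bin_dir = '/opt/rbe/bin'                 # placeholder
    cfg = 'buildtools/reclient_cfgs/cc.cfg'      # placeholder
    exec_root = '/src/chromium'                  # placeholder
    compiler_prefix = ('%s/rewrapper -cfg=%s -exec_root=%s ' %
                       (rbe_bin_dir, cfg, exec_root))
    cc = compiler_prefix + 'clang'
    # -> "/opt/rbe/bin/rewrapper -cfg=... -exec_root=/src/chromium clang"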
+ compiler_prefix = "${rbe_bin_dir}/rewrapper -cfg=${toolchain_rbe_cc_cfg_file} -exec_root=${rbe_exec_root} " + } else if (toolchain_uses_goma && + (!defined(invoker_toolchain_args.needs_gomacc_path_arg) || + !invoker_toolchain_args.needs_gomacc_path_arg)) { + compiler_prefix = "${goma_path} " + if (use_goma_rust) { + rust_compiler_prefix = compiler_prefix + } + } else { + compiler_prefix = "${toolchain_cc_wrapper} " + } + + if (toolchain_uses_remoteexec_links) { + if (defined(toolchain_args.rbe_link_cfg_file)) { + toolchain_rbe_link_cfg_file = toolchain_args.rbe_link_cfg_file + } else { + toolchain_rbe_link_cfg_file = rbe_link_cfg_file + } + link_prefix = "${rbe_bin_dir}/rewrapper -cfg=${toolchain_rbe_link_cfg_file} -exec_root=${rbe_exec_root} " + not_needed([ "goma_path" ]) + } else if (use_goma_thin_lto && toolchain_uses_goma && use_thin_lto) { + # remote_ld.py uses autoninja in an attempt to set a reasonable + # number of jobs, but this results in too low a value on + # Chrome OS builders. So we pass in an explicit value. + link_prefix = + "\"$python_path\" " + + rebase_path("//tools/clang/scripts/remote_ld.py", root_build_dir) + + " --wrapper ${goma_path} --jobs 200 -- " + } else { + link_prefix = "" + not_needed([ "goma_path" ]) + } + + # Create a distinct variable for "asm", since coverage runs pass a bunch of + # flags to clang/clang++ that are nonsensical on assembler runs. + asm_prefix = compiler_prefix + + # A specific toolchain may wish to avoid coverage instrumentation, so we + # allow the global "use_clang_coverage" arg to be overridden. + if (defined(toolchain_args.use_clang_coverage)) { + toolchain_use_clang_coverage = toolchain_args.use_clang_coverage + } else { + toolchain_use_clang_coverage = use_clang_coverage + } + + # For a coverage build, we use the wrapper script globally so that it can + # remove coverage cflags from files that should not have them. + if (toolchain_use_clang_coverage) { + # "coverage_instrumentation_input_file" is set in args.gn, but it can be + # overridden by a toolchain config. + if (defined(toolchain_args.coverage_instrumentation_input_file)) { + toolchain_coverage_instrumentation_input_file = + toolchain_args.coverage_instrumentation_input_file + } else { + toolchain_coverage_instrumentation_input_file = + coverage_instrumentation_input_file + } + + _coverage_wrapper = + rebase_path("//build/toolchain/clang_code_coverage_wrapper.py", + root_build_dir) + + # The wrapper needs to know what OS we target because it uses that to + # select a list of files that should not be instrumented. + _coverage_wrapper = _coverage_wrapper + " --target-os=" + + invoker_toolchain_args.current_os + + # We want to instrument everything if there is no input file set. + # If there is a file we need to give it to the wrapper script so it can + # instrument only those files. 
+ if (toolchain_coverage_instrumentation_input_file != "") { + _coverage_wrapper = + _coverage_wrapper + " --files-to-instrument=" + + rebase_path(toolchain_coverage_instrumentation_input_file, + root_build_dir) + } + compiler_prefix = + "\"$python_path\" ${_coverage_wrapper} " + compiler_prefix + } + + cc = compiler_prefix + invoker.cc + cxx = compiler_prefix + invoker.cxx + asm = asm_prefix + invoker.cc + ar = invoker.ar + ld = link_prefix + invoker.ld + if (defined(invoker.readelf)) { + readelf = invoker.readelf + } else { + readelf = "readelf" + } + if (defined(invoker.nm)) { + nm = invoker.nm + } else { + nm = "nm" + } + if (defined(invoker.dwp)) { + dwp_switch = " --dwp=\"${invoker.dwp}\"" + } else { + dwp_switch = "" + } + + if (defined(invoker.shlib_extension)) { + default_shlib_extension = invoker.shlib_extension + } else { + default_shlib_extension = shlib_extension + } + + if (defined(invoker.default_shlib_subdir)) { + default_shlib_subdir = invoker.default_shlib_subdir + } else { + default_shlib_subdir = "" + } + + if (defined(invoker.executable_extension)) { + default_executable_extension = invoker.executable_extension + } else { + default_executable_extension = "" + } + + # Bring these into our scope for string interpolation with default values. + if (defined(invoker.extra_cflags) && invoker.extra_cflags != "") { + extra_cflags = " " + invoker.extra_cflags + } else { + extra_cflags = "" + } + + if (defined(invoker.extra_cppflags) && invoker.extra_cppflags != "") { + extra_cppflags = " " + invoker.extra_cppflags + } else { + extra_cppflags = "" + } + + if (defined(invoker.extra_cxxflags) && invoker.extra_cxxflags != "") { + extra_cxxflags = " " + invoker.extra_cxxflags + } else { + extra_cxxflags = "" + } + + if (defined(invoker.extra_asmflags) && invoker.extra_asmflags != "") { + extra_asmflags = " " + invoker.extra_asmflags + } else { + extra_asmflags = "" + } + + if (defined(invoker.extra_ldflags) && invoker.extra_ldflags != "") { + extra_ldflags = " " + invoker.extra_ldflags + } else { + extra_ldflags = "" + } + + if (system_headers_in_deps) { + md = "-MD" + } else { + md = "-MMD" + } + + enable_linker_map = defined(invoker.enable_linker_map) && + invoker.enable_linker_map && generate_linker_map + + # These library switches can apply to all tools below. + lib_switch = "-l" + lib_dir_switch = "-L" + + # Object files go in this directory. + object_subdir = "{{target_out_dir}}/{{label_name}}" + + tool("cc") { + depfile = "{{output}}.d" + precompiled_header_type = "gcc" + command = "$cc $md -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}}${extra_cppflags}${extra_cflags} -c {{source}} -o {{output}}" + depsformat = "gcc" + description = "CC {{output}}" + outputs = [ "$object_subdir/{{source_name_part}}.o" ] + } + + tool("cxx") { + depfile = "{{output}}.d" + precompiled_header_type = "gcc" + command = "$cxx $md -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}}${extra_cppflags}${extra_cxxflags} -c {{source}} -o {{output}}" + depsformat = "gcc" + description = "CXX {{output}}" + outputs = [ "$object_subdir/{{source_name_part}}.o" ] + } + + tool("asm") { + # For GCC we can just use the C compiler to compile assembly. 
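
The $md/-MF plumbing in the compile commands below makes the compiler emit a Make-style depfile, which Ninja parses (depsformat = "gcc") to discover header dependencies. A small sketch of what such a file contains and how it splits; the file names are hypothetical:

    dep_text = 'obj/foo.o: ../../foo.cc ../../foo.h \\\n ../../base/logging.h\n'
    target, _, deps = dep_text.partition(':')
    print(target)                              # obj/foo.o
    print(deps.replace('\\\n', ' ').split())   # the dependency list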
+ depfile = "{{output}}.d" + command = "$asm $md -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{asmflags}}${extra_asmflags} -c {{source}} -o {{output}}" + depsformat = "gcc" + description = "ASM {{output}}" + outputs = [ "$object_subdir/{{source_name_part}}.o" ] + } + + tool("alink") { + if (current_os == "aix") { + # AIX does not support either -D (deterministic output) or response + # files. + command = "$ar -X64 {{arflags}} -r -c -s {{output}} {{inputs}}" + } else { + rspfile = "{{output}}.rsp" + rspfile_content = "{{inputs}}" + command = "\"$ar\" {{arflags}} -r -c -s -D {{output}} @\"$rspfile\"" + } + + # Remove the output file first so that ar doesn't try to modify the + # existing file. + if (host_os == "win") { + tool_wrapper_path = + rebase_path("//build/toolchain/win/tool_wrapper.py", root_build_dir) + command = "cmd /s /c \"\"$python_path\" $tool_wrapper_path delete-file {{output}} && $command\"" + } else { + command = "rm -f {{output}} && $command" + } + + # Almost all targets build with //build/config/compiler:thin_archive which + # adds -T to arflags. + description = "AR {{output}}" + outputs = [ "{{output_dir}}/{{target_output_name}}{{output_extension}}" ] + + # Shared libraries go in the target out directory by default so we can + # generate different targets with the same name and not have them collide. + default_output_dir = "{{target_out_dir}}" + default_output_extension = ".a" + output_prefix = "lib" + } + + tool("solink") { + soname = "{{target_output_name}}{{output_extension}}" # e.g. "libfoo.so". + sofile = "{{output_dir}}/$soname" # Possibly including toolchain dir. + rspfile = sofile + ".rsp" + + pool = "//build/toolchain:link_pool($default_toolchain)" + + if (defined(invoker.strip)) { + unstripped_sofile = "{{root_out_dir}}/lib.unstripped/$soname" + } else { + unstripped_sofile = sofile + } + + # These variables are not built into GN but are helpers that + # implement (1) linking to produce a .so, (2) extracting the symbols + # from that file (3) if the extracted list differs from the existing + # .TOC file, overwrite it, otherwise, don't change it. + tocfile = sofile + ".TOC" + + soname_flag = "" + if (current_os != "aix") { + # -soname flag is not available on aix ld + soname_flag = "-Wl,-soname=\"$soname\"" + } + link_command = "$ld -shared $soname_flag {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" @\"$rspfile\" {{rlibs}}" + + # Generate a map file to be used for binary size analysis. + # Map file adds ~10% to the link time on a z620. + # With target_os="android", libchrome.so.map.gz is ~20MB. + map_switch = "" + if (enable_linker_map) { + map_file = "$unstripped_sofile.map.gz" + map_switch = " --map-file \"$map_file\"" + } + + assert(defined(readelf), "to solink you must have a readelf") + assert(defined(nm), "to solink you must have an nm") + strip_switch = "" + if (defined(invoker.strip)) { + strip_switch = "--strip=${invoker.strip} " + } + + # This needs a Python script to avoid using a complex shell command + # requiring sh control structures, pipelines, and POSIX utilities. + # The host might not have a POSIX shell and utilities (e.g. Windows). 
+ solink_wrapper = + rebase_path("//build/toolchain/gcc_solink_wrapper.py", root_build_dir) + solink_extra_flags = "" + if (current_os == "aix") { + # to be intercepted by solink_wrapper, so that we exit immediately + # after linking the shared object, without generating the TOC file + # (skipped on Aix) + solink_extra_flags = "--partitioned-library" + } + command = "\"$python_path\" \"$solink_wrapper\" --readelf=\"$readelf\" --nm=\"$nm\" $strip_switch$dwp_switch --sofile=\"$unstripped_sofile\" --tocfile=\"$tocfile\"$map_switch --output=\"$sofile\" -- $link_command $solink_extra_flags" + + if (target_cpu == "mipsel" && is_component_build && is_android) { + rspfile_content = "-Wl,--start-group -Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive {{libs}} -Wl,--end-group" + } else if (current_os == "aix") { + # --whole-archive, --no-whole-archive flags are not available on the aix + # ld. + rspfile_content = "{{inputs}} {{solibs}} {{libs}}" + } else { + rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive {{libs}}" + } + + description = "SOLINK $sofile" + + # Use this for {{output_extension}} expansions unless a target manually + # overrides it (in which case {{output_extension}} will be what the target + # specifies). + default_output_extension = default_shlib_extension + + default_output_dir = "{{root_out_dir}}${default_shlib_subdir}" + + output_prefix = "lib" + + # Since the above commands only updates the .TOC file when it changes, ask + # Ninja to check if the timestamp actually changed to know if downstream + # dependencies should be recompiled. + restat = true + + # Tell GN about the output files. It will link to the sofile but use the + # tocfile for dependency management. + outputs = [ + sofile, + tocfile, + ] + if (sofile != unstripped_sofile) { + outputs += [ unstripped_sofile ] + if (defined(invoker.use_unstripped_as_runtime_outputs) && + invoker.use_unstripped_as_runtime_outputs) { + runtime_outputs = [ unstripped_sofile ] + } + } + + # Clank build will generate DWP files when Fission is used. + # Other builds generate DWP files outside of the gn link targets, if at + # all. + if (defined(invoker.dwp)) { + outputs += [ unstripped_sofile + ".dwp" ] + if (defined(invoker.use_unstripped_as_runtime_outputs) && + invoker.use_unstripped_as_runtime_outputs) { + runtime_outputs += [ unstripped_sofile + ".dwp" ] + } + } + if (defined(map_file)) { + outputs += [ map_file ] + } + link_output = sofile + depend_output = tocfile + } + + tool("solink_module") { + soname = "{{target_output_name}}{{output_extension}}" # e.g. "libfoo.so". 
+ sofile = "{{output_dir}}/$soname" + rspfile = sofile + ".rsp" + + pool = "//build/toolchain:link_pool($default_toolchain)" + + if (defined(invoker.strip)) { + unstripped_sofile = "{{root_out_dir}}/lib.unstripped/$soname" + } else { + unstripped_sofile = sofile + } + + soname_flag = "" + whole_archive_flag = "" + no_whole_archive_flag = "" + if (current_os != "aix") { + # -soname, --whole-archive, --no-whole-archive flags are not available + # on aix ld + soname_flag = "-Wl,-soname=\"$soname\"" + whole_archive_flag = "-Wl,--whole-archive" + no_whole_archive_flag = "-Wl,--no-whole-archive" + } + command = "$ld -shared {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" $soname_flag @\"$rspfile\"" + + if (defined(invoker.strip)) { + strip_command = "${invoker.strip} -o \"$sofile\" \"$unstripped_sofile\"" + command += " && " + strip_command + } + rspfile_content = "$whole_archive_flag {{inputs}} {{solibs}} $no_whole_archive_flag {{libs}} {{rlibs}}" + + description = "SOLINK_MODULE $sofile" + + # Use this for {{output_extension}} expansions unless a target manually + # overrides it (in which case {{output_extension}} will be what the target + # specifies). + if (defined(invoker.loadable_module_extension)) { + default_output_extension = invoker.loadable_module_extension + } else { + default_output_extension = default_shlib_extension + } + + default_output_dir = "{{root_out_dir}}${default_shlib_subdir}" + + output_prefix = "lib" + + outputs = [ sofile ] + if (sofile != unstripped_sofile) { + outputs += [ unstripped_sofile ] + if (defined(invoker.use_unstripped_as_runtime_outputs) && + invoker.use_unstripped_as_runtime_outputs) { + runtime_outputs = [ unstripped_sofile ] + } + } + } + + tool("link") { + exename = "{{target_output_name}}{{output_extension}}" + outfile = "{{output_dir}}/$exename" + rspfile = "$outfile.rsp" + unstripped_outfile = outfile + + pool = "//build/toolchain:link_pool($default_toolchain)" + + # Use this for {{output_extension}} expansions unless a target manually + # overrides it (in which case {{output_extension}} will be what the target + # specifies). + default_output_extension = default_executable_extension + + default_output_dir = "{{root_out_dir}}" + + if (defined(invoker.strip)) { + unstripped_outfile = "{{root_out_dir}}/exe.unstripped/$exename" + } + + start_group_flag = "" + end_group_flag = "" + if (current_os != "aix") { + # the "--start-group .. --end-group" feature isn't available on the aix + # ld. + start_group_flag = "-Wl,--start-group" + end_group_flag = "-Wl,--end-group " + } + link_command = "$ld {{ldflags}}${extra_ldflags} -o \"$unstripped_outfile\" $start_group_flag @\"$rspfile\" {{solibs}} $end_group_flag {{libs}} {{rlibs}}" + + # Generate a map file to be used for binary size analysis. + # Map file adds ~10% to the link time on a z620. + # With target_os="android", libchrome.so.map.gz is ~20MB. 
+ map_switch = "" + if (enable_linker_map) { + map_file = "$unstripped_outfile.map.gz" + map_switch = " --map-file \"$map_file\"" + } + + strip_switch = "" + if (defined(invoker.strip)) { + strip_switch = " --strip=\"${invoker.strip}\" --unstripped-file=\"$unstripped_outfile\"" + } + + link_wrapper = + rebase_path("//build/toolchain/gcc_link_wrapper.py", root_build_dir) + command = "\"$python_path\" \"$link_wrapper\" --output=\"$outfile\"$strip_switch$map_switch$dwp_switch -- $link_command" + + description = "LINK $outfile" + + rspfile_content = "{{inputs}}" + outputs = [ outfile ] + if (outfile != unstripped_outfile) { + outputs += [ unstripped_outfile ] + if (defined(invoker.use_unstripped_as_runtime_outputs) && + invoker.use_unstripped_as_runtime_outputs) { + runtime_outputs = [ unstripped_outfile ] + } + } + + # Clank build will generate DWP files when Fission is used. + # Other builds generate DWP files outside of the gn link targets, if at + # all. + if (defined(invoker.dwp)) { + outputs += [ unstripped_outfile + ".dwp" ] + if (defined(invoker.use_unstripped_as_runtime_outputs) && + invoker.use_unstripped_as_runtime_outputs) { + runtime_outputs += [ unstripped_outfile + ".dwp" ] + } + } + if (defined(invoker.link_outputs)) { + outputs += invoker.link_outputs + } + if (defined(map_file)) { + outputs += [ map_file ] + } + } + + # These two are really entirely generic, but have to be repeated in + # each toolchain because GN doesn't allow a template to be used here. + # See //build/toolchain/toolchain.gni for details. + tool("stamp") { + command = stamp_command + description = stamp_description + } + tool("copy") { + command = copy_command + description = copy_description + } + + tool("action") { + pool = "//build/toolchain:action_pool($default_toolchain)" + } + + if (toolchain_has_rust) { + if (!defined(rust_compiler_prefix)) { + rust_compiler_prefix = "" + } + rustc_bin = rebase_path("${rust_sysroot}/bin/rustc", root_build_dir) + rustc = "$rust_compiler_prefix${rustc_bin}" + rust_sysroot_relative_to_out = rebase_path(rust_sysroot, root_out_dir) + rustc_wrapper = rebase_path("//build/rust/rustc_wrapper.py") + + # RSP manipulation due to https://bugs.chromium.org/p/gn/issues/detail?id=249 + tool("rust_staticlib") { + libname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + rspfile = "$libname.rsp" + depfile = "$libname.d" + + default_output_extension = ".a" + output_prefix = "lib" + default_output_dir = "{{root_out_dir}}" + description = "RUST(STATICLIB) {{output}}" + outputs = [ libname ] + + rspfile_content = "{{rustdeps}} {{externs}}" + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${invoker.cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $libname LDFLAGS RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + + tool("rust_rlib") { + # We must always prefix with `lib` even if the library already starts + # with that prefix or else our stdlib is unable to find libc.rlib (or + # actually liblibc.rlib). + rlibname = + "{{output_dir}}/lib{{target_output_name}}{{output_extension}}" + depfile = "$rlibname.d" + + # Do not use rsp files in this (common) case because they occupy the + # ninja main thread, and {{rlibs}} have shorter command lines than + # fully linked targets. + + default_output_extension = ".rlib" + + # This is prefixed unconditionally in `rlibname`. 
+ # output_prefix = "lib" + default_output_dir = "{{root_out_dir}}" + description = "RUST {{output}}" + outputs = [ rlibname ] + + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile -- -Clinker=\"${invoker.cxx}\" $rustc_common_args {{rustdeps}} {{externs}} --emit=dep-info=$depfile,link -o $rlibname LDFLAGS RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + + tool("rust_bin") { + exename = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + depfile = "$exename.d" + rspfile = "$exename.rsp" + pool = "//build/toolchain:link_pool($default_toolchain)" + + default_output_extension = default_executable_extension + default_output_dir = "{{root_out_dir}}" + description = "RUST(BIN) {{output}}" + outputs = [ exename ] + + rspfile_content = "{{rustdeps}} {{externs}}" + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${invoker.cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $exename LDFLAGS {{ldflags}} ${extra_ldflags} RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + + tool("rust_cdylib") { + dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + depfile = "$dllname.d" + rspfile = "$dllname.rsp" + pool = "//build/toolchain:link_pool($default_toolchain)" + + default_output_extension = default_shlib_extension + output_prefix = "lib" + default_output_dir = "{{root_out_dir}}${default_shlib_subdir}" + description = "RUST(CDYLIB) {{output}}" + outputs = [ dllname ] + + rspfile_content = "{{rustdeps}} {{externs}}" + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${invoker.cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $dllname LDFLAGS {{ldflags}} ${extra_ldflags} RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + + tool("rust_macro") { + dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + depfile = "$dllname.d" + rspfile = "$dllname.rsp" + pool = "//build/toolchain:link_pool($default_toolchain)" + + default_output_extension = default_shlib_extension + output_prefix = "lib" + default_output_dir = "{{root_out_dir}}${default_shlib_subdir}" + description = "RUST(MACRO) {{output}}" + outputs = [ dllname ] + + rspfile_content = "{{rustdeps}} {{externs}}" + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- -Clinker=\"${invoker.cxx}\" $rustc_common_args --emit=dep-info=$depfile,link -o $dllname LDFLAGS {{ldflags}} ${extra_ldflags} RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + } + + forward_variables_from(invoker, + [ + "deps", + "propagates_configs", + ]) + } +} + +# Makes a single GCC toolchain, or possibly two if we need +# an equivalent toolchain without sanitizers. +template("gcc_toolchain") { + single_gcc_toolchain(target_name) { + assert(defined(invoker.toolchain_args), + "Toolchains must declare toolchain_args") + forward_variables_from(invoker, + "*", + [ + "visibility", + "test_only", + ]) + + # No need to forward visibility and test_only as they apply to targets not + # toolchains, but presubmit checks require that we explicitly exclude them + } + + if (using_sanitizer) { + # Make an additional toolchain with no sanitizers. 
+ single_gcc_toolchain("${target_name}_no_sanitizers") { + assert(defined(invoker.toolchain_args), + "Toolchains must declare toolchain_args") + forward_variables_from(invoker, + "*", + [ + "toolchain_args", + "visibility", + "test_only", + ]) + toolchain_args = { + # Populate toolchain args from the invoker. + forward_variables_from(invoker.toolchain_args, "*") + toolchain_disables_sanitizers = true + } + } + } +} + +# This is a shorthand for gcc_toolchain instances based on the Chromium-built +# version of Clang. Only the toolchain_cpu and toolchain_os variables need to +# be specified by the invoker, and optionally toolprefix if it's a +# cross-compile case. Note that for a cross-compile case this toolchain +# requires a config to pass the appropriate -target option, or else it will +# actually just be doing a native compile. The invoker can optionally override +# use_gold too. +template("clang_toolchain") { + gcc_toolchain(target_name) { + _path = "$clang_base_path/bin" + _is_path_absolute = get_path_info(_path, "abspath") == _path + + # Preserve absolute paths for tools like distcc. + if (_is_path_absolute && filter_include([ _path ], [ "//*" ]) == []) { + prefix = _path + } else { + prefix = rebase_path(_path, root_build_dir) + } + + cc = "${prefix}/clang" + cxx = "${prefix}/clang++" + ld = cxx + readelf = "${prefix}/llvm-readelf" + ar = "${prefix}/llvm-ar" + nm = "${prefix}/llvm-nm" + + forward_variables_from(invoker, + [ + "strip", + "default_shlib_subdir", + "dwp", + "enable_linker_map", + "loadable_module_extension", + "propagates_configs", + "use_unstripped_as_runtime_outputs", + ]) + + toolchain_args = { + if (defined(invoker.toolchain_args)) { + forward_variables_from(invoker.toolchain_args, "*") + } + is_clang = true + } + } +} diff --git a/toolchain/get_concurrent_links.py b/toolchain/get_concurrent_links.py new file mode 100755 index 000000000000..47f009362d7c --- /dev/null +++ b/toolchain/get_concurrent_links.py @@ -0,0 +1,157 @@ +#!/usr/bin/env python3 +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This script computs the number of concurrent links we want to run in the build +# as a function of machine spec. It's based on GetDefaultConcurrentLinks in GYP. 
+
+import argparse
+import multiprocessing
+import os
+import re
+import subprocess
+import sys
+
+sys.path.insert(1, os.path.join(os.path.dirname(__file__), '..'))
+import gn_helpers
+
+
+def _GetTotalMemoryInBytes():
+  if sys.platform in ('win32', 'cygwin'):
+    import ctypes
+
+    class MEMORYSTATUSEX(ctypes.Structure):
+      _fields_ = [
+          ("dwLength", ctypes.c_ulong),
+          ("dwMemoryLoad", ctypes.c_ulong),
+          ("ullTotalPhys", ctypes.c_ulonglong),
+          ("ullAvailPhys", ctypes.c_ulonglong),
+          ("ullTotalPageFile", ctypes.c_ulonglong),
+          ("ullAvailPageFile", ctypes.c_ulonglong),
+          ("ullTotalVirtual", ctypes.c_ulonglong),
+          ("ullAvailVirtual", ctypes.c_ulonglong),
+          ("sullAvailExtendedVirtual", ctypes.c_ulonglong),
+      ]
+
+    stat = MEMORYSTATUSEX(dwLength=ctypes.sizeof(MEMORYSTATUSEX))
+    ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
+    return stat.ullTotalPhys
+  elif sys.platform.startswith('linux'):
+    if os.path.exists("/proc/meminfo"):
+      with open("/proc/meminfo") as meminfo:
+        memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
+        for line in meminfo:
+          match = memtotal_re.match(line)
+          if not match:
+            continue
+          return float(match.group(1)) * 2**10
+  elif sys.platform == 'darwin':
+    try:
+      return int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
+    except Exception:
+      return 0
+  # TODO(scottmg): Implement this for other platforms.
+  return 0
+
+
+def _GetDefaultConcurrentLinks(per_link_gb, reserve_gb, thin_lto_type,
+                               secondary_per_link_gb, override_ram_in_gb):
+  explanation = []
+  explanation.append(
+      'per_link_gb={} reserve_gb={} secondary_per_link_gb={}'.format(
+          per_link_gb, reserve_gb, secondary_per_link_gb))
+  if override_ram_in_gb:
+    mem_total_gb = override_ram_in_gb
+  else:
+    mem_total_gb = float(_GetTotalMemoryInBytes()) / 2**30
+  adjusted_mem_total_gb = max(0, mem_total_gb - reserve_gb)
+
+  # Ensure that there are at least as many links allocated for the secondary
+  # as there are for the primary. The secondary link usually uses less memory.
+  mem_cap = int(
+      max(1, adjusted_mem_total_gb / (per_link_gb + secondary_per_link_gb)))
+
+  try:
+    cpu_count = multiprocessing.cpu_count()
+  except:
+    cpu_count = 1
+
+  # Local LTO links saturate all cores, but only for part of the link.
+  # Goma LTO runs LTO codegen on goma, so only run one of these tasks at once.
+  cpu_cap = cpu_count
+  if thin_lto_type is not None:
+    if thin_lto_type == 'goma':
+      cpu_cap = 1
+    else:
+      assert thin_lto_type == 'local'
+      cpu_cap = min(cpu_count, 6)
+
+  explanation.append(
+      'cpu_count={} cpu_cap={} mem_total_gb={:.1f}GiB adjusted_mem_total_gb={:.1f}GiB'
+      .format(cpu_count, cpu_cap, mem_total_gb, adjusted_mem_total_gb))
+
+  num_links = min(mem_cap, cpu_cap)
+  if num_links == cpu_cap:
+    if cpu_cap == cpu_count:
+      reason = 'cpu_count'
+    else:
+      reason = 'cpu_cap (thinlto)'
+  else:
+    reason = 'RAM'
+
+  # Static links see too many open files if we have many concurrent links.
+  # ref: http://b/233068481
+  if num_links > 30:
+    num_links = 30
+    reason = 'nofile'
+
+  explanation.append('concurrent_links={} (reason: {})'.format(
+      num_links, reason))
+
+  # Use remaining RAM for a secondary pool if needed.
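+  # For example (illustrative numbers only, assuming the link count above was
+  # memory-bound): with adjusted_mem_total_gb=64, per_link_gb=8 and
+  # secondary_per_link_gb=2, the primary pool gets int(64 / (8 + 2)) = 6
+  # links, leaving 64 - 6 * 8 = 16 GiB, i.e. a secondary pool of
+  # int(16 / 2) = 8 (further capped at cpu_count below).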
+  if secondary_per_link_gb:
+    mem_remaining = adjusted_mem_total_gb - num_links * per_link_gb
+    secondary_size = int(max(0, mem_remaining / secondary_per_link_gb))
+    if secondary_size > cpu_count:
+      secondary_size = cpu_count
+      reason = 'cpu_count'
+    else:
+      reason = 'mem_remaining={:.1f}GiB'.format(mem_remaining)
+    explanation.append('secondary_size={} (reason: {})'.format(
+        secondary_size, reason))
+  else:
+    secondary_size = 0
+
+  return num_links, secondary_size, explanation
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--mem_per_link_gb', type=int, default=8)
+  parser.add_argument('--reserve_mem_gb', type=int, default=0)
+  parser.add_argument('--secondary_mem_per_link', type=int, default=0)
+  parser.add_argument('--override-ram-in-gb-for-testing', type=float, default=0)
+  parser.add_argument('--thin-lto')
+  options = parser.parse_args()
+
+  primary_pool_size, secondary_pool_size, explanation = (
+      _GetDefaultConcurrentLinks(options.mem_per_link_gb,
+                                 options.reserve_mem_gb, options.thin_lto,
+                                 options.secondary_mem_per_link,
+                                 options.override_ram_in_gb_for_testing))
+  if options.override_ram_in_gb_for_testing:
+    print('primary={} secondary={} explanation={}'.format(
+        primary_pool_size, secondary_pool_size, explanation))
+  else:
+    sys.stdout.write(
+        gn_helpers.ToGNString({
+            'primary_pool_size': primary_pool_size,
+            'secondary_pool_size': secondary_pool_size,
+            'explanation': explanation,
+        }))
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/toolchain/get_cpu_count.py b/toolchain/get_cpu_count.py
new file mode 100644
index 000000000000..f7cf9511d707
--- /dev/null
+++ b/toolchain/get_cpu_count.py
@@ -0,0 +1,22 @@
+# Copyright 2018 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script prints the CPU count, which is used to set the capacity of the
+# action pool.
+
+
+import multiprocessing
+import sys
+
+def main():
+  try:
+    cpu_count = multiprocessing.cpu_count()
+  except:
+    cpu_count = 1
+
+  print(cpu_count)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/toolchain/get_goma_dir.py b/toolchain/get_goma_dir.py
new file mode 100644
index 000000000000..14c9d5b6c1a9
--- /dev/null
+++ b/toolchain/get_goma_dir.py
@@ -0,0 +1,42 @@
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script gets the default goma_dir for depot_tools goma.
+
+import os
+import sys
+
+
+def main():
+  gomacc = 'gomacc'
+  candidates = []
+  if sys.platform in ['win32', 'cygwin']:
+    gomacc = 'gomacc.exe'
+
+  for path in os.environ.get('PATH', '').split(os.pathsep):
+    # normpath() required to strip trailing slash when present.
+    if os.path.basename(os.path.normpath(path)) == 'depot_tools':
+      candidates.append(os.path.join(path, '.cipd_bin'))
+
+  for d in candidates:
+    if os.path.isfile(os.path.join(d, gomacc)):
+      sys.stdout.write(d)
+      return 0
+  # The mb analyze step sets use_goma=true but goma_dir="", and the bot
+  # doesn't have goma in the default locations above. To mitigate this, just
+  # use the initial depot_tools path or the default path as before (if
+  # depot_tools doesn't exist in PATH).
+  # TODO(ukai): crbug.com/1073276: fix mb analyze step and make it a hard
+  # error?
+  if sys.platform in ['win32', 'cygwin']:
+    sys.stdout.write('C:\\src\\goma\\goma-win64')
+  elif 'GOMA_DIR' in os.environ:
+    sys.stdout.write(os.environ.get('GOMA_DIR'))
+  else:
+    sys.stdout.write(os.path.join(os.environ.get('HOME', ''), 'goma'))
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/toolchain/goma.gni b/toolchain/goma.gni
new file mode 100644
index 000000000000..9e0e5476ee85
--- /dev/null
+++ b/toolchain/goma.gni
@@ -0,0 +1,30 @@
+# Copyright 2013 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Defines the configuration of Goma.
+
+declare_args() {
+  # Set to true to enable distributed compilation using Goma.
+  use_goma = false
+
+  # This flag is for the ChromeOS compiler wrapper.
+  # When the gomacc path is passed via a cmd-line arg, the ChromeOS compiler
+  # wrapper invokes gomacc itself.
+  needs_gomacc_path_arg = false
+
+  # Absolute directory containing the gomacc binary.
+  goma_dir = ""
+}
+
+if (use_goma && goma_dir == "") {
+  goma_dir = exec_script("get_goma_dir.py", [], "string")
+}
+
+declare_args() {
+  # TODO(crbug.com/726475): true if use_goma = true in the future.
+  use_java_goma = false
+}
+
+assert(!is_win || !use_goma || is_clang,
+       "cl.exe does not work on goma, use clang")
diff --git a/toolchain/ios/BUILD.gn b/toolchain/ios/BUILD.gn
new file mode 100644
index 000000000000..5623a84f5001
--- /dev/null
+++ b/toolchain/ios/BUILD.gn
@@ -0,0 +1,66 @@
+# Copyright 2021 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/ios/ios_sdk.gni")
+import("//build/toolchain/apple/toolchain.gni")
+
+# Specialisation of the apple_toolchain template to declare the toolchain
+# and its tools to build targets for the iOS platform.
+template("ios_toolchain") {
+  assert(defined(invoker.toolchain_args),
+         "Toolchains must declare toolchain_args")
+
+  apple_toolchain(target_name) {
+    forward_variables_from(invoker, "*", [ "toolchain_args" ])
+
+    sdk_developer_dir = ios_sdk_developer_dir
+    deployment_target = ios_deployment_target
+    target_environment = target_environment
+    bin_path = ios_bin_path
+
+    toolchain_args = {
+      forward_variables_from(invoker.toolchain_args, "*")
+      xcode_build = xcode_build
+      current_os = "ios"
+    }
+  }
+}
+
+ios_toolchain("ios_clang_arm64") {
+  toolchain_args = {
+    current_cpu = "arm64"
+  }
+}
+
+ios_toolchain("ios_clang_arm64_13_4") {
+  toolchain_args = {
+    current_cpu = "arm64"
+    ios_deployment_target = "13.4"
+  }
+}
+
+ios_toolchain("ios_clang_arm") {
+  toolchain_args = {
+    current_cpu = "arm"
+  }
+}
+
+ios_toolchain("ios_clang_x64") {
+  toolchain_args = {
+    current_cpu = "x64"
+  }
+}
+
+ios_toolchain("ios_clang_x64_13_4") {
+  toolchain_args = {
+    current_cpu = "x64"
+    ios_deployment_target = "13.4"
+  }
+}
+
+ios_toolchain("ios_clang_x86") {
+  toolchain_args = {
+    current_cpu = "x86"
+  }
+}
diff --git a/toolchain/ios/OWNERS b/toolchain/ios/OWNERS
new file mode 100644
index 000000000000..6f3324f07c8f
--- /dev/null
+++ b/toolchain/ios/OWNERS
@@ -0,0 +1 @@
+file://build/apple/OWNERS
diff --git a/toolchain/ios/compile_xcassets.py b/toolchain/ios/compile_xcassets.py
new file mode 100644
index 000000000000..a62b96a4d7b6
--- /dev/null
+++ b/toolchain/ios/compile_xcassets.py
@@ -0,0 +1,293 @@
+# Copyright 2016 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
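+
+# Example invocation (an illustrative sketch; the flags are defined in Main()
+# below):
+#   python3 compile_xcassets.py -p ios -e simulator -t 14.0 \
+#       -o out/Debug/Foo.app/Assets.car path/to/Foo.xcassets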
+ +"""Wrapper around actool to compile assets catalog. + +The script compile_xcassets.py is a wrapper around actool to compile +assets catalog to Assets.car that turns warning into errors. It also +fixes some quirks of actool to make it work from ninja (mostly that +actool seems to require absolute path but gn generates command-line +with relative paths). + +The wrapper filter out any message that is not a section header and +not a warning or error message, and fails if filtered output is not +empty. This should to treat all warnings as error until actool has +an option to fail with non-zero error code when there are warnings. +""" + +import argparse +import os +import re +import shutil +import subprocess +import sys +import tempfile + +# Pattern matching a section header in the output of actool. +SECTION_HEADER = re.compile('^/\\* ([^ ]*) \\*/$') + +# Name of the section containing informational messages that can be ignored. +NOTICE_SECTION = 'com.apple.actool.compilation-results' + +# Map special type of asset catalog to the corresponding command-line +# parameter that need to be passed to actool. +ACTOOL_FLAG_FOR_ASSET_TYPE = { + '.appiconset': '--app-icon', + '.launchimage': '--launch-image', +} + +def FixAbsolutePathInLine(line, relative_paths): + """Fix absolute paths present in |line| to relative paths.""" + absolute_path = line.split(':')[0] + relative_path = relative_paths.get(absolute_path, absolute_path) + if absolute_path == relative_path: + return line + return relative_path + line[len(absolute_path):] + + +def FilterCompilerOutput(compiler_output, relative_paths): + """Filers actool compilation output. + + The compiler output is composed of multiple sections for each different + level of output (error, warning, notices, ...). Each section starts with + the section name on a single line, followed by all the messages from the + section. + + The function filter any lines that are not in com.apple.actool.errors or + com.apple.actool.document.warnings sections (as spurious messages comes + before any section of the output). + + See crbug.com/730054, crbug.com/739163 and crbug.com/770634 for some example + messages that pollute the output of actool and cause flaky builds. + + Args: + compiler_output: string containing the output generated by the + compiler (contains both stdout and stderr) + relative_paths: mapping from absolute to relative paths used to + convert paths in the warning and error messages (unknown paths + will be left unaltered) + + Returns: + The filtered output of the compiler. If the compilation was a + success, then the output will be empty, otherwise it will use + relative path and omit any irrelevant output. + """ + + filtered_output = [] + current_section = None + data_in_section = False + for line in compiler_output.splitlines(): + match = SECTION_HEADER.search(line) + if match is not None: + data_in_section = False + current_section = match.group(1) + continue + if current_section and current_section != NOTICE_SECTION: + if not data_in_section: + data_in_section = True + filtered_output.append('/* %s */\n' % current_section) + + fixed_line = FixAbsolutePathInLine(line, relative_paths) + filtered_output.append(fixed_line + '\n') + + return ''.join(filtered_output) + + +def CompileAssetCatalog(output, platform, target_environment, product_type, + min_deployment_target, inputs, compress_pngs, + partial_info_plist): + """Compile the .xcassets bundles to an asset catalog using actool. 
+
+  Args:
+    output: absolute path to the containing bundle
+    platform: the targeted platform
+    target_environment: the targeted environment ('simulator', 'device' or
+      'catalyst'; only used when platform is 'ios')
+    product_type: the bundle type
+    min_deployment_target: minimum deployment target
+    inputs: list of absolute paths to .xcassets bundles
+    compress_pngs: whether to enable compression of pngs
+    partial_info_plist: path to partial Info.plist to generate
+  """
+  command = [
+      'xcrun',
+      'actool',
+      '--output-format=human-readable-text',
+      '--notices',
+      '--warnings',
+      '--errors',
+      '--minimum-deployment-target',
+      min_deployment_target,
+  ]
+
+  if compress_pngs:
+    command.extend(['--compress-pngs'])
+
+  if product_type != '':
+    command.extend(['--product-type', product_type])
+
+  if platform == 'mac':
+    command.extend([
+        '--platform',
+        'macosx',
+        '--target-device',
+        'mac',
+    ])
+  elif platform == 'ios':
+    if target_environment == 'simulator':
+      command.extend([
+          '--platform',
+          'iphonesimulator',
+          '--target-device',
+          'iphone',
+          '--target-device',
+          'ipad',
+      ])
+    elif target_environment == 'device':
+      command.extend([
+          '--platform',
+          'iphoneos',
+          '--target-device',
+          'iphone',
+          '--target-device',
+          'ipad',
+      ])
+    elif target_environment == 'catalyst':
+      command.extend([
+          '--platform',
+          'macosx',
+          '--target-device',
+          'ipad',
+          '--ui-framework-family',
+          'uikit',
+      ])
+
+  # Scan the input directories for the presence of asset catalog types that
+  # require special treatment, and if so, add them to the actool command-line.
+  for relative_path in inputs:
+
+    if not os.path.isdir(relative_path):
+      continue
+
+    for file_or_dir_name in os.listdir(relative_path):
+      if not os.path.isdir(os.path.join(relative_path, file_or_dir_name)):
+        continue
+
+      asset_name, asset_type = os.path.splitext(file_or_dir_name)
+      if asset_type not in ACTOOL_FLAG_FOR_ASSET_TYPE:
+        continue
+
+      command.extend([ACTOOL_FLAG_FOR_ASSET_TYPE[asset_type], asset_name])
+
+  # Always ask actool to generate a partial Info.plist file. If no path
+  # has been given by the caller, use a temporary file name.
+  temporary_file = None
+  if not partial_info_plist:
+    temporary_file = tempfile.NamedTemporaryFile(suffix='.plist')
+    partial_info_plist = temporary_file.name
+
+  command.extend(['--output-partial-info-plist', partial_info_plist])
+
+  # Dictionary used to convert absolute paths back to their relative form
+  # in the output of actool.
+  relative_paths = {}
+
+  # actool crashes if paths are relative, so convert input and output paths
+  # to absolute paths, and record the relative paths to fix them back when
+  # filtering the output.
+  absolute_output = os.path.abspath(output)
+  relative_paths[output] = absolute_output
+  relative_paths[os.path.dirname(output)] = os.path.dirname(absolute_output)
+  command.extend(['--compile', os.path.dirname(os.path.abspath(output))])
+
+  for relative_path in inputs:
+    absolute_path = os.path.abspath(relative_path)
+    relative_paths[absolute_path] = relative_path
+    command.append(absolute_path)
+
+  try:
+    # Run actool and redirect stdout and stderr to the same pipe (as actool
+    # is confused about what should go to stderr/stdout).
+    process = subprocess.Popen(command,
+                               stdout=subprocess.PIPE,
+                               stderr=subprocess.STDOUT)
+    stdout = process.communicate()[0].decode('utf-8')
+
+    # If the invocation of `actool` failed, copy all the compiler output to
+    # the standard error stream and exit. See https://crbug.com/1205775 for
+    # an example of a compilation that failed with no error message due to
+    # the filtering.
+    if process.returncode:
+      for line in stdout.splitlines():
+        fixed_line = FixAbsolutePathInLine(line, relative_paths)
+        sys.stderr.write(fixed_line + '\n')
+      sys.exit(1)
+
+    # Filter the output to remove all garbage and to fix the paths. If the
+    # output is not empty after filtering, then report the compilation as a
+    # failure (as some versions of `actool` report errors to stdout, yet exit
+    # with a return code of zero).
+    stdout = FilterCompilerOutput(stdout, relative_paths)
+    if stdout:
+      sys.stderr.write(stdout)
+      sys.exit(1)
+
+  finally:
+    if temporary_file:
+      temporary_file.close()
+
+
+def Main():
+  parser = argparse.ArgumentParser(
+      description='compile assets catalog for a bundle')
+  parser.add_argument('--platform',
+                      '-p',
+                      required=True,
+                      choices=('mac', 'ios'),
+                      help='target platform for the compiled assets catalog')
+  parser.add_argument('--target-environment',
+                      '-e',
+                      default='',
+                      choices=('simulator', 'device', 'catalyst'),
+                      help='target environment for the compiled assets catalog')
+  parser.add_argument(
+      '--minimum-deployment-target',
+      '-t',
+      required=True,
+      help='minimum deployment target for the compiled assets catalog')
+  parser.add_argument('--output',
+                      '-o',
+                      required=True,
+                      help='path to the compiled assets catalog')
+  parser.add_argument('--compress-pngs',
+                      '-c',
+                      action='store_true',
+                      default=False,
+                      help='recompress PNGs while compiling assets catalog')
+  parser.add_argument('--product-type',
+                      '-T',
+                      help='type of the containing bundle')
+  parser.add_argument('--partial-info-plist',
+                      '-P',
+                      help='path to partial info plist to create')
+  parser.add_argument('inputs',
+                      nargs='+',
+                      help='path to input assets catalog sources')
+  args = parser.parse_args()
+
+  if os.path.basename(args.output) != 'Assets.car':
+    sys.stderr.write('output should be path to compiled asset catalog, not '
+                     'to the containing bundle: %s\n' % (args.output, ))
+    sys.exit(1)
+
+  if os.path.exists(args.output):
+    if os.path.isfile(args.output):
+      os.unlink(args.output)
+    else:
+      shutil.rmtree(args.output)
+
+  CompileAssetCatalog(args.output, args.platform, args.target_environment,
+                      args.product_type, args.minimum_deployment_target,
+                      args.inputs, args.compress_pngs, args.partial_info_plist)
+
+
+if __name__ == '__main__':
+  sys.exit(Main())
diff --git a/toolchain/ios/swiftc.py b/toolchain/ios/swiftc.py
new file mode 100644
index 000000000000..e77e0b1ff525
--- /dev/null
+++ b/toolchain/ios/swiftc.py
@@ -0,0 +1,327 @@
+# Copyright 2020 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import json
+import os
+import subprocess
+import sys
+import tempfile
+
+
+def fix_module_imports(header_path, output_path):
+  """Convert module imports to work without -fmodules support.
+
+  The Swift compiler assumes that the generated Objective-C header will be
+  imported from code compiled with module support enabled (-fmodules). The
+  generated code thus uses @import and provides no fallback if modules are
+  not enabled.
+
+  This function converts the generated header to instead use #import. It
+  assumes that `@import Foo;` can be replaced by `#import <Foo/Foo.h>`.
+
+  The header is read at `header_path` and written to `output_path`.
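+
+  For example, a generated line `@import UIKit;` is rewritten to
+  `#import <UIKit/UIKit.h> // @import UIKit;` in the output header.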
+ """ + + header_contents = [] + with open(header_path, 'r') as header_file: + for line in header_file: + if line == '#if __has_feature(modules)\n': + header_contents.append('#if 1 // #if __has_feature(modules)\n') + nesting_level = 1 + for line in header_file: + if line == '#endif\n': + nesting_level -= 1 + elif line.startswith('@import'): + name = line.split()[1].split(';')[0] + if name != 'ObjectiveC': + header_contents.append(f'#import <{name}/{name}.h> ') + header_contents.append('// ') + elif line.startswith('#if'): + nesting_level += 1 + + header_contents.append(line) + if nesting_level == 0: + break + else: + header_contents.append(line) + + with open(output_path, 'w') as header_file: + for line in header_contents: + header_file.write(line) + + +def compile_module(module, sources, settings, extras, tmpdir): + """Compile `module` from `sources` using `settings`.""" + output_file_map = {} + if settings.whole_module_optimization: + output_file_map[''] = { + 'object': os.path.join(settings.object_dir, module + '.o'), + 'dependencies': os.path.join(tmpdir, module + '.d'), + } + else: + for source in sources: + name, _ = os.path.splitext(os.path.basename(source)) + output_file_map[source] = { + 'object': os.path.join(settings.object_dir, name + '.o'), + 'dependencies': os.path.join(tmpdir, name + '.d'), + } + + for key in ('module_path', 'header_path', 'depfile'): + path = getattr(settings, key) + if os.path.exists(path): + os.unlink(path) + if key == 'module_path': + for ext in '.swiftdoc', '.swiftsourceinfo': + path = os.path.splitext(getattr(settings, key))[0] + ext + if os.path.exists(path): + os.unlink(path) + directory = os.path.dirname(path) + if not os.path.exists(directory): + os.makedirs(directory) + + if not os.path.exists(settings.object_dir): + os.makedirs(settings.object_dir) + + if not os.path.exists(settings.pch_output_dir): + os.makedirs(settings.pch_output_dir) + + for key in output_file_map: + path = output_file_map[key]['object'] + if os.path.exists(path): + os.unlink(path) + + output_file_map.setdefault('', {})['swift-dependencies'] = \ + os.path.join(tmpdir, module + '.swift.d') + + output_file_map_path = os.path.join(tmpdir, module + '.json') + with open(output_file_map_path, 'w') as output_file_map_file: + output_file_map_file.write(json.dumps(output_file_map)) + output_file_map_file.flush() + + extra_args = [] + if settings.file_compilation_dir: + extra_args.extend([ + '-file-compilation-dir', + settings.file_compilation_dir, + ]) + + if settings.bridge_header: + extra_args.extend([ + '-import-objc-header', + os.path.abspath(settings.bridge_header), + ]) + + if settings.whole_module_optimization: + extra_args.append('-whole-module-optimization') + + if settings.target: + extra_args.extend([ + '-target', + settings.target, + ]) + + if settings.sdk: + extra_args.extend([ + '-sdk', + os.path.abspath(settings.sdk), + ]) + + if settings.swift_version: + extra_args.extend([ + '-swift-version', + settings.swift_version, + ]) + + if settings.include_dirs: + for include_dir in settings.include_dirs: + extra_args.append('-I' + include_dir) + + if settings.system_include_dirs: + for system_include_dir in settings.system_include_dirs: + extra_args.extend(['-Xcc', '-isystem', '-Xcc', system_include_dir]) + + if settings.framework_dirs: + for framework_dir in settings.framework_dirs: + extra_args.extend([ + '-F', + framework_dir, + ]) + + if settings.system_framework_dirs: + for system_framework_dir in settings.system_framework_dirs: + extra_args.extend([ + '-F', + 
system_framework_dir,
+      ])
+
+  if settings.enable_cxx_interop:
+    extra_args.extend([
+        '-Xfrontend',
+        '-enable-cxx-interop',
+    ])
+
+  # The swiftc compiler uses a global module cache that is not robust against
+  # changes in the sub-modules or against corruption (see crbug.com/1358073).
+  # Force the compiler to store the module cache in a sub-directory of `tmpdir`
+  # to ensure a pristine module cache is used for every compiler invocation.
+  module_cache_path = os.path.join(tmpdir, settings.swiftc_version,
+                                   'ModuleCache')
+
+  # If the generated header is post-processed, generate it to a temporary
+  # location (to avoid having the file appear to suddenly change).
+  if settings.fix_module_imports:
+    header_path = os.path.join(tmpdir, f'{module}.h')
+  else:
+    header_path = settings.header_path
+
+  process = subprocess.Popen([
+      settings.swift_toolchain_path + '/usr/bin/swiftc',
+      '-parse-as-library',
+      '-module-name',
+      module,
+      '-module-cache-path',
+      module_cache_path,
+      '-emit-object',
+      '-emit-dependencies',
+      '-emit-module',
+      '-emit-module-path',
+      settings.module_path,
+      '-emit-objc-header',
+      '-emit-objc-header-path',
+      header_path,
+      '-output-file-map',
+      output_file_map_path,
+      '-pch-output-dir',
+      os.path.abspath(settings.pch_output_dir),
+  ] + extra_args + extras + sources)
+
+  process.communicate()
+  if process.returncode:
+    sys.exit(process.returncode)
+
+  if settings.fix_module_imports:
+    fix_module_imports(header_path, settings.header_path)
+
+  # The swiftc compiler generates depfiles that use absolute paths, but
+  # ninja requires paths in depfiles to be identical to the paths used in
+  # the build.ninja files.
+  #
+  # Since gn generates paths relative to the build directory for all paths
+  # below the repository checkout, we need to convert those to relative
+  # paths.
+  #
+  # See https://crbug.com/1287114 for a build failure that happens when the
+  # paths in the depfile are kept absolute.
+  out_dir = os.getcwd() + os.path.sep
+  src_dir = os.path.abspath(settings.root_dir) + os.path.sep
+
+  depfile_content = dict()
+  for key in output_file_map:
+
+    # When whole module optimization is disabled, there will be an entry with
+    # an empty string as the key and only 'swift-dependencies' among the keys
+    # of the value dictionary. This is expected, so skip entries that do not
+    # include 'dependencies' in their keys.
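+    # Each dependency file uses Makefile syntax; an illustrative line looks
+    # like "Foo.o : ../../foo.swift ../../bar.swift". It is parsed below and
+    # rewritten with paths made relative to the build directory.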
+    dependency_file_path = output_file_map[key].get('dependencies')
+    if not dependency_file_path:
+      continue
+
+    for line in open(dependency_file_path):
+      output, inputs = line.split(' : ', 2)
+      _, ext = os.path.splitext(output)
+      if ext == '.o':
+        key = output
+      else:
+        key = os.path.splitext(settings.module_path)[0] + ext
+      if key not in depfile_content:
+        depfile_content[key] = set()
+      for path in inputs.split():
+        if path.startswith(src_dir) or path.startswith(out_dir):
+          path = os.path.relpath(path, out_dir)
+        depfile_content[key].add(path)
+
+  with open(settings.depfile, 'w') as depfile:
+    keys = sorted(depfile_content.keys())
+    for key in sorted(keys):
+      depfile.write('%s : %s\n' % (key, ' '.join(sorted(depfile_content[key]))))
+
+
+def main(args):
+  parser = argparse.ArgumentParser(add_help=False)
+  parser.add_argument('-module-name', help='name of the Swift module')
+  parser.add_argument('-include',
+                      '-I',
+                      action='append',
+                      dest='include_dirs',
+                      help='add directory to header search path')
+  parser.add_argument('-isystem',
+                      action='append',
+                      dest='system_include_dirs',
+                      help='add directory to system header search path')
+  parser.add_argument('sources', nargs='+', help='Swift source file to compile')
+  parser.add_argument('-whole-module-optimization',
+                      action='store_true',
+                      help='enable whole module optimization')
+  parser.add_argument('-object-dir',
+                      help='path to the generated object files directory')
+  parser.add_argument('-pch-output-dir',
+                      help='path to directory where .pch files are saved')
+  parser.add_argument('-module-path', help='path to the generated module file')
+  parser.add_argument('-header-path', help='path to the generated header file')
+  parser.add_argument('-bridge-header',
+                      help='path to the Objective-C bridge header')
+  parser.add_argument('-depfile', help='path to the generated depfile')
+  parser.add_argument('-swift-version',
+                      help='version of Swift language to support')
+  parser.add_argument('-target',
+                      action='store',
+                      help='generate code for the given target')
+  parser.add_argument('-sdk', action='store', help='compile against sdk')
+  parser.add_argument('-F',
+                      dest='framework_dirs',
+                      action='append',
+                      help='add dir to framework search path')
+  parser.add_argument('-Fsystem',
+                      '-iframework',
+                      dest='system_framework_dirs',
+                      action='append',
+                      help='add dir to system framework search path')
+  parser.add_argument('-root-dir',
+                      dest='root_dir',
+                      action='store',
+                      required=True,
+                      help='path to the root of the repository')
+  parser.add_argument('-swift-toolchain-path',
+                      default='',
+                      action='store',
+                      dest='swift_toolchain_path',
+                      help='path to the root of the Swift toolchain')
+  parser.add_argument('-file-compilation-dir',
+                      default='',
+                      action='store',
+                      help='compilation directory to embed in the debug info')
+  parser.add_argument('-enable-cxx-interop',
+                      dest='enable_cxx_interop',
+                      action='store_true',
+                      help='allow importing C++ modules into Swift')
+  parser.add_argument('-fix-module-imports',
+                      action='store_true',
+                      help='enable hack to fix module imports')
+  parser.add_argument('-swiftc-version',
+                      default='',
+                      action='store',
+                      help='version of swiftc compiler')
+  parser.add_argument('-xcode-version',
+                      default='',
+                      action='store',
+                      help='version of xcode')
+
+  parsed, extras = parser.parse_known_args(args)
+  with tempfile.TemporaryDirectory() as tmpdir:
+    compile_module(parsed.module_name, parsed.sources, parsed, extras, tmpdir)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git 
a/toolchain/kythe.gni b/toolchain/kythe.gni new file mode 100644 index 000000000000..950d94379896 --- /dev/null +++ b/toolchain/kythe.gni @@ -0,0 +1,11 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file defines configuration for Kythe, an indexer and cross-referencer +# that powers codesearch. + +declare_args() { + # Enables Kythe annotations necessary to build cross references. + enable_kythe_annotations = false +} diff --git a/toolchain/linux/BUILD.gn b/toolchain/linux/BUILD.gn new file mode 100644 index 000000000000..3d6bb56599e6 --- /dev/null +++ b/toolchain/linux/BUILD.gn @@ -0,0 +1,440 @@ +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/dcheck_always_on.gni") +import("//build/config/ozone.gni") +import("//build/config/sysroot.gni") +import("//build/config/ui.gni") +import("//build/toolchain/gcc_toolchain.gni") + +clang_toolchain("clang_ppc64") { + enable_linker_map = true + toolchain_args = { + current_cpu = "ppc64" + current_os = "linux" + } +} + +clang_toolchain("clang_arm") { + toolchain_args = { + current_cpu = "arm" + current_os = "linux" + } +} + +clang_toolchain("clang_arm64") { + toolchain_args = { + current_cpu = "arm64" + current_os = "linux" + } +} + +gcc_toolchain("arm64") { + toolprefix = "aarch64-linux-gnu-" + + cc = "${toolprefix}gcc" + cxx = "${toolprefix}g++" + + ar = "${toolprefix}ar" + ld = cxx + readelf = "${toolprefix}readelf" + nm = "${toolprefix}nm" + + toolchain_args = { + current_cpu = "arm64" + current_os = "linux" + + # reclient does not support gcc. + use_remoteexec = false + is_clang = false + } +} + +gcc_toolchain("arm") { + toolprefix = "arm-linux-gnueabihf-" + + cc = "${toolprefix}gcc" + cxx = "${toolprefix}g++" + + ar = "${toolprefix}ar" + ld = cxx + readelf = "${toolprefix}readelf" + nm = "${toolprefix}nm" + + toolchain_args = { + current_cpu = "arm" + current_os = "linux" + + # reclient does not support gcc. + use_remoteexec = false + is_clang = false + } +} + +clang_toolchain("clang_x86") { + # Output linker map files for binary size analysis. + enable_linker_map = true + + toolchain_args = { + current_cpu = "x86" + current_os = "linux" + } +} + +gcc_toolchain("x86") { + cc = "gcc" + cxx = "g++" + + readelf = "readelf" + nm = "nm" + ar = "ar" + ld = cxx + + # Output linker map files for binary size analysis. + enable_linker_map = true + + toolchain_args = { + current_cpu = "x86" + current_os = "linux" + + # reclient does not support gcc. + use_remoteexec = false + is_clang = false + } +} + +clang_toolchain("clang_x64") { + # Output linker map files for binary size analysis. 
+  enable_linker_map = true
+
+  toolchain_args = {
+    current_cpu = "x64"
+    current_os = "linux"
+  }
+}
+
+template("clang_v8_toolchain") {
+  clang_toolchain(target_name) {
+    toolchain_args = {
+      current_os = "linux"
+      forward_variables_from(invoker.toolchain_args, "*")
+    }
+  }
+}
+
+clang_v8_toolchain("clang_x86_v8_arm") {
+  toolchain_args = {
+    current_cpu = "x86"
+    v8_current_cpu = "arm"
+  }
+}
+
+clang_v8_toolchain("clang_x86_v8_mipsel") {
+  toolchain_args = {
+    current_cpu = "x86"
+    v8_current_cpu = "mipsel"
+  }
+}
+
+clang_v8_toolchain("clang_x86_v8_mips") {
+  toolchain_args = {
+    current_cpu = "x86"
+    v8_current_cpu = "mips"
+  }
+}
+
+clang_v8_toolchain("clang_x64_v8_arm64") {
+  toolchain_args = {
+    current_cpu = "x64"
+    v8_current_cpu = "arm64"
+  }
+}
+
+clang_v8_toolchain("clang_x64_v8_mips64el") {
+  toolchain_args = {
+    current_cpu = "x64"
+    v8_current_cpu = "mips64el"
+  }
+}
+
+clang_v8_toolchain("clang_x64_v8_mips64") {
+  toolchain_args = {
+    current_cpu = "x64"
+    v8_current_cpu = "mips64"
+  }
+}
+
+clang_v8_toolchain("clang_x64_v8_riscv64") {
+  toolchain_args = {
+    current_cpu = "x64"
+    v8_current_cpu = "riscv64"
+  }
+}
+
+clang_v8_toolchain("clang_x64_v8_loong64") {
+  toolchain_args = {
+    current_cpu = "x64"
+    v8_current_cpu = "loong64"
+  }
+}
+
+# In a LaCrOS build, this toolchain is intended to be used as an alternate
+# toolchain to build Ash-Chrome in a subdirectory.
+# This is a test-only toolchain.
+clang_toolchain("ash_clang_x64") {
+  toolchain_args = {
+    # This turns the toolchain into the "Linux ChromeOS" build
+    current_os = "chromeos"
+    target_os = "chromeos"
+    current_cpu = current_cpu
+
+    # This turns off all of the LaCrOS-specific flags.
+    also_build_ash_chrome = false
+    chromeos_is_browser_only = false
+    use_clang_coverage = false
+
+    # Never build ash with asan. It is too slow for builders and causes
+    # tests to be flaky.
+    is_asan = false
+    is_lsan = false
+  }
+}
+
+# In an ash build, this toolchain is intended to be used as an alternate
+# toolchain to build lacros-Chrome in a subdirectory.
+# This is a test-only toolchain.
+clang_toolchain("lacros_clang_x64") {
+  toolchain_args = {
+    # This turns the toolchain into the "Lacros" build
+    current_os = "chromeos"
+    target_os = "chromeos"
+    current_cpu = current_cpu
+
+    # This turns on the LaCrOS-specific flag.
+    also_build_lacros_chrome = false
+    chromeos_is_browser_only = true
+    use_clang_coverage = false
+    dcheck_always_on = false
+  }
+}
+
+gcc_toolchain("x64") {
+  cc = "gcc"
+  cxx = "g++"
+
+  readelf = "readelf"
+  nm = "nm"
+  ar = "ar"
+  ld = cxx
+
+  # Output linker map files for binary size analysis.
+  enable_linker_map = true
+
+  toolchain_args = {
+    current_cpu = "x64"
+    current_os = "linux"
+
+    # reclient does not support gcc.
+    use_remoteexec = false
+    is_clang = false
+  }
+}
+
+clang_toolchain("clang_mipsel") {
+  toolchain_args = {
+    current_cpu = "mipsel"
+    current_os = "linux"
+  }
+}
+
+clang_toolchain("clang_mips64el") {
+  toolchain_args = {
+    current_cpu = "mips64el"
+    current_os = "linux"
+  }
+}
+
+gcc_toolchain("mipsel") {
+  toolprefix = "mipsel-linux-gnu-"
+
+  cc = "${toolprefix}gcc"
+  cxx = "${toolprefix}g++"
+  ar = "${toolprefix}ar"
+  ld = cxx
+  readelf = "${toolprefix}readelf"
+  nm = "${toolprefix}nm"
+
+  toolchain_args = {
+    cc_wrapper = ""
+    current_cpu = "mipsel"
+    current_os = "linux"
+
+    # reclient does not support gcc.
+ use_remoteexec = false + is_clang = false + use_goma = false + } +} + +gcc_toolchain("mips64el") { + toolprefix = "mips64el-linux-gnuabi64-" + + cc = "${toolprefix}gcc" + cxx = "${toolprefix}g++" + ar = "${toolprefix}ar" + ld = cxx + readelf = "${toolprefix}readelf" + nm = "${toolprefix}nm" + + toolchain_args = { + cc_wrapper = "" + current_cpu = "mips64el" + current_os = "linux" + + # reclient does not support gcc. + use_remoteexec = false + is_clang = false + use_goma = false + } +} + +clang_toolchain("clang_riscv64") { + enable_linker_map = true + toolchain_args = { + current_cpu = "riscv64" + current_os = "linux" + is_clang = true + } +} + +gcc_toolchain("riscv64") { + toolprefix = "riscv64-linux-gnu" + + cc = "${toolprefix}-gcc" + cxx = "${toolprefix}-g++" + + readelf = "${toolprefix}-readelf" + nm = "${toolprefix}-nm" + ar = "${toolprefix}-ar" + ld = cxx + + toolchain_args = { + current_cpu = "riscv64" + current_os = "linux" + is_clang = false + } +} + +clang_toolchain("clang_s390x") { + toolchain_args = { + current_cpu = "s390x" + current_os = "linux" + is_clang = true + } +} + +gcc_toolchain("s390x") { + cc = "gcc" + cxx = "g++" + + readelf = "readelf" + nm = "nm" + ar = "ar" + ld = cxx + + toolchain_args = { + current_cpu = "s390x" + current_os = "linux" + + # reclient does not support gcc. + use_remoteexec = false + is_clang = false + } +} + +gcc_toolchain("ppc64") { + cc = "gcc" + cxx = "g++" + + readelf = "readelf" + nm = "nm" + ar = "ar" + ld = cxx + + toolchain_args = { + current_cpu = "ppc64" + current_os = "linux" + + # reclient does not support gcc. + use_remoteexec = false + is_clang = false + } +} + +gcc_toolchain("mips") { + toolprefix = "mips-linux-gnu-" + + cc = "${toolprefix}gcc" + cxx = "${toolprefix}g++" + + readelf = "${toolprefix}readelf" + nm = "${toolprefix}nm" + ar = "${toolprefix}ar" + ld = cxx + + toolchain_args = { + current_cpu = "mips" + current_os = "linux" + + # reclient does not support gcc. + use_remoteexec = false + is_clang = false + } +} + +gcc_toolchain("mips64") { + toolprefix = "mips64-linux-gnuabi64-" + + cc = "${toolprefix}gcc" + cxx = "${toolprefix}g++" + + readelf = "${toolprefix}readelf" + nm = "${toolprefix}nm" + ar = "${toolprefix}ar" + ld = cxx + + toolchain_args = { + current_cpu = "mips64" + current_os = "linux" + + # reclient does not support gcc. + use_remoteexec = false + is_clang = false + } +} + +clang_toolchain("clang_loong64") { + toolchain_args = { + current_cpu = "loong64" + current_os = "linux" + } +} + +gcc_toolchain("loong64") { + toolprefix = "loongarch64-unknown-linux-gnu" + + cc = "${toolprefix}-gcc" + cxx = "${toolprefix}-g++" + + readelf = "${toolprefix}-readelf" + nm = "${toolprefix}-nm" + ar = "${toolprefix}-ar" + ld = cxx + + toolchain_args = { + current_cpu = "loong64" + current_os = "linux" + is_clang = false + } +} diff --git a/toolchain/linux/unbundle/BUILD.gn b/toolchain/linux/unbundle/BUILD.gn new file mode 100644 index 000000000000..a091491236bb --- /dev/null +++ b/toolchain/linux/unbundle/BUILD.gn @@ -0,0 +1,41 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
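+
+# These toolchains are configured entirely from the environment; for example
+# (an illustrative invocation, see README.md in this directory for details):
+#   CC=gcc CXX=g++ AR=ar NM=nm CFLAGS="-O2" gn gen out/Release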
+ +import("//build/toolchain/gcc_toolchain.gni") + +gcc_toolchain("default") { + cc = getenv("CC") + cxx = getenv("CXX") + ar = getenv("AR") + nm = getenv("NM") + ld = cxx + + extra_cflags = getenv("CFLAGS") + extra_cppflags = getenv("CPPFLAGS") + extra_cxxflags = getenv("CXXFLAGS") + extra_ldflags = getenv("LDFLAGS") + + toolchain_args = { + current_cpu = current_cpu + current_os = current_os + } +} + +gcc_toolchain("host") { + cc = getenv("BUILD_CC") + cxx = getenv("BUILD_CXX") + ar = getenv("BUILD_AR") + nm = getenv("BUILD_NM") + ld = cxx + + extra_cflags = getenv("BUILD_CFLAGS") + extra_cppflags = getenv("BUILD_CPPFLAGS") + extra_cxxflags = getenv("BUILD_CXXFLAGS") + extra_ldflags = getenv("BUILD_LDFLAGS") + + toolchain_args = { + current_cpu = current_cpu + current_os = current_os + } +} diff --git a/toolchain/linux/unbundle/README.md b/toolchain/linux/unbundle/README.md new file mode 100644 index 000000000000..73a81a33a2b6 --- /dev/null +++ b/toolchain/linux/unbundle/README.md @@ -0,0 +1,41 @@ +# Overview + +This directory contains files that make it possible for Linux +distributions to build Chromium using system toolchain. + +For more info on established way such builds are configured, +please read the following: + + - https://www.gnu.org/software/make/manual/html_node/Implicit-Variables.html + +Why do distros want CFLAGS, LDFLAGS, etc? Please read the following +for some examples. This is not an exhaustive list. + + - https://wiki.debian.org/Hardening + - https://wiki.ubuntu.com/DistCompilerFlags + - https://fedoraproject.org/wiki/Changes/Harden_All_Packages + - https://fedoraproject.org/wiki/Changes/Modernise_GCC_Flags + - https://fedoraproject.org/wiki/Packaging:Guidelines#Compiler_flags + - https://blog.flameeyes.eu/2010/09/are-we-done-with-ldflags/ + - https://blog.flameeyes.eu/2008/08/flags-and-flags/ + +# Usage + +Add the following to GN args: + +``` +custom_toolchain="//build/toolchain/linux/unbundle:default" +host_toolchain="//build/toolchain/linux/unbundle:default" +``` + +See [more docs on GN](https://gn.googlesource.com/gn/+/main/docs/quick_start.md). + +To cross-compile (not fully tested), add the following: + +``` +host_toolchain="//build/toolchain/linux/unbundle:host" +v8_snapshot_toolchain="//build/toolchain/linux/unbundle:host" +``` + +Note: when cross-compiling for a 32-bit target, a matching 32-bit toolchain +may be needed. diff --git a/toolchain/mac/BUILD.gn b/toolchain/mac/BUILD.gn new file mode 100644 index 000000000000..73f44ae18341 --- /dev/null +++ b/toolchain/mac/BUILD.gn @@ -0,0 +1,116 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/mac/mac_sdk.gni") +import("//build/config/v8_target_cpu.gni") +import("//build/toolchain/apple/toolchain.gni") +import("//build_overrides/build.gni") + +# Specialisation of the apple_toolchain template to declare the toolchain +# and its tools to build target for macOS platform. +template("mac_toolchain") { + assert(defined(invoker.toolchain_args), + "Toolchains must declare toolchain_args") + + apple_toolchain(target_name) { + forward_variables_from(invoker, "*", [ "toolchain_args" ]) + + bin_path = mac_bin_path + + toolchain_args = { + forward_variables_from(invoker.toolchain_args, "*") + current_os = "mac" + + if (target_os == "ios") { + # Use LLD for the host part of a chrome/ios build. + use_lld = true + + # Override `is_component_build` for the host toolchain. 
+        # See https://crbug.com/gn/286 for details on why this is
+        # required.
+        is_component_build = is_debug
+
+        # Defined in //base; this would trigger a warning if the build doesn't
+        # depend on it.
+        if (build_with_chromium) {
+          # Cronet disables this because it targets 32-bit;
+          # enable it unconditionally for the host toolchain.
+          use_allocator_shim = true
+        }
+
+        # TODO(crbug.com/753445): the use_sanitizer_coverage arg is currently
+        # not supported by the Chromium mac_clang_x64 toolchain on iOS
+        # distribution.
+        use_sanitizer_coverage = false
+      }
+    }
+  }
+}
+
+mac_toolchain("clang_arm") {
+  toolchain_args = {
+    current_cpu = "arm"
+  }
+}
+
+mac_toolchain("clang_arm64") {
+  toolchain_args = {
+    current_cpu = "arm64"
+  }
+}
+
+mac_toolchain("clang_x64") {
+  toolchain_args = {
+    current_cpu = "x64"
+  }
+}
+
+mac_toolchain("clang_x86") {
+  toolchain_args = {
+    current_cpu = "x86"
+  }
+}
+
+mac_toolchain("clang_x86_v8_arm") {
+  toolchain_args = {
+    current_cpu = "x86"
+    v8_current_cpu = "arm"
+  }
+}
+
+mac_toolchain("clang_x86_v8_mipsel") {
+  toolchain_args = {
+    current_cpu = "x86"
+    v8_current_cpu = "mipsel"
+  }
+}
+
+mac_toolchain("clang_x64_v8_arm64") {
+  toolchain_args = {
+    current_cpu = "x64"
+    v8_current_cpu = "arm64"
+  }
+}
+
+mac_toolchain("clang_x64_v8_mips64el") {
+  toolchain_args = {
+    current_cpu = "x64"
+    v8_current_cpu = "mips64el"
+  }
+}
+
+mac_toolchain("clang_arm64_v8_x64") {
+  toolchain_args = {
+    current_cpu = "arm64"
+    v8_current_cpu = "x64"
+  }
+}
+
+# Needed to run v8 on the host during an arm64 -> x86_64 cross-build
+mac_toolchain("clang_arm64_v8_arm64") {
+  toolchain_args = {
+    current_cpu = "arm64"
+    v8_current_cpu = "arm64"
+  }
+}
diff --git a/toolchain/mac/OWNERS b/toolchain/mac/OWNERS
new file mode 100644
index 000000000000..6f3324f07c8f
--- /dev/null
+++ b/toolchain/mac/OWNERS
@@ -0,0 +1 @@
+file://build/apple/OWNERS
diff --git a/toolchain/nacl/BUILD.gn b/toolchain/nacl/BUILD.gn
new file mode 100644
index 000000000000..00f718976435
--- /dev/null
+++ b/toolchain/nacl/BUILD.gn
@@ -0,0 +1,290 @@
+# Copyright (c) 2014 The Native Client Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/nacl/config.gni")
+import("//build/config/sysroot.gni")
+import("//build/toolchain/nacl_toolchain.gni")
+
+# Add the toolchain revision as a preprocessor define so that sources are
+# rebuilt when a toolchain is updated.
+# Ideally we could use the toolchain deps feature, but currently that feature
+# is bugged and does not trigger a rebuild.
+# https://code.google.com/p/chromium/issues/detail?id=431880
+# Calls to get the toolchain revision are relatively slow, so do them all in a
+# single batch to amortize python startup, etc.
+revisions = exec_script("//native_client/build/get_toolchain_revision.py",
+                        [
+                          "nacl_x86_glibc",
+                          "nacl_arm_glibc",
+                          "pnacl_newlib",
+                          "saigo_newlib",
+                        ],
+                        "trim list lines")
+nacl_x86_glibc_rev = revisions[0]
+nacl_arm_glibc_rev = revisions[1]
+
+pnacl_newlib_rev = revisions[2]
+saigo_newlib_rev = revisions[3]
+
+if (host_os == "win") {
+  toolsuffix = ".exe"
+} else {
+  toolsuffix = ""
+}
+
+# The PNaCl toolchain tools are all wrapper scripts rather than binary
+# executables. On POSIX systems, nobody cares what kind of executable
+# file you are. But on Windows, scripts (.bat files) cannot be run
+# directly and need the Windows shell (cmd.exe) specified explicitly.
+if (host_os == "win") {
+  # NOTE!
The //build/toolchain/gcc_*_wrapper.py scripts recognize + # this exact prefix string, so they must be updated if this string + # is changed in any way. + scriptprefix = "cmd /c call " + scriptsuffix = ".bat" +} else { + scriptprefix = "" + scriptsuffix = "" +} + +# When the compilers are run via goma, rbe or ccache rather than directly by +# GN/Ninja, the rbe/goma/ccache wrapper handles .bat files but gets confused +# by being given the scriptprefix. +if (host_os == "win" && !use_goma && !use_remoteexec && cc_wrapper == "") { + compiler_scriptprefix = scriptprefix +} else { + compiler_scriptprefix = "" +} + +template("pnacl_toolchain") { + assert(defined(invoker.executable_extension), + "Must define executable_extension") + + nacl_toolchain(target_name) { + toolchain_package = "pnacl_newlib" + toolchain_revision = pnacl_newlib_rev + toolprefix = + rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/pnacl-", + root_build_dir) + + if (host_os == "win") { + # Flip the slashes so that copy/paste of the commands works. + # This is also done throughout build\toolchain\win\BUILD.gn + toolprefix = string_replace(toolprefix, "/", "\\") + } + + cc = compiler_scriptprefix + toolprefix + "clang" + scriptsuffix + cxx = compiler_scriptprefix + toolprefix + "clang++" + scriptsuffix + ar = toolprefix + "ar" + scriptsuffix + readelf = scriptprefix + toolprefix + "readelf" + scriptsuffix + nm = scriptprefix + toolprefix + "nm" + scriptsuffix + if (defined(invoker.strip)) { + strip = scriptprefix + toolprefix + invoker.strip + scriptsuffix + } + forward_variables_from(invoker, + [ + "executable_extension", + "is_clang_analysis_supported", + "extra_cppflags", + ]) + + # Note this is not the usual "ld = cxx" because "ld" uses are + # never run via goma, so this needs scriptprefix. + ld = scriptprefix + toolprefix + "clang++" + scriptsuffix + + toolchain_args = { + is_clang = true + current_cpu = "pnacl" + use_lld = false + } + } +} + +pnacl_toolchain("newlib_pnacl") { + executable_extension = ".pexe" + + # The pnacl-finalize tool turns a .pexe.debug file into a .pexe file. + # It's very similar in purpose to the traditional "strip" utility: it + # turns what comes out of the linker into what you actually want to + # distribute and run. PNaCl doesn't have a "strip"-like utility that + # you ever actually want to use other than pnacl-finalize, so just + # make pnacl-finalize the strip tool rather than adding an additional + # step like "postlink" to run pnacl-finalize. + strip = "finalize" +} + +template("nacl_glibc_toolchain") { + toolchain_cpu = target_name + assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple") + assert(defined(invoker.toolchain_package), "Must define toolchain_package") + assert(defined(invoker.toolchain_revision), "Must define toolchain_revision") + forward_variables_from(invoker, + [ + "toolchain_package", + "toolchain_revision", + ]) + + toolprefix = rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/" + + invoker.toolchain_tuple + "-", + root_build_dir) + + if (host_os == "win") { + # Flip the slashes so that copy/paste of the commands works. 
+    # This is also done throughout build\toolchain\win\BUILD.gn
+    toolprefix = string_replace(toolprefix, "/", "\\")
+  }
+
+  nacl_toolchain("glibc_" + toolchain_cpu) {
+    cc = toolprefix + "gcc" + toolsuffix
+    cxx = toolprefix + "g++" + toolsuffix
+    ar = toolprefix + "ar" + toolsuffix
+    ld = cxx
+    readelf = toolprefix + "readelf" + toolsuffix
+    nm = toolprefix + "nm" + toolsuffix
+    strip = toolprefix + "strip" + toolsuffix
+
+    toolchain_args = {
+      current_cpu = toolchain_cpu
+
+      # reclient does not support gcc.
+      use_remoteexec = false
+      is_clang = false
+      is_nacl_glibc = true
+      use_lld = false
+    }
+  }
+}
+
+nacl_glibc_toolchain("x86") {
+  toolchain_package = "nacl_x86_glibc"
+  toolchain_revision = nacl_x86_glibc_rev
+
+  # Rely on the :compiler_cpu_abi config adding the -m32 flag here rather
+  # than using the i686-nacl binary directly. This is because i686-nacl-gcc
+  # is a shell script wrapper around x86_64-nacl-gcc and goma has trouble with
+  # compiler executables that are shell scripts (so the i686 'compiler' is not
+  # currently in goma).
+  toolchain_tuple = "x86_64-nacl"
+}
+
+nacl_glibc_toolchain("x64") {
+  toolchain_package = "nacl_x86_glibc"
+  toolchain_revision = nacl_x86_glibc_rev
+  toolchain_tuple = "x86_64-nacl"
+}
+
+nacl_glibc_toolchain("arm") {
+  toolchain_package = "nacl_arm_glibc"
+  toolchain_revision = nacl_arm_glibc_rev
+  toolchain_tuple = "arm-nacl"
+}
+
+template("nacl_clang_toolchain") {
+  toolchain_cpu = target_name
+  assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple")
+
+  toolchain_package = "pnacl_newlib"
+  toolchain_revision = pnacl_newlib_rev
+  toolprefix = rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/" +
+                               invoker.toolchain_tuple + "-",
+                           root_build_dir)
+
+  if (host_os == "win") {
+    # Flip the slashes so that copy/paste of the commands works.
+    # This is also done throughout build\toolchain\win\BUILD.gn
+    toolprefix = string_replace(toolprefix, "/", "\\")
+  }
+
+  nacl_toolchain("clang_newlib_" + toolchain_cpu) {
+    cc = toolprefix + "clang" + toolsuffix
+    cxx = toolprefix + "clang++" + toolsuffix
+    ar = toolprefix + "ar" + toolsuffix
+    ld = cxx
+    readelf = toolprefix + "readelf" + toolsuffix
+    nm = toolprefix + "nm" + toolsuffix
+    strip = toolprefix + "strip" + toolsuffix
+
+    toolchain_args = {
+      current_cpu = toolchain_cpu
+      is_clang = true
+      use_lld = false
+    }
+  }
+}
+
+template("nacl_irt_toolchain") {
+  toolchain_cpu = target_name
+  assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple")
+
+  toolchain_package = "saigo_newlib"
+  toolchain_revision = saigo_newlib_rev
+  toolprefix = rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/" +
+                               invoker.toolchain_tuple + "-",
+                           root_build_dir)
+
+  if (host_os == "win") {
+    # Flip the slashes so that copy/paste of the commands works.
+ # This is also done throughout build\toolchain\win\BUILD.gn + toolprefix = string_replace(toolprefix, "/", "\\") + } + + link_irt = rebase_path("//native_client/build/link_irt.py", root_build_dir) + + tls_edit_label = + "//native_client/src/tools/tls_edit:tls_edit($host_toolchain)" + host_toolchain_out_dir = + rebase_path(get_label_info(tls_edit_label, "root_out_dir"), + root_build_dir) + tls_edit = "${host_toolchain_out_dir}/tls_edit" + + nacl_toolchain("irt_" + toolchain_cpu) { + cc = toolprefix + "clang" + toolsuffix + cxx = toolprefix + "clang++" + toolsuffix + ar = toolprefix + "ar" + toolsuffix + readelf = toolprefix + "readelf" + toolsuffix + nm = toolprefix + "nm" + toolsuffix + strip = toolprefix + "strip" + toolsuffix + + # Some IRT implementations (notably, Chromium's) contain C++ code, + # so we need to link w/ the C++ linker. + ld = "${python_path} ${link_irt} --tls-edit=${tls_edit} --link-cmd=${cxx} --readelf-cmd=${readelf}" + + toolchain_args = { + current_cpu = toolchain_cpu + is_clang = true + use_lld = false + is_nacl_saigo = true + } + + # TODO(ncbray): depend on link script + deps = [ tls_edit_label ] + } +} + +template("nacl_clang_toolchains") { + assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple") + nacl_clang_toolchain(target_name) { + toolchain_tuple = invoker.toolchain_tuple + } + nacl_irt_toolchain(target_name) { + toolchain_tuple = invoker.toolchain_tuple + } +} + +nacl_clang_toolchains("x86") { + # Rely on :compiler_cpu_abi adding -m32. See nacl_x86_glibc above. + toolchain_tuple = "x86_64-nacl" +} + +nacl_clang_toolchains("x64") { + toolchain_tuple = "x86_64-nacl" +} + +nacl_clang_toolchains("arm") { + toolchain_tuple = "arm-nacl" +} + +nacl_clang_toolchains("mipsel") { + toolchain_tuple = "mipsel-nacl" +} diff --git a/toolchain/nacl_toolchain.gni b/toolchain/nacl_toolchain.gni new file mode 100644 index 000000000000..bebdbd80c968 --- /dev/null +++ b/toolchain/nacl_toolchain.gni @@ -0,0 +1,73 @@ +# Copyright (c) 2014 The Native Client Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/nacl/config.gni") +import("//build/toolchain/gcc_toolchain.gni") + +# This template defines a NaCl toolchain. +# +# It requires the following variables specifying the executables to run: +# - cc +# - cxx +# - ar +# - ld + +template("nacl_toolchain") { + assert(defined(invoker.cc), "nacl_toolchain() must specify a \"cc\" value") + assert(defined(invoker.cxx), "nacl_toolchain() must specify a \"cxx\" value") + assert(defined(invoker.ar), "nacl_toolchain() must specify a \"ar\" value") + assert(defined(invoker.ld), "nacl_toolchain() must specify a \"ld\" value") + gcc_toolchain(target_name) { + if (defined(invoker.executable_extension)) { + executable_extension = invoker.executable_extension + } else { + executable_extension = ".nexe" + } + rebuild_define = "NACL_TC_REV=" + invoker.toolchain_revision + + forward_variables_from(invoker, + [ + "ar", + "cc", + "cxx", + "deps", + "ld", + "link_outputs", + "nm", + "readelf", + "strip", + "extra_cppflags", + ]) + + toolchain_args = { + # Use all values set on the invoker's toolchain_args. + forward_variables_from(invoker.toolchain_args, "*") + + current_os = "nacl" + + # We do not support component builds with the NaCl toolchains. + is_component_build = false + + # We do not support clang profiling in the NaCl toolchains. 
+      use_clang_profiling = false
+      use_clang_coverage = false
+      coverage_instrumentation_input_file = ""
+
+      if (use_remoteexec) {
+        if (is_win) {
+          rbe_cc_cfg_file = "${rbe_cfg_dir}/nacl/rewrapper_windows.cfg"
+        } else if (is_mac) {
+          rbe_cc_cfg_file = "${rbe_cfg_dir}/nacl/rewrapper_mac.cfg"
+        } else {
+          # TODO(ukai): non linux?
+          rbe_cc_cfg_file = "${rbe_cfg_dir}/nacl/rewrapper_linux.cfg"
+        }
+      }
+
+      if (use_remoteexec_links) {
+        rbe_link_cfg_file = "${rbe_cfg_dir}/nacl/rewrapper_linux_link.cfg"
+      }
+    }
+  }
+}
diff --git a/toolchain/rbe.gni b/toolchain/rbe.gni
new file mode 100644
index 000000000000..cd86e22c01ef
--- /dev/null
+++ b/toolchain/rbe.gni
@@ -0,0 +1,62 @@
+# Defines the configuration of Remote Build Execution (RBE).
+
+# The directory where the re-client tooling binaries are.
+rbe_bin_dir = rebase_path("//buildtools/reclient", root_build_dir)
+
+declare_args() {
+  # Execution root - this should be the root of the source tree.
+  # This is defined here instead of in the config file because
+  # this will vary depending on where the user has placed the
+  # chromium source on their system.
+  rbe_exec_root = rebase_path("//")
+
+  # Set to true to enable remote compilation using reclient.
+  use_remoteexec = false
+
+  # Set to true to enable remote linking using reclient.
+  use_remoteexec_links = false
+
+  # The directory where the re-client configuration files are.
+  rbe_cfg_dir = rebase_path("//buildtools/reclient_cfgs", root_build_dir)
+}
+
+declare_args() {
+  # Set to the path of the RBE reclient configuration files.
+  # Configuration file selection based on operating system.
+  if (is_linux || is_android || is_chromeos || is_fuchsia) {
+    rbe_py_cfg_file = "${rbe_cfg_dir}/python/rewrapper_linux.cfg"
+    rbe_cc_cfg_file =
+        "${rbe_cfg_dir}/chromium-browser-clang/rewrapper_linux.cfg"
+    rbe_link_cfg_file =
+        "${rbe_cfg_dir}/chromium-browser-clang/rewrapper_linux_link.cfg"
+  } else if (is_win) {
+    rbe_py_cfg_file = "${rbe_cfg_dir}/python/rewrapper_windows.cfg"
+    rbe_cc_cfg_file =
+        "${rbe_cfg_dir}/chromium-browser-clang/rewrapper_windows.cfg"
+    rbe_link_cfg_file = ""
+  } else if (is_mac || is_ios) {
+    rbe_py_cfg_file = "${rbe_cfg_dir}/python/rewrapper_mac.cfg"
+    rbe_cc_cfg_file = "${rbe_cfg_dir}/chromium-browser-clang/rewrapper_mac.cfg"
+    rbe_link_cfg_file = ""
+  } else {
+    rbe_link_cfg_file = ""
+    rbe_py_cfg_file = ""
+    rbe_cc_cfg_file = ""
+  }
+
+  # Set to the path of the RBE reclient wrapper for ChromeOS.
+  rbe_cros_cc_wrapper = "${rbe_bin_dir}/rewrapper"
+}
+
+if (is_win) {
+  if (use_remoteexec_links) {
+    print("For now, remote linking is not available for Windows.")
+    use_remoteexec_links = false
+  }
+}
+if (is_mac || is_ios) {
+  if (use_remoteexec_links) {
+    print("For now, remote linking is not available on Macs.")
+    use_remoteexec_links = false
+  }
+}
diff --git a/toolchain/toolchain.gni b/toolchain/toolchain.gni
new file mode 100644
index 000000000000..d32d7d0e9dd9
--- /dev/null
+++ b/toolchain/toolchain.gni
@@ -0,0 +1,105 @@
+# Copyright 2015 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Toolchain-related configuration that may be needed outside the context of the
+# toolchain() rules themselves.
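+#
+# For example (an illustrative use, not part of this patch), a BUILD.gn file
+# can import this file and use the shared-library naming variables it defines:
+#   import("//build/toolchain/toolchain.gni")
+#   _libname = "${shlib_prefix}base${shlib_extension}"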
+ +import("//build/config/chrome_build.gni") +import("//build/config/chromecast_build.gni") +import("//build_overrides/build.gni") + +declare_args() { + # If this is set to true, we use the revision in the llvm repo to determine + # the CLANG_REVISION to use, instead of the version hard-coded into + # //tools/clang/scripts/update.py. This should only be used in + # conjunction with setting the llvm_force_head_revision DEPS variable when + # `gclient runhooks` is run as well. + llvm_force_head_revision = false + + # Used for binary size analysis. + generate_linker_map = is_android && is_official_build + + # Whether this toolchain should avoid building any sanitizer support + # because it's a host toolchain where we aim to make shared objects that may + # be loaded by prebuilt binaries without sanitizer support. + toolchain_disables_sanitizers = false +} + +if (generate_linker_map) { + assert(is_official_build || is_castos || is_cast_android, + "Linker map files should only be generated when is_official_build = " + + "true or is_castos = true or is_cast_android = true") + assert(current_os == "android" || current_os == "linux" || + target_os == "android" || target_os == "linux" || + target_os == "chromeos", + "Linker map files should only be generated for Android, Linux, " + + "or ChromeOS.") +} + +declare_args() { + clang_version = "17" +} + +# Extension for shared library files (including leading dot). +if (is_apple) { + shlib_extension = ".dylib" +} else if (is_android && is_component_build) { + # By appending .cr, we prevent name collisions with libraries already + # loaded by the Android zygote. + shlib_extension = ".cr.so" +} else if (is_posix || is_fuchsia) { + shlib_extension = ".so" +} else if (is_win) { + shlib_extension = ".dll" +} else { + assert(false, "Platform not supported") +} + +# Prefix for shared library files. +if (is_posix || is_fuchsia) { + shlib_prefix = "lib" +} else { + shlib_prefix = "" +} + +# Directory for shared library files. +if (is_fuchsia) { + shlib_subdir = "/lib" +} else { + shlib_subdir = "" +} + +# While other "tool"s in a toolchain are specific to the target of that +# toolchain, the "stamp" and "copy" tools are really generic to the host; +# but each toolchain must define them separately. GN doesn't allow a +# template instantiation inside a toolchain definition, so some boilerplate +# has to be repeated in each toolchain to define these two tools. These +# four variables reduce the duplication in that boilerplate. +stamp_description = "STAMP {{output}}" +copy_description = "COPY {{source}} {{output}}" +if (host_os == "win") { + _tool_wrapper_path = + rebase_path("//build/toolchain/win/tool_wrapper.py", root_build_dir) + + stamp_command = "cmd /c type nul > \"{{output}}\"" + copy_command = + "\"$python_path\" $_tool_wrapper_path recursive-mirror {{source}} {{output}}" +} else { + stamp_command = "touch {{output}}" + copy_command = "ln -f {{source}} {{output}} 2>/dev/null || (rm -rf {{output}} && cp -af {{source}} {{output}})" +} + +if (!defined(toolchain_disables_sanitizers)) { + toolchain_disables_sanitizers = false +} + +# This variable is true if the current toolchain is one of the target +# toolchains, i.e. a toolchain which is being used to build the main Chrome +# binary. This generally means "not the host toolchain", but in the case where +# we're targeting the host it's true then as well. We do require current_os to +# match target_os so that for example we avoid considering Android as a target +# toolchain when targeting CrOS. 
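+# For example, in a Linux-hosted Android build the Android toolchains are
+# target toolchains while the Linux host toolchain is not; when targeting the
+# host itself, the default toolchain equals the host toolchain and the
+# condition below still holds.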
+is_a_target_toolchain =
+    (current_toolchain != host_toolchain ||
+     default_toolchain == host_toolchain) && current_os == target_os
diff --git a/toolchain/whole_archive.py b/toolchain/whole_archive.py
new file mode 100644
index 000000000000..aeeb0ddc6997
--- /dev/null
+++ b/toolchain/whole_archive.py
@@ -0,0 +1,51 @@
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import re
+
+
+def wrap_with_whole_archive(command):
+  """Modify and return `command` such that -LinkWrapper,add-whole-archive=X
+  becomes a linking inclusion X (-lX) but wrapped in whole-archive
+  modifiers."""
+
+  # We want to link rlibs as --whole-archive if they are part of a unit test
+  # target. This is determined by switch `-LinkWrapper,add-whole-archive`.
+  #
+  # TODO(danakj): If the linking command line gets too large we could move
+  # {{rlibs}} into the rsp file, but then this script needs to modify the rsp
+  # file instead of the command line.
+  def extract_libname(s):
+    m = re.match(r'-LinkWrapper,add-whole-archive=(.+)', s)
+    return m.group(1)
+
+  # The set of libraries we want to apply `--whole-archive` to.
+  whole_archive_libs = [
+      extract_libname(x) for x in command
+      if x.startswith("-LinkWrapper,add-whole-archive=")
+  ]
+
+  # Remove the arguments meant for consumption by this LinkWrapper script.
+  command = [x for x in command if not x.startswith("-LinkWrapper,")]
+
+  def has_any_suffix(string, suffixes):
+    for suffix in suffixes:
+      if string.endswith(suffix):
+        return True
+    return False
+
+  def wrap_libs_with(command, libnames, before, after):
+    out = []
+    for arg in command:
+      # The arg is a full path to a library; check whether the library name (a
+      # suffix of the full arg) is one of `libnames`.
+      if has_any_suffix(arg, libnames):
+        out.extend([before, arg, after])
+      else:
+        out.append(arg)
+    return out
+
+  # Apply --whole-archive to the libraries that desire it.
+  return wrap_libs_with(command, whole_archive_libs, "-Wl,--whole-archive",
+                        "-Wl,--no-whole-archive")
diff --git a/toolchain/win/BUILD.gn b/toolchain/win/BUILD.gn
new file mode 100644
index 000000000000..fe0979038df8
--- /dev/null
+++ b/toolchain/win/BUILD.gn
@@ -0,0 +1,94 @@
+# Copyright 2013 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/win/visual_studio_version.gni")
+import("//build/toolchain/win/toolchain.gni")
+
+assert(is_win, "Should only be running on Windows")
+
+# Set up the Visual Studio state.
+#
+# Its arguments are the VS path and the compiler wrapper tool. It will write
+# "environment.x86" and "environment.x64" to the build directory and return a
+# list to us.
+
+# Copy the VS runtime DLL for the default toolchain to the root build directory
+# so things will run.
+if (current_toolchain == default_toolchain) {
+  if (is_debug) {
+    configuration_name = "Debug"
+  } else {
+    configuration_name = "Release"
+  }
+  exec_script("../../vs_toolchain.py",
+              [
+                "copy_dlls",
+                rebase_path(root_build_dir),
+                configuration_name,
+                target_cpu,
+              ])
+}
+
+if (target_cpu == "x86" || target_cpu == "x64") {
+  win_toolchains("x86") {
+    toolchain_arch = "x86"
+  }
+  win_toolchains("x64") {
+    toolchain_arch = "x64"
+  }
+}
+
+if (target_cpu == "arm64") {
+  win_toolchains("arm64") {
+    toolchain_arch = "arm64"
+  }
+  win_toolchains(host_cpu) {
+    toolchain_arch = host_cpu
+  }
+}
+
+# The nacl_win64 toolchain is nearly identical to the plain x64 toolchain.
+# It's used solely for building nacl64.exe (//components/nacl/broker:nacl64).
+# The only reason it's a separate toolchain is so that it can force
+# is_component_build to false in the toolchain_args() block, because
+# building nacl64.exe in component style does not work.
+win_toolchains("nacl_win64") {
+  toolchain_arch = "x64"
+  toolchain_args = {
+    is_component_build = false
+  }
+}
+
+# WinUWP toolchains. Only define these when targeting them.
+
+if (target_os == "winuwp") {
+  assert(target_cpu == "x64" || target_cpu == "x86" || target_cpu == "arm" ||
+         target_cpu == "arm64")
+
+  # Note that //build/toolchain/win/win_toolchain_data.gni collects the output
+  # of setup_toolchain.py; however, it is not compatible with the UWP
+  # toolchain, as the UWP toolchain requires the `environment.store_$CPU`
+  # variable instead of the usual `environment.$CPU`.
+  store_cpu_toolchain_data =
+      exec_script("//build/toolchain/win/setup_toolchain.py",
+                  [
+                    visual_studio_path,
+                    windows_sdk_path,
+                    visual_studio_runtime_dirs,
+                    target_os,
+                    target_cpu,
+                    "environment.store_" + target_cpu,
+                  ],
+                  "scope")
+
+  msvc_toolchain("uwp_" + target_cpu) {
+    environment = "environment.store_" + target_cpu
+    cl = "\"${store_cpu_toolchain_data.vc_bin_dir}/cl.exe\""
+    toolchain_args = {
+      current_os = "winuwp"
+      current_cpu = target_cpu
+      is_clang = false
+    }
+  }
+}
diff --git a/toolchain/win/midl.gni b/toolchain/win/midl.gni
new file mode 100644
index 000000000000..1f3ea409da12
--- /dev/null
+++ b/toolchain/win/midl.gni
@@ -0,0 +1,200 @@
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_win)
+
+import("//build/config/win/visual_studio_version.gni")
+
+# This template defines a rule to invoke the MS IDL compiler. The generated
+# source code will be compiled and linked into targets that depend on this.
+#
+# Parameters
+#
+#   sources
+#       List of .idl files to process.
+#
+#   header_file (optional)
+#       File name of generated header file. Defaults to the basename of the
+#       source idl file with a .h extension.
+#
+#   out_dir (optional)
+#       Directory to write the generated files to. Defaults to target_gen_dir.
+#
+#   generated_dir (optional)
+#       Directory where generated files were previously persisted.
+#       Defaults to third_party\win_build_output\midl\|out_dir|.
+#
+#   dynamic_guids (optional)
+#       If the GUIDs are not constant across builds, the current GUID
+#       substitutions.
+#       |dynamic_guids| is of the form:
+#         "PLACEHOLDER-GUID-158428a4-6014-4978-83ba-9fad0dabe791="
+#         "3d852661-c795-4d20-9b95-5561e9a1d2d9,"
+#         "PLACEHOLDER-GUID-63B8FFB1-5314-48C9-9C57-93EC8BC6184B="
+#         "D0E1CACC-C63C-4192-94AB-BF8EAD0E3B83".
+#       See midl.py for more details.
+#
+#   writes_tlb (optional)
+#       Whether a .tlb file should be added to outputs. Defaults to false.
+#
+#   writes_proxy (optional)
+#       Whether a _p.c file should be added to outputs. Defaults to true.
+#
+#   writes_dlldata (optional)
+#       Whether a .dlldata.c file should be added to outputs. Defaults to true.
+#
+#   deps (optional)
+#
+#   defines (optional)
+#       Build time defines to be passed to midl.exe as /D parameter.
+#
+#   visibility (optional)
+
+template("midl") {
+  action_name = "${target_name}_idl_action"
+  source_set_name = target_name
+
+  assert(defined(invoker.sources), "Source must be defined for $target_name")
+
+  if (defined(invoker.out_dir)) {
+    out_dir = invoker.out_dir
+  } else {
+    out_dir = target_gen_dir
+  }
+
+  if (defined(invoker.generated_dir)) {
+    generated_dir = rebase_path(invoker.generated_dir, root_build_dir)
+  } else {
+    # midl.py expects 'gen' to be replaced with 'midl'.
+    generated_dir =
+        rebase_path("//third_party/win_build_output", root_build_dir) +
+        "/midl/" + rebase_path(out_dir, root_gen_dir)
+  }
+
+  if (defined(invoker.dynamic_guids)) {
+    dynamic_guids = invoker.dynamic_guids
+  } else {
+    dynamic_guids = "none"
+  }
+
+  if (defined(invoker.header_file)) {
+    header_file = invoker.header_file
+  } else {
+    header_file = "{{source_name_part}}.h"
+  }
+
+  if (defined(invoker.writes_tlb)) {
+    writes_tlb = invoker.writes_tlb
+  } else {
+    writes_tlb = false
+  }
+
+  if (defined(invoker.writes_proxy)) {
+    writes_proxy = invoker.writes_proxy
+  } else {
+    writes_proxy = true
+  }
+
+  if (defined(invoker.writes_dlldata)) {
+    writes_dlldata = invoker.writes_dlldata
+  } else {
+    writes_dlldata = true
+  }
+
+  if (writes_tlb) {
+    type_library_file = "{{source_name_part}}.tlb"
+  } else {
+    type_library_file = "none"
+  }
+
+  if (writes_dlldata) {
+    dlldata_file = "{{source_name_part}}.dlldata.c"
+  } else {
+    dlldata_file = "none"
+  }
+
+  if (writes_proxy) {
+    proxy_file = "{{source_name_part}}_p.c"
+  } else {
+    proxy_file = "none"
+  }
+
+  interface_identifier_file = "{{source_name_part}}_i.c"
+
+  action_foreach(action_name) {
+    visibility = [ ":$source_set_name" ]
+    script = "//build/toolchain/win/midl.py"
+
+    sources = invoker.sources
+
+    outputs = [
+      "$out_dir/$header_file",
+      "$out_dir/$interface_identifier_file",
+    ]
+
+    # These files are only added to outputs if the invoker so desires, as
+    # they are not always generated, depending on the content of the input
+    # idl file.
+    if (writes_tlb) {
+      outputs += [ "$out_dir/$type_library_file" ]
+    }
+    if (writes_dlldata) {
+      outputs += [ "$out_dir/$dlldata_file" ]
+    }
+    if (writes_proxy) {
+      outputs += [ "$out_dir/$proxy_file" ]
+    }
+
+    if (current_cpu == "x86") {
+      win_tool_arch = "environment.x86"
+      idl_target_platform = "win32"
+    } else if (current_cpu == "x64") {
+      win_tool_arch = "environment.x64"
+      idl_target_platform = "x64"
+    } else if (current_cpu == "arm64") {
+      win_tool_arch = "environment.arm64"
+      idl_target_platform = "arm64"
+    } else {
+      assert(false, "Need environment for this arch")
+    }
+
+    args = [
+      win_tool_arch,
+      generated_dir,
+      rebase_path(out_dir, root_build_dir),
+      dynamic_guids,
+      type_library_file,
+      header_file,
+      dlldata_file,
+      interface_identifier_file,
+      proxy_file,
+      rebase_path("//third_party/llvm-build/Release+Asserts/bin/clang-cl.exe",
+                  root_build_dir),
+      "{{source}}",
+      "/char",
+      "signed",
+      "/env",
+      idl_target_platform,
+      "/Oicf",
+    ]
+
+    if (defined(invoker.defines)) {
+      foreach(define, invoker.defines) {
+        args += [ "/D" + define ]
+      }
+    }
+
+    forward_variables_from(invoker, [ "deps" ])
+  }
+
+  source_set(target_name) {
+    forward_variables_from(invoker, [ "visibility" ])
+
+    # We only compile the IID files from the IDL tool rather than all outputs.
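+    # process_file_template() expands the {{source_name_part}} pattern above
+    # for every file in invoker.sources, so e.g. a hypothetical foo.idl maps
+    # to "$out_dir/foo_i.c".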
+    sources = process_file_template(invoker.sources,
+                                    [ "$out_dir/$interface_identifier_file" ])
+
+    public_deps = [ ":$action_name" ]
+  }
+}
diff --git a/toolchain/win/midl.py b/toolchain/win/midl.py
new file mode 100644
index 000000000000..9c0d0a4ccff6
--- /dev/null
+++ b/toolchain/win/midl.py
@@ -0,0 +1,486 @@
+# Copyright 2017 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import division
+
+import array
+import difflib
+import filecmp
+import io
+import operator
+import os
+import posixpath
+import re
+import shutil
+import struct
+import subprocess
+import sys
+import tempfile
+import uuid
+
+from functools import reduce
+
+
+def ZapTimestamp(filename):
+  contents = open(filename, 'rb').read()
+  # midl.exe writes timestamp 2147483647 (2^31 - 1) as creation date into its
+  # outputs, but using the local timezone. To make the output timezone-
+  # independent, replace that date with a fixed string of the same length.
+  # Also blank out the minor version number.
+  if filename.endswith('.tlb'):
+    # See https://chromium-review.googlesource.com/c/chromium/src/+/693223 for
+    # a fairly complete description of the .tlb binary format.
+    # TLB files start with a 54 byte header. Offset 0x20 stores how many types
+    # are defined in the file, and the header is followed by that many uint32s.
+    # After that, 15 section headers appear. Each section header is 16 bytes,
+    # starting with offset and length uint32s.
+    # Section 12 in the file contains custom() data. custom() data has a type
+    # (int, string, etc). Each custom data chunk starts with a uint16_t
+    # describing its type. Type 8 is string data, consisting of a uint32_t
+    # len, followed by that many data bytes, followed by 'W' bytes to pad to a
+    # 4 byte boundary. Type 0x13 is uint32 data, followed by 4 data bytes,
+    # followed by two 'W' to pad to a 4 byte boundary.
+    # The custom block always starts with one string containing "Created by
+    # MIDL version 8...", followed by one uint32 containing 0x7fffffff,
+    # followed by another uint32 containing the MIDL compiler version (e.g.
+    # 0x0801026e for v8.1.622 -- 0x26e == 622). These 3 fields take 0x54 bytes.
+    # There might be more custom data after that, but these 3 blocks are always
+    # there for file-level metadata.
+    # All data is little-endian in the file.
+    assert contents[0:8] == b'MSFT\x02\x00\x01\x00'
+    ntypes, = struct.unpack_from('<I', contents, 0x20)
+    custom_off, custom_len = struct.unpack_from(
+        '<II', contents, 0x54 + 4 * ntypes + 11 * 16)
+    assert custom_len >= 0x54
+    # First: Type string (0x8), followed by 0x3e characters.
+    assert contents[custom_off:custom_off + 6] == b'\x08\x00\x3e\x00\x00\x00'
+    assert re.match(
+        br'Created by MIDL version 8\.\d\d\.\d{4} '
+        br'at ... Jan 1. ..:..:.. 2038\n',
+        contents[custom_off + 6:custom_off + 6 + 0x3e])
+    # Second: Type uint32 (0x13) storing 0x7fffffff (followed by WW / 0x57 pad)
+    assert contents[custom_off+6+0x3e:custom_off+6+0x3e+8] == \
+        b'\x13\x00\xff\xff\xff\x7f\x57\x57'
+    # Third: Type uint32 (0x13) storing MIDL compiler version.
+    assert contents[custom_off + 6 + 0x3e + 8:custom_off + 6 + 0x3e + 8 +
+                    2] == b'\x13\x00'
+    # Replace "Created by" string with fixed string, and fixed MIDL version with
+    # 8.1.622 always.
+ contents = ( + contents[0:custom_off + 6] + + b'Created by MIDL version 8.xx.xxxx at a redacted point in time\n' + + # uint32 (0x13) val 0x7fffffff, WW, uint32 (0x13), val 0x0801026e, WW + b'\x13\x00\xff\xff\xff\x7f\x57\x57\x13\x00\x6e\x02\x01\x08\x57\x57' + + contents[custom_off + 0x54:]) + else: + contents = re.sub( + br'File created by MIDL compiler version 8\.\d\d\.\d{4} \*/\r\n' + br'/\* at ... Jan 1. ..:..:.. 2038', + br'File created by MIDL compiler version 8.xx.xxxx */\r\n' + br'/* at a redacted point in time', contents) + contents = re.sub( + br' Oicf, W1, Zp8, env=(.....) \(32b run\), ' + br'target_arch=(AMD64|X86) 8\.\d\d\.\d{4}', + br' Oicf, W1, Zp8, env=\1 (32b run), target_arch=\2 8.xx.xxxx', + contents) + # TODO(thakis): If we need more hacks than these, try to verify checked-in + # outputs when we're using the hermetic toolchain. + # midl.exe older than 8.1.622 omit '//' after #endif, fix that: + contents = contents.replace(b'#endif !_MIDL_USE_GUIDDEF_', + b'#endif // !_MIDL_USE_GUIDDEF_') + # midl.exe puts the midl version into code in one place. To have + # predictable output, lie about the midl version if it's not 8.1.622. + # This is unfortunate, but remember that there's beauty too in imperfection. + contents = contents.replace(b'0x801026c, /* MIDL Version 8.1.620 */', + b'0x801026e, /* MIDL Version 8.1.622 */') + open(filename, 'wb').write(contents) + + +def get_tlb_contents(tlb_file): + # See ZapTimestamp() for a short overview of the .tlb format. + contents = open(tlb_file, 'rb').read() + assert contents[0:8] == b'MSFT\x02\x00\x01\x00' + ntypes, = struct.unpack_from(' debug_offset + assert header.PointerToRelocations == 0 + assert header.PointerToLineNumbers == 0 + + # Make sure the first non-empty section's data starts right after the section + # headers. + for section_header in section_headers: + if section_header.PointerToRawData == 0: + assert section_header.PointerToRelocations == 0 + assert section_header.PointerToLineNumbers == 0 + continue + assert section_header.PointerToRawData == data_start + break + + # Make sure the symbol table (and hence, string table) appear after the last + # section: + assert ( + coff_header.PointerToSymbolTable >= + section_headers[-1].PointerToRawData + section_headers[-1].SizeOfRawData) + + # The symbol table contains a symbol for the no-longer-present .debug$S + # section. If we leave it there, lld-link will complain: + # + # lld-link: error: .debug$S should not refer to non-existent section 5 + # + # so we need to remove that symbol table entry as well. This shifts symbol + # entries around and we need to update symbol table indices in: + # - relocations + # - line number records (never present) + # - one aux symbol entry (IMAGE_SYM_CLASS_CLR_TOKEN; not present in ml output) + SYM = Struct( + 'SYM', + '8s', + 'Name', + 'I', + 'Value', + 'h', + 'SectionNumber', # Note: Signed! + 'H', + 'Type', + 'B', + 'StorageClass', + 'B', + 'NumberOfAuxSymbols') + i = 0 + debug_sym = -1 + while i < coff_header.NumberOfSymbols: + sym_offset = coff_header.PointerToSymbolTable + i * SYM.size() + sym = SYM.unpack_from(objdata, sym_offset) + + # 107 is IMAGE_SYM_CLASS_CLR_TOKEN, which has aux entry "CLR Token + # Definition", which contains a symbol index. Check it's never present. + assert sym.StorageClass != 107 + + # Note: sym.SectionNumber is 1-based, debug_section_index is 0-based. 
+    if sym.SectionNumber - 1 == debug_section_index:
+      assert debug_sym == -1, 'more than one .debug$S symbol found'
+      debug_sym = i
+      # Make sure the .debug$S symbol looks like we expect.
+      # In particular, it should have exactly one aux symbol.
+      assert sym.Name == b'.debug$S'
+      assert sym.Value == 0
+      assert sym.Type == 0
+      assert sym.StorageClass == 3
+      assert sym.NumberOfAuxSymbols == 1
+    elif sym.SectionNumber > debug_section_index:
+      sym = Subtract(sym, SectionNumber=1)
+      SYM.pack_into(objdata, sym_offset, sym)
+    i += 1 + sym.NumberOfAuxSymbols
+  assert debug_sym != -1, '.debug$S symbol not found'
+
+  # Note: Usually the .debug$S section is the last, but for files saying
+  # `includelib foo.lib`, like safe_terminate_process.asm in 32-bit builds,
+  # this isn't true: .drectve is after .debug$S.
+
+  # Update symbol table indices in relocations.
+  # There are a few processor types that have one or two relocation types
+  # where SymbolTableIndex has a different meaning, but not for x86.
+  REL = Struct('REL', 'I', 'VirtualAddress', 'I', 'SymbolTableIndex', 'H',
+               'Type')
+  for header in section_headers[0:debug_section_index]:
+    for j in range(0, header.NumberOfRelocations):
+      rel_offset = header.PointerToRelocations + j * REL.size()
+      rel = REL.unpack_from(objdata, rel_offset)
+      assert rel.SymbolTableIndex != debug_sym
+      if rel.SymbolTableIndex > debug_sym:
+        rel = Subtract(rel, SymbolTableIndex=2)
+        REL.pack_into(objdata, rel_offset, rel)
+
+  # Update symbol table indices in line numbers -- just check they don't exist.
+  for header in section_headers:
+    assert header.NumberOfLineNumbers == 0
+
+  # Now that all indices are updated, remove the symbol table entry referring to
+  # .debug$S and its aux entry.
+  del objdata[coff_header.PointerToSymbolTable +
+              debug_sym * SYM.size():coff_header.PointerToSymbolTable +
+              (debug_sym + 2) * SYM.size()]
+
+  # Now we know that it's safe to write out the input data, with just the
+  # timestamp overwritten to 0, the last section header cut out (and the
+  # offsets of all other section headers decremented by the size of that
+  # one section header), and the last section's data cut out. The symbol
+  # table offset needs to be reduced by one section header and the size of
+  # the missing section.
+  # (The COFF spec only requires on-disk sections to be aligned in image files;
+  # for obj files it's not required. If that wasn't the case, deleting slices
+  # of data would not generally be safe.)
+
+  # Update section offsets and remove .debug$S section data.
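+  # Sections in front of .debug$S only have to compensate for the removed
+  # section header; sections behind it (e.g. a trailing .drectve, see above)
+  # additionally lose the removed section's raw data.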
+  for i in range(0, debug_section_index):
+    header = section_headers[i]
+    if header.SizeOfRawData:
+      header = Subtract(header, PointerToRawData=SECTIONHEADER.size())
+    if header.NumberOfRelocations:
+      header = Subtract(header, PointerToRelocations=SECTIONHEADER.size())
+    if header.NumberOfLineNumbers:
+      header = Subtract(header, PointerToLineNumbers=SECTIONHEADER.size())
+    SECTIONHEADER.pack_into(objdata,
+                            COFFHEADER.size() + i * SECTIONHEADER.size(),
+                            header)
+  for i in range(debug_section_index + 1, len(section_headers)):
+    header = section_headers[i]
+    shift = SECTIONHEADER.size() + debug_size
+    if header.SizeOfRawData:
+      header = Subtract(header, PointerToRawData=shift)
+    if header.NumberOfRelocations:
+      header = Subtract(header, PointerToRelocations=shift)
+    if header.NumberOfLineNumbers:
+      header = Subtract(header, PointerToLineNumbers=shift)
+    SECTIONHEADER.pack_into(objdata,
+                            COFFHEADER.size() + i * SECTIONHEADER.size(),
+                            header)
+
+  del objdata[debug_offset:debug_offset + debug_size]
+
+  # Finally, remove .debug$S section header and update coff header.
+  coff_header = coff_header._replace(TimeDateStamp=0)
+  coff_header = Subtract(coff_header,
+                         NumberOfSections=1,
+                         PointerToSymbolTable=SECTIONHEADER.size() + debug_size,
+                         NumberOfSymbols=2)
+  COFFHEADER.pack_into(objdata, 0, coff_header)
+
+  del objdata[COFFHEADER.size() +
+              debug_section_index * SECTIONHEADER.size():COFFHEADER.size() +
+              (debug_section_index + 1) * SECTIONHEADER.size()]
+
+  # All done!
+  if sys.version_info.major == 2:
+    return objdata.tostring()
+  else:
+    return objdata.tobytes()
+
+
+def main():
+  ml_result = subprocess.call(sys.argv[1:])
+  if ml_result != 0:
+    return ml_result
+
+  objfile = None
+  for i in range(1, len(sys.argv)):
+    if sys.argv[i].startswith('/Fo'):
+      objfile = sys.argv[i][len('/Fo'):]
+  assert objfile, 'failed to find ml output'
+
+  with open(objfile, 'rb') as f:
+    objdata = f.read()
+  objdata = MakeDeterministic(objdata)
+  with open(objfile, 'wb') as f:
+    f.write(objdata)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/toolchain/win/rc/.gitignore b/toolchain/win/rc/.gitignore
new file mode 100644
index 000000000000..e8fc4d3e1f85
--- /dev/null
+++ b/toolchain/win/rc/.gitignore
@@ -0,0 +1,3 @@
+linux64/rc
+mac/rc
+win/rc.exe
diff --git a/toolchain/win/rc/README.md b/toolchain/win/rc/README.md
new file mode 100644
index 000000000000..e6d38f97094c
--- /dev/null
+++ b/toolchain/win/rc/README.md
@@ -0,0 +1,30 @@
+# rc
+
+This contains a cross-platform reimplementation of rc.exe.
+
+This exists mainly to compile .rc files on non-Windows hosts for cross builds.
+However, it also runs on Windows for two reasons:
+
+1. To compare the output of Microsoft's rc.exe and the reimplementation and to
+   check that they produce bitwise identical output.
+2. The reimplementation supports printing resource files in /showIncludes
+   output, which helps getting build dependencies right.
+
+The resource compiler consists of two parts:
+
+1. A python script rc.py that serves as the driver. It does unicode
+   conversions, runs the input through the preprocessor, and then calls the
+   actual resource compiler.
+2. The resource compiler, a C++ binary obtained via sha1 files from Google
+   Storage. The binary's code currently lives at
+   https://github.com/nico/hack/tree/master/res, even though work is (slowly)
+   underway to upstream it into LLVM.
+
+To update the rc binary, run `upload_rc_binaries.sh` in this directory, on a
+Mac.
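+
+For illustration, the driver is invoked with the same flag syntax as
+Microsoft's rc.exe; the paths and the define below are made up, and the full
+flag list is in the usage text at the top of rc.py:
+
+    python3 rc.py -I../.. -DSOME_DEFINE /fo gen/foo.res path/to/foo.rc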
+
+rc isn't built from source as part of the regular Chrome build because
+it's needed in a gn toolchain tool, and these currently cannot have deps.
+Alternatively, gn could be taught about deps on tools, or rc invocations could
+be not a tool but a template like e.g. yasm invocations (which can have deps);
+then the prebuilt binaries wouldn't be needed.
diff --git a/toolchain/win/rc/linux64/rc.sha1 b/toolchain/win/rc/linux64/rc.sha1
new file mode 100644
index 000000000000..0d132a05f349
--- /dev/null
+++ b/toolchain/win/rc/linux64/rc.sha1
@@ -0,0 +1 @@
+1ca25446f5eed4151dc9b43c2a9182433e8f83c0
\ No newline at end of file
diff --git a/toolchain/win/rc/mac/rc.sha1 b/toolchain/win/rc/mac/rc.sha1
new file mode 100644
index 000000000000..cd9deb9d63cd
--- /dev/null
+++ b/toolchain/win/rc/mac/rc.sha1
@@ -0,0 +1 @@
+95e7af85589f1102667fc07efe488fd426c483e8
\ No newline at end of file
diff --git a/toolchain/win/rc/rc.py b/toolchain/win/rc/rc.py
new file mode 100755
index 000000000000..a650506a1e0a
--- /dev/null
+++ b/toolchain/win/rc/rc.py
@@ -0,0 +1,275 @@
+#!/usr/bin/env python3
+# Copyright 2017 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""usage: rc.py [options] input.rc
+A resource compiler for .rc files.
+
+options:
+-h, --help      Print this message.
+-I<dir>         Add include path, used for both headers and resources.
+-imsvc<dir>     Add system include path, used for preprocessing only.
+/winsysroot<d>  Set winsysroot, used for preprocessing only.
+-D<sym>         Define a macro for the preprocessor.
+/fo<out>        Set path of output .res file.
+/nologo         Ignored (rc.py doesn't print a logo by default).
+/showIncludes   Print referenced header and resource files."""
+
+from collections import namedtuple
+import codecs
+import os
+import re
+import subprocess
+import sys
+import tempfile
+
+
+THIS_DIR = os.path.abspath(os.path.dirname(__file__))
+SRC_DIR = \
+    os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(THIS_DIR))))
+
+
+def ParseFlags():
+  """Parses flags off sys.argv and returns the parsed flags."""
+  # Can't use optparse / argparse because of /fo flag :-/
+  includes = []
+  imsvcs = []
+  winsysroot = []
+  defines = []
+  output = None
+  input = None
+  show_includes = False
+  # Parse.
+  for flag in sys.argv[1:]:
+    if flag == '-h' or flag == '--help':
+      print(__doc__)
+      sys.exit(0)
+    if flag.startswith('-I'):
+      includes.append(flag)
+    elif flag.startswith('-imsvc'):
+      imsvcs.append(flag)
+    elif flag.startswith('/winsysroot'):
+      winsysroot = [flag]
+    elif flag.startswith('-D'):
+      defines.append(flag)
+    elif flag.startswith('/fo'):
+      if output:
+        print('rc.py: error: multiple /fo flags', '/fo' + output, flag,
+              file=sys.stderr)
+        sys.exit(1)
+      output = flag[3:]
+    elif flag == '/nologo':
+      pass
+    elif flag == '/showIncludes':
+      show_includes = True
+    elif (flag.startswith('-') or
+          (flag.startswith('/') and not os.path.exists(flag))):
+      print('rc.py: error: unknown flag', flag, file=sys.stderr)
+      print(__doc__, file=sys.stderr)
+      sys.exit(1)
+    else:
+      if input:
+        print('rc.py: error: multiple inputs:', input, flag, file=sys.stderr)
+        sys.exit(1)
+      input = flag
+  # Validate and set default values.
+  if not input:
+    print('rc.py: error: no input file', file=sys.stderr)
+    sys.exit(1)
+  if not output:
+    output = os.path.splitext(input)[0] + '.res'
+  Flags = namedtuple('Flags', [
+      'includes', 'defines', 'output', 'imsvcs', 'winsysroot', 'input',
+      'show_includes'
+  ])
+  return Flags(includes=includes,
+               defines=defines,
+               output=output,
+               imsvcs=imsvcs,
+               winsysroot=winsysroot,
+               input=input,
+               show_includes=show_includes)
+
+
+def ReadInput(input):
+  """Reads input and returns it. For UTF-16LE BOM input, converts to UTF-8."""
+  # Microsoft's rc.exe only supports unicode in the form of UTF-16LE with a BOM.
+  # Our rc binary sniffs for UTF-16LE. If that's not found, if /utf-8 is
+  # passed, the input is treated as UTF-8. If /utf-8 is not passed and the
+  # input is not UTF-16LE, then our rc errors out on characters outside of
+  # 7-bit ASCII. Since the driver always converts UTF-16LE to UTF-8 here (for
+  # the preprocessor, which doesn't support UTF-16LE), our rc will either see
+  # UTF-8 with the /utf-8 flag (for UTF-16LE input), or ASCII input.
+  # This is compatible with Microsoft rc.exe. If we wanted, we could expose
+  # a /utf-8 flag for the driver for UTF-8 .rc inputs too.
+  # TODO(thakis): Microsoft's rc.exe supports BOM-less UTF-16LE. We currently
+  # don't, but for chrome it currently doesn't matter.
+  is_utf8 = False
+  try:
+    with open(input, 'rb') as rc_file:
+      rc_file_data = rc_file.read()
+      if rc_file_data.startswith(codecs.BOM_UTF16_LE):
+        rc_file_data = rc_file_data[2:].decode('utf-16le').encode('utf-8')
+        is_utf8 = True
+  except IOError:
+    print('rc.py: failed to open', input, file=sys.stderr)
+    sys.exit(1)
+  except UnicodeDecodeError:
+    print('rc.py: failed to decode UTF-16 despite BOM', input, file=sys.stderr)
+    sys.exit(1)
+  return rc_file_data, is_utf8
+
+
+def Preprocess(rc_file_data, flags):
+  """Runs the input file through the preprocessor."""
+  clang = os.path.join(SRC_DIR, 'third_party', 'llvm-build',
+                       'Release+Asserts', 'bin', 'clang-cl')
+  if sys.platform == 'win32':
+    clang += '.exe'
+  # Let the preprocessor write to a temp file so that it doesn't interfere
+  # with /showIncludes output on stdout.
+  temp_handle, temp_file = tempfile.mkstemp(suffix='.i')
+  # Closing temp_handle immediately defeats the purpose of mkstemp(), but I
+  # can't figure out how to let the preprocessor write to the temp file on
+  # Windows otherwise.
+  os.close(temp_handle)
+  clang_cmd = [clang, '/P', '/DRC_INVOKED', '/TC', '-', '/Fi' + temp_file]
+  if flags.imsvcs:
+    clang_cmd += ['/X']
+  if os.path.dirname(flags.input):
+    # This must precede flags.includes.
+    clang_cmd.append('-I' + os.path.dirname(flags.input))
+  if flags.show_includes:
+    clang_cmd.append('/showIncludes')
+  clang_cmd += flags.imsvcs + flags.winsysroot + flags.includes + flags.defines
+  p = subprocess.Popen(clang_cmd, stdin=subprocess.PIPE)
+  p.communicate(input=rc_file_data)
+  if p.returncode != 0:
+    sys.exit(p.returncode)
+  preprocessed_output = open(temp_file, 'rb').read()
+  os.remove(temp_file)
+
+  # rc.exe has a wacko preprocessor:
+  # https://msdn.microsoft.com/en-us/library/windows/desktop/aa381033(v=vs.85).aspx
+  # """RC treats files with the .c and .h extensions in a special manner. It
+  # assumes that a file with one of these extensions does not contain
+  # resources.
+  # If a file has the .c or .h file name extension, RC ignores all
+  # lines in the file except the preprocessor directives."""
+  # Thankfully, the Microsoft headers are mostly good about putting everything
+  # in the system headers behind `if !defined(RC_INVOKED)`, so regular
+  # preprocessing with RC_INVOKED defined works.
+  return preprocessed_output
+
+
+def RunRc(preprocessed_output, is_utf8, flags):
+  if sys.platform.startswith('linux'):
+    rc = os.path.join(THIS_DIR, 'linux64', 'rc')
+  elif sys.platform == 'darwin':
+    rc = os.path.join(THIS_DIR, 'mac', 'rc')
+  elif sys.platform == 'win32':
+    rc = os.path.join(THIS_DIR, 'win', 'rc.exe')
+  else:
+    print('rc.py: error: unsupported platform', sys.platform, file=sys.stderr)
+    sys.exit(1)
+  rc_cmd = [rc]
+  # Make sure rc-relative resources can be found:
+  if os.path.dirname(flags.input):
+    rc_cmd.append('/cd' + os.path.dirname(flags.input))
+  rc_cmd.append('/fo' + flags.output)
+  if is_utf8:
+    rc_cmd.append('/utf-8')
+  # TODO(thakis): cl currently always prints full paths for /showIncludes,
+  # but clang-cl /P doesn't. Which one is right?
+  if flags.show_includes:
+    rc_cmd.append('/showIncludes')
+  # Microsoft rc.exe searches for referenced files relative to -I flags in
+  # addition to the pwd, so -I flags need to be passed to both
+  # the preprocessor and rc.
+  rc_cmd += flags.includes
+  p = subprocess.Popen(rc_cmd, stdin=subprocess.PIPE)
+  p.communicate(input=preprocessed_output)
+
+  if flags.show_includes and p.returncode == 0:
+    TOOL_DIR = os.path.dirname(os.path.relpath(THIS_DIR)).replace("\\", "/")
+    # Since tool("rc") can't have deps, add deps on this script and on rc.py
+    # and its deps here, so that rc edges become dirty if rc.py changes.
+    print('Note: including file: {}/tool_wrapper.py'.format(TOOL_DIR))
+    print('Note: including file: {}/rc/rc.py'.format(TOOL_DIR))
+    print(
+        'Note: including file: {}/rc/linux64/rc.sha1'.format(TOOL_DIR))
+    print('Note: including file: {}/rc/mac/rc.sha1'.format(TOOL_DIR))
+    print(
+        'Note: including file: {}/rc/win/rc.exe.sha1'.format(TOOL_DIR))
+
+  return p.returncode
+
+
+def CompareToMsRcOutput(preprocessed_output, is_utf8, flags):
+  msrc_in = flags.output + '.preprocessed.rc'
+
+  # Strip preprocessor line markers.
+  preprocessed_output = re.sub(br'^#.*$', b'', preprocessed_output, flags=re.M)
+  if is_utf8:
+    preprocessed_output = preprocessed_output.decode('utf-8').encode('utf-16le')
+  with open(msrc_in, 'wb') as f:
+    f.write(preprocessed_output)
+
+  msrc_out = flags.output + '_ms_rc'
+  msrc_cmd = ['rc', '/nologo', '/x', '/fo' + msrc_out]
+
+  # Make sure rc-relative resources can be found. rc.exe looks for external
+  # resource files next to the file, but the preprocessed file isn't where the
+  # input was.
+  # Note that rc searches external resource files in the order of
+  # 1. next to the input file
+  # 2. relative to cwd
+  # 3. next to -I directories
+  # Changing the cwd means we'd have to rewrite all -I flags, so just add
+  # the input file dir as an -I flag. That technically gets the order of 1 and
+  # 2 wrong, but in Chromium's build the cwd is the gn out dir, and generated
+  # files there are in obj/ and gen/, so this difference doesn't matter in
+  # practice.
+  if os.path.dirname(flags.input):
+    msrc_cmd += [ '-I' + os.path.dirname(flags.input) ]
+
+  # Microsoft rc.exe searches for referenced files relative to -I flags in
+  # addition to the pwd, so -I flags need to be passed to both
+  # the preprocessor and rc.
+  msrc_cmd += flags.includes
+
+  # Input must come last.
+ msrc_cmd += [ msrc_in ] + + rc_exe_exit_code = subprocess.call(msrc_cmd) + # Assert Microsoft rc.exe and rc.py produced identical .res files. + if rc_exe_exit_code == 0: + import filecmp + assert filecmp.cmp(msrc_out, flags.output) + return rc_exe_exit_code + + +def main(): + # This driver has to do these things: + # 1. Parse flags. + # 2. Convert the input from UTF-16LE to UTF-8 if needed. + # 3. Pass the input through a preprocessor (and clean up the preprocessor's + # output in minor ways). + # 4. Call rc for the heavy lifting. + flags = ParseFlags() + rc_file_data, is_utf8 = ReadInput(flags.input) + preprocessed_output = Preprocess(rc_file_data, flags) + rc_exe_exit_code = RunRc(preprocessed_output, is_utf8, flags) + + # 5. On Windows, we also call Microsoft's rc.exe and check that we produced + # the same output. + # Since Microsoft's rc has a preprocessor that only accepts 32 characters + # for macro names, feed the clang-preprocessed source into it instead + # of using ms rc's preprocessor. + if sys.platform == 'win32' and rc_exe_exit_code == 0: + rc_exe_exit_code = CompareToMsRcOutput(preprocessed_output, is_utf8, flags) + + return rc_exe_exit_code + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/toolchain/win/rc/upload_rc_binaries.sh b/toolchain/win/rc/upload_rc_binaries.sh new file mode 100755 index 000000000000..790b36a6e213 --- /dev/null +++ b/toolchain/win/rc/upload_rc_binaries.sh @@ -0,0 +1,46 @@ +#!/bin/bash +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +set -eu + +# Builds new rc binaries at head and uploads them to google storage. +# The new .sha1 files will be in the tree after this has run. + +if [[ "$OSTYPE" != "darwin"* ]]; then + echo "this script must run on a mac" + exit 1 +fi + +DIR="$(cd "$(dirname "${0}" )" && pwd)" +SRC_DIR="$DIR/../../../.." + +# Make sure Linux and Windows sysroots are installed, for distrib.py. +$SRC_DIR/build/linux/sysroot_scripts/install-sysroot.py --arch amd64 +$SRC_DIR/build/vs_toolchain.py update --force + +# Make a temporary directory. +WORK_DIR=$(mktemp -d) +if [[ ! "$WORK_DIR" || ! -d "$WORK_DIR" ]]; then + echo "could not create temp dir" + exit 1 +fi +function cleanup { + rm -rf "$WORK_DIR" +} +trap cleanup EXIT + +# Check out rc and build it in the temporary directory. Copy binaries over. +pushd "$WORK_DIR" > /dev/null +git clone -q https://github.com/nico/hack +cd hack/res +./distrib.py "$SRC_DIR" +popd > /dev/null +cp "$WORK_DIR/hack/res/rc-linux64" "$DIR/linux64/rc" +cp "$WORK_DIR/hack/res/rc-mac" "$DIR/mac/rc" +cp "$WORK_DIR/hack/res/rc-win.exe" "$DIR/win/rc.exe" + +# Upload binaries to cloud storage. 
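+# upload_to_google_storage.py (from depot_tools) also writes a matching .sha1
+# stamp next to each uploaded binary; those .sha1 files are what get committed.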
+upload_to_google_storage.py -b chromium-browser-clang/rc "$DIR/linux64/rc" +upload_to_google_storage.py -b chromium-browser-clang/rc "$DIR/mac/rc" +upload_to_google_storage.py -b chromium-browser-clang/rc "$DIR/win/rc.exe" diff --git a/toolchain/win/rc/win/rc.exe.sha1 b/toolchain/win/rc/win/rc.exe.sha1 new file mode 100644 index 000000000000..30c641e383dd --- /dev/null +++ b/toolchain/win/rc/win/rc.exe.sha1 @@ -0,0 +1 @@ +7d3a485bb5bae0cf3c6b8af95d21f36aa7d02832 \ No newline at end of file diff --git a/toolchain/win/setup_toolchain.py b/toolchain/win/setup_toolchain.py new file mode 100644 index 000000000000..d2f5798ce653 --- /dev/null +++ b/toolchain/win/setup_toolchain.py @@ -0,0 +1,316 @@ +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Copies the given "win tool" (which the toolchain uses to wrap compiler +# invocations) and the environment blocks for the 32-bit and 64-bit builds on +# Windows to the build directory. +# +# The arguments are the visual studio install location and the location of the +# win tool. The script assumes that the root build directory is the current dir +# and the files will be written to the current directory. + + +import errno +import json +import os +import re +import subprocess +import sys + +sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) +import gn_helpers + +SCRIPT_DIR = os.path.dirname(__file__) +SDK_VERSION = '10.0.22621.0' + + +def _ExtractImportantEnvironment(output_of_set): + """Extracts environment variables required for the toolchain to run from + a textual dump output by the cmd.exe 'set' command.""" + envvars_to_save = ( + 'cipd_cache_dir', # needed by vpython + 'homedrive', # needed by vpython + 'homepath', # needed by vpython + 'goma_.*', # TODO(scottmg): This is ugly, but needed for goma. + 'include', + 'lib', + 'libpath', + 'luci_context', # needed by vpython + 'path', + 'pathext', + 'systemroot', + 'temp', + 'tmp', + 'userprofile', # needed by vpython + 'vpython_virtualenv_root' # needed by vpython + ) + env = {} + # This occasionally happens and leads to misleading SYSTEMROOT error messages + # if not caught here. + if output_of_set.count('=') == 0: + raise Exception('Invalid output_of_set. Value is:\n%s' % output_of_set) + for line in output_of_set.splitlines(): + for envvar in envvars_to_save: + if re.match(envvar + '=', line.lower()): + var, setting = line.split('=', 1) + if envvar == 'path': + # Our own rules and actions in Chromium rely on python being in the + # path. Add the path to this python here so that if it's not in the + # path when ninja is run later, python will still be found. + setting = os.path.dirname(sys.executable) + os.pathsep + setting + if envvar in ['include', 'lib']: + # Make sure that the include and lib paths point to directories that + # exist. This ensures a (relatively) clear error message if the + # required SDK is not installed. + for part in setting.split(';'): + if not os.path.exists(part) and len(part) != 0: + raise Exception( + 'Path "%s" from environment variable "%s" does not exist. ' + 'Make sure the necessary SDK is installed.' 
% (part, envvar)) + env[var.upper()] = setting + break + if sys.platform in ('win32', 'cygwin'): + for required in ('SYSTEMROOT', 'TEMP', 'TMP'): + if required not in env: + raise Exception('Environment variable "%s" ' + 'required to be set to valid path' % required) + return env + + +def _DetectVisualStudioPath(): + """Return path to the installed Visual Studio. + """ + + # Use the code in build/vs_toolchain.py to avoid duplicating code. + chromium_dir = os.path.abspath(os.path.join(SCRIPT_DIR, '..', '..', '..')) + sys.path.append(os.path.join(chromium_dir, 'build')) + import vs_toolchain + return vs_toolchain.DetectVisualStudioPath() + + +def _LoadEnvFromBat(args): + """Given a bat command, runs it and returns env vars set by it.""" + args = args[:] + args.extend(('&&', 'set')) + popen = subprocess.Popen( + args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + variables, _ = popen.communicate() + if popen.returncode != 0: + raise Exception('"%s" failed with error %d' % (args, popen.returncode)) + return variables.decode(errors='ignore') + + +def _LoadToolchainEnv(cpu, toolchain_root, sdk_dir, target_store): + """Returns a dictionary with environment variables that must be set while + running binaries from the toolchain (e.g. INCLUDE and PATH for cl.exe).""" + # Check if we are running in the SDK command line environment and use + # the setup script from the SDK if so. |cpu| should be either + # 'x86' or 'x64' or 'arm' or 'arm64'. + assert cpu in ('x86', 'x64', 'arm', 'arm64') + if bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', 1))) and sdk_dir: + # Load environment from json file. + env = os.path.normpath(os.path.join(sdk_dir, 'bin/SetEnv.%s.json' % cpu)) + env = json.load(open(env))['env'] + if env['VSINSTALLDIR'] == [["..", "..\\"]]: + # Old-style paths were relative to the win_sdk\bin directory. + json_relative_dir = os.path.join(sdk_dir, 'bin') + else: + # New-style paths are relative to the toolchain directory. + json_relative_dir = toolchain_root + for k in env: + entries = [os.path.join(*([json_relative_dir] + e)) for e in env[k]] + # clang-cl wants INCLUDE to be ;-separated even on non-Windows, + # lld-link wants LIB to be ;-separated even on non-Windows. Path gets :. + # The separator for INCLUDE here must match the one used in main() below. + sep = os.pathsep if k == 'PATH' else ';' + env[k] = sep.join(entries) + # PATH is a bit of a special case, it's in addition to the current PATH. + env['PATH'] = env['PATH'] + os.pathsep + os.environ['PATH'] + # Augment with the current env to pick up TEMP and friends. + for k in os.environ: + if k not in env: + env[k] = os.environ[k] + + varlines = [] + for k in sorted(env.keys()): + varlines.append('%s=%s' % (str(k), str(env[k]))) + variables = '\n'.join(varlines) + + # Check that the json file contained the same environment as the .cmd file. + if sys.platform in ('win32', 'cygwin'): + script = os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.cmd')) + arg = '/' + cpu + json_env = _ExtractImportantEnvironment(variables) + cmd_env = _ExtractImportantEnvironment(_LoadEnvFromBat([script, arg])) + assert _LowercaseDict(json_env) == _LowercaseDict(cmd_env) + else: + if 'GYP_MSVS_OVERRIDE_PATH' not in os.environ: + os.environ['GYP_MSVS_OVERRIDE_PATH'] = _DetectVisualStudioPath() + # We only support x64-hosted tools. 
+  script_path = os.path.normpath(os.path.join(
+      os.environ['GYP_MSVS_OVERRIDE_PATH'],
+      'VC/vcvarsall.bat'))
+  if not os.path.exists(script_path):
+    # vcvarsall.bat for VS 2017 fails if run after running vcvarsall.bat from
+    # VS 2013 or VS 2015. Fix this by clearing the vsinstalldir environment
+    # variable. Since vcvarsall.bat appends to the INCLUDE, LIB, and LIBPATH
+    # environment variables we need to clear those to avoid getting double
+    # entries when vcvarsall.bat has been run before gn gen. vcvarsall.bat
+    # also adds to PATH, but there is no clean way of clearing that and it
+    # doesn't seem to cause problems.
+    if 'VSINSTALLDIR' in os.environ:
+      del os.environ['VSINSTALLDIR']
+    if 'INCLUDE' in os.environ:
+      del os.environ['INCLUDE']
+    if 'LIB' in os.environ:
+      del os.environ['LIB']
+    if 'LIBPATH' in os.environ:
+      del os.environ['LIBPATH']
+    other_path = os.path.normpath(os.path.join(
+        os.environ['GYP_MSVS_OVERRIDE_PATH'],
+        'VC/Auxiliary/Build/vcvarsall.bat'))
+    if not os.path.exists(other_path):
+      raise Exception('%s is missing - make sure VC++ tools are installed.' %
+                      script_path)
+    script_path = other_path
+  cpu_arg = "amd64"
+  if (cpu != 'x64'):
+    # x64 is default target CPU thus any other CPU requires a target set
+    cpu_arg += '_' + cpu
+  args = [script_path, cpu_arg, ]
+  # Store target must come before any SDK version declaration
+  if (target_store):
+    args.append('store')
+  # Explicitly specifying the SDK version to build with to avoid accidentally
+  # building with a new and untested SDK. This should stay in sync with the
+  # packaged toolchain in build/vs_toolchain.py.
+  args.append(SDK_VERSION)
+  variables = _LoadEnvFromBat(args)
+  return _ExtractImportantEnvironment(variables)
+
+
+def _FormatAsEnvironmentBlock(envvar_dict):
+  """Format as an 'environment block' directly suitable for CreateProcess.
+  Briefly this is a list of key=value\0, terminated by an additional \0. See
+  CreateProcess documentation for more details."""
+  block = ''
+  nul = '\0'
+  for key, value in envvar_dict.items():
+    block += key + '=' + value + nul
+  block += nul
+  return block
+
+
+def _LowercaseDict(d):
+  """Returns a copy of `d` with both key and values lowercased.
+
+  Args:
+    d: dict to lowercase (e.g. {'A': 'BcD'}).
+
+  Returns:
+    A dict with both keys and values lowercased (e.g.: {'a': 'bcd'}).
+  """
+  return {k.lower(): d[k].lower() for k in d}
+
+
+def FindFileInEnvList(env, env_name, separator, file_name, optional=False):
+  parts = env[env_name].split(separator)
+  for path in parts:
+    if os.path.exists(os.path.join(path, file_name)):
+      return os.path.realpath(path)
+  assert optional, "%s is not found in %s:\n%s\nCheck if it is installed." % (
+      file_name, env_name, '\n'.join(parts))
+  return ''
+
+
+def main():
+  if len(sys.argv) != 7:
+    print('Usage setup_toolchain.py '
+          '<visual studio path> <win sdk path> '
+          '<runtime dirs> <target_os> <target_cpu> '
+          '<environment block name|none>')
+    sys.exit(2)
+  # toolchain_root and win_sdk_path are only read if the hermetic Windows
+  # toolchain is set, that is, if DEPOT_TOOLS_WIN_TOOLCHAIN is not set to 0.
+  # With the hermetic Windows toolchain, the visual studio path in argv[1]
+  # is the root of the Windows toolchain directory.
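+  # For illustration only (the paths below are hypothetical), a non-hermetic
+  # (DEPOT_TOOLS_WIN_TOOLCHAIN=0) invocation could look like:
+  #   python3 setup_toolchain.py "C:\VS" "C:\WinSDK" "C:\Windows\System32" \
+  #       win x64 environment.x64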
+ toolchain_root = sys.argv[1] + win_sdk_path = sys.argv[2] + + runtime_dirs = sys.argv[3] + target_os = sys.argv[4] + target_cpu = sys.argv[5] + environment_block_name = sys.argv[6] + if (environment_block_name == 'none'): + environment_block_name = '' + + if (target_os == 'winuwp'): + target_store = True + else: + target_store = False + + cpus = ('x86', 'x64', 'arm', 'arm64') + assert target_cpu in cpus + vc_bin_dir = '' + include = '' + lib = '' + + # TODO(scottmg|goma): Do we need an equivalent of + # ninja_use_custom_environment_files? + + def relflag(s): # Make s relative to builddir when cwd and sdk on same drive. + try: + return os.path.relpath(s).replace('\\', '/') + except ValueError: + return s + + def q(s): # Quote s if it contains spaces or other weird characters. + return s if re.match(r'^[a-zA-Z0-9._/\\:-]*$', s) else '"' + s + '"' + + for cpu in cpus: + if cpu == target_cpu: + # Extract environment variables for subprocesses. + env = _LoadToolchainEnv(cpu, toolchain_root, win_sdk_path, target_store) + env['PATH'] = runtime_dirs + os.pathsep + env['PATH'] + + vc_bin_dir = FindFileInEnvList(env, 'PATH', os.pathsep, 'cl.exe') + + # The separator for INCLUDE here must match the one used in + # _LoadToolchainEnv() above. + include = [p.replace('"', r'\"') for p in env['INCLUDE'].split(';') if p] + include = list(map(relflag, include)) + + lib = [p.replace('"', r'\"') for p in env['LIB'].split(';') if p] + lib = list(map(relflag, lib)) + + include_I = ' '.join([q('/I' + i) for i in include]) + include_imsvc = ' '.join([q('-imsvc' + i) for i in include]) + libpath_flags = ' '.join([q('-libpath:' + i) for i in lib]) + + if (environment_block_name != ''): + env_block = _FormatAsEnvironmentBlock(env) + with open(environment_block_name, 'w', encoding='utf8') as f: + f.write(env_block) + + print('vc_bin_dir = ' + gn_helpers.ToGNString(vc_bin_dir)) + assert include_I + print('include_flags_I = ' + gn_helpers.ToGNString(include_I)) + assert include_imsvc + if bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', 1))) and win_sdk_path: + print('include_flags_imsvc = ' + + gn_helpers.ToGNString(q('/winsysroot' + relflag(toolchain_root)))) + else: + print('include_flags_imsvc = ' + gn_helpers.ToGNString(include_imsvc)) + print('paths = ' + gn_helpers.ToGNString(env['PATH'])) + assert libpath_flags + print('libpath_flags = ' + gn_helpers.ToGNString(libpath_flags)) + if bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', 1))) and win_sdk_path: + print('libpath_lldlink_flags = ' + + gn_helpers.ToGNString(q('/winsysroot:' + relflag(toolchain_root)))) + else: + print('libpath_lldlink_flags = ' + gn_helpers.ToGNString(libpath_flags)) + + +if __name__ == '__main__': + main() diff --git a/toolchain/win/tool_wrapper.py b/toolchain/win/tool_wrapper.py new file mode 100644 index 000000000000..47bbfe2a09d9 --- /dev/null +++ b/toolchain/win/tool_wrapper.py @@ -0,0 +1,191 @@ +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Utility functions for Windows builds. + +This file is copied to the build directory as part of toolchain setup and +is used to set up calls to tools used by the build that need wrappers. +""" + + +import os +import re +import shutil +import subprocess +import stat +import sys + + +BASE_DIR = os.path.dirname(os.path.abspath(__file__)) + +# A regex matching an argument corresponding to the output filename passed to +# link.exe. 
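+# For example (illustrative value), '/OUT:chrome.dll' matches, and the named
+# group 'out' captures 'chrome.dll'.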
+_LINK_EXE_OUT_ARG = re.compile('/OUT:(?P<out>.+)$', re.IGNORECASE)
+
+
+def main(args):
+  exit_code = WinTool().Dispatch(args)
+  if exit_code is not None:
+    sys.exit(exit_code)
+
+
+class WinTool(object):
+  """This class performs all the Windows tooling steps. The methods can either
+  be executed directly, or dispatched from an argument list."""
+
+  def _UseSeparateMspdbsrv(self, env, args):
+    """Allows using a unique instance of mspdbsrv.exe per linker instead of a
+    shared one."""
+    if len(args) < 1:
+      raise Exception("Not enough arguments")
+
+    if args[0] != 'link.exe':
+      return
+
+    # Use the output filename passed to the linker to generate an endpoint name
+    # for mspdbsrv.exe.
+    endpoint_name = None
+    for arg in args:
+      m = _LINK_EXE_OUT_ARG.match(arg)
+      if m:
+        endpoint_name = re.sub(r'\W+', '',
+                               '%s_%d' % (m.group('out'), os.getpid()))
+        break
+
+    if endpoint_name is None:
+      return
+
+    # Adds the appropriate environment variable. This will be read by link.exe
+    # to know which instance of mspdbsrv.exe it should connect to (if it's
+    # not set then the default endpoint is used).
+    env['_MSPDBSRV_ENDPOINT_'] = endpoint_name
+
+  def Dispatch(self, args):
+    """Dispatches a string command to a method."""
+    if len(args) < 1:
+      raise Exception("Not enough arguments")
+
+    method = "Exec%s" % self._CommandifyName(args[0])
+    return getattr(self, method)(*args[1:])
+
+  def _CommandifyName(self, name_string):
+    """Transforms a tool name like recursive-mirror to RecursiveMirror."""
+    return name_string.title().replace('-', '')
+
+  def _GetEnv(self, arch):
+    """Gets the saved environment from a file for a given architecture."""
+    # The environment is saved as an "environment block" (see CreateProcess
+    # and msvs_emulation for details). We convert to a dict here.
+    # Drop last 2 NULs, one for list terminator, one for trailing vs. separator.
+    pairs = open(arch).read()[:-2].split('\0')
+    kvs = [item.split('=', 1) for item in pairs]
+    return dict(kvs)
+
+  def ExecDeleteFile(self, path):
+    """Simple file delete command."""
+    if os.path.exists(path):
+      os.unlink(path)
+
+  def ExecRecursiveMirror(self, source, dest):
+    """Emulation of rm -rf out && cp -af in out."""
+    if os.path.exists(dest):
+      if os.path.isdir(dest):
+        def _on_error(fn, path, dummy_excinfo):
+          # The operation failed, possibly because the file is set to
+          # read-only. If that's why, make it writable and try the op again.
+          if not os.access(path, os.W_OK):
+            os.chmod(path, stat.S_IWRITE)
+          fn(path)
+        shutil.rmtree(dest, onerror=_on_error)
+      else:
+        if not os.access(dest, os.W_OK):
+          # Attempt to make the file writable before deleting it.
+          os.chmod(dest, stat.S_IWRITE)
+        os.unlink(dest)
+
+    if os.path.isdir(source):
+      shutil.copytree(source, dest)
+    else:
+      shutil.copy2(source, dest)
+      # Try to diagnose crbug.com/741603
+      if not os.path.exists(dest):
+        raise Exception("Copying of %s to %s failed" % (source, dest))
+
+  def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
+    """Filter diagnostic output from link that looks like:
+    '   Creating library ui.dll.lib and object ui.dll.exp'
+    This happens when there are exports from the dll or exe.
+    """
+    env = self._GetEnv(arch)
+    if use_separate_mspdbsrv == 'True':
+      self._UseSeparateMspdbsrv(env, args)
+    if sys.platform == 'win32':
+      args = list(args)  # *args is a tuple by default, which is read-only.
+      args[0] = args[0].replace('/', '\\')
+    # https://docs.python.org/2/library/subprocess.html:
+    # "On Unix with shell=True [...]
if args is a sequence, the first item + # specifies the command string, and any additional items will be treated as + # additional arguments to the shell itself. That is to say, Popen does the + # equivalent of: + # Popen(['/bin/sh', '-c', args[0], args[1], ...])" + # For that reason, since going through the shell doesn't seem necessary on + # non-Windows don't do that there. + pe_name = None + for arg in args: + m = _LINK_EXE_OUT_ARG.match(arg) + if m: + pe_name = m.group('out') + link = subprocess.Popen(args, shell=sys.platform == 'win32', env=env, + stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + # Read output one line at a time as it shows up to avoid OOM failures when + # GBs of output is produced. + for line in link.stdout: + line = line.decode('utf8') + if (not line.startswith(' Creating library ') + and not line.startswith('Generating code') + and not line.startswith('Finished generating code')): + print(line.rstrip()) + return link.wait() + + def ExecAsmWrapper(self, arch, *args): + """Filter logo banner from invocations of asm.exe.""" + env = self._GetEnv(arch) + if sys.platform == 'win32': + # Windows ARM64 uses clang-cl as assembler which has '/' as path + # separator, convert it to '\\' when running on Windows. + args = list(args) # *args is a tuple by default, which is read-only + args[0] = args[0].replace('/', '\\') + # See comment in ExecLinkWrapper() for why shell=False on non-win. + popen = subprocess.Popen(args, shell=sys.platform == 'win32', env=env, + stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + out, _ = popen.communicate() + for line in out.decode('utf8').splitlines(): + if not line.startswith(' Assembling: '): + print(line) + return popen.returncode + + def ExecRcWrapper(self, arch, *args): + """Converts .rc files to .res files.""" + env = self._GetEnv(arch) + args = list(args) + rcpy_args = args[:] + rcpy_args[0:1] = [sys.executable, os.path.join(BASE_DIR, 'rc', 'rc.py')] + rcpy_args.append('/showIncludes') + return subprocess.call(rcpy_args, env=env) + + def ExecActionWrapper(self, arch, rspfile, *dirname): + """Runs an action command line from a response file using the environment + for |arch|. If |dirname| is supplied, use that as the working directory.""" + env = self._GetEnv(arch) + # TODO(scottmg): This is a temporary hack to get some specific variables + # through to actions that are set after GN-time. http://crbug.com/333738. + for k, v in os.environ.items(): + if k not in env: + env[k] = v + args = open(rspfile).read() + dirname = dirname[0] if dirname else None + return subprocess.call(args, shell=True, env=env, cwd=dirname) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/toolchain/win/toolchain.gni b/toolchain/win/toolchain.gni new file mode 100644 index 000000000000..968a4a20e131 --- /dev/null +++ b/toolchain/win/toolchain.gni @@ -0,0 +1,691 @@ +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/rust.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/win/visual_studio_version.gni")
+import("//build/toolchain/cc_wrapper.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/rbe.gni")
+import("//build/toolchain/toolchain.gni")
+import("//build/toolchain/win/win_toolchain_data.gni")
+
+assert(is_win, "Should only be running on Windows")
+
+# This tool is used as a wrapper for various commands below.
+_tool_wrapper_path =
+    rebase_path("//build/toolchain/win/tool_wrapper.py", root_build_dir)
+
+if (host_os == "win") {
+  _exe = ".exe"
+} else {
+  _exe = ""
+}
+
+_clang_bin_path = rebase_path("$clang_base_path/bin", root_build_dir)
+
+# Makes a single MSVC toolchain. Callers should normally instead invoke
+# "msvc_toolchain" which might make an additional toolchain available
+# without sanitizers if required.
+#
+# Parameters:
+#   environment: File name of environment file.
+#
+# You would also define a toolchain_args variable with at least these set:
+#   current_cpu: current_cpu to pass as a build arg
+#   current_os: current_os to pass as a build arg
+template("single_msvc_toolchain") {
+  toolchain(target_name) {
+    # When invoking this toolchain not as the default one, these args will be
+    # passed to the build. They are ignored when this is the default toolchain.
+    assert(defined(invoker.toolchain_args))
+    toolchain_args = {
+      forward_variables_from(invoker.toolchain_args, "*")
+
+      # This value needs to be passed through unchanged.
+      host_toolchain = host_toolchain
+
+      # This value needs to be passed through unchanged.
+      host_toolchain_no_sanitizers = host_toolchain_no_sanitizers
+    }
+
+    if (defined(toolchain_args.is_clang)) {
+      toolchain_is_clang = toolchain_args.is_clang
+    } else {
+      toolchain_is_clang = is_clang
+    }
+
+    # When the invoker has explicitly overridden use_goma or cc_wrapper in the
+    # toolchain args, use those values, otherwise default to the global one.
+    # This works because the only reasonable override that toolchains might
+    # supply for these values is to force-disable them.
+    if (defined(toolchain_args.use_remoteexec)) {
+      toolchain_uses_remoteexec = toolchain_args.use_remoteexec
+    } else {
+      toolchain_uses_remoteexec = use_remoteexec
+    }
+    if (defined(toolchain_args.use_goma)) {
+      toolchain_uses_goma = toolchain_args.use_goma
+    } else {
+      toolchain_uses_goma = use_goma
+    }
+    if (defined(toolchain_args.cc_wrapper)) {
+      toolchain_cc_wrapper = toolchain_args.cc_wrapper
+    } else {
+      toolchain_cc_wrapper = cc_wrapper
+    }
+    assert(!(toolchain_uses_remoteexec && toolchain_uses_goma),
+           "Goma and re-client can't be used together.")
+    assert(!(toolchain_cc_wrapper != "" && toolchain_uses_remoteexec),
+           "re-client and cc_wrapper can't be used together.")
+    assert(!(toolchain_cc_wrapper != "" && toolchain_uses_goma),
+           "Goma and cc_wrapper can't be used together.")
+
+    if (toolchain_uses_remoteexec) {
+      if (toolchain_is_clang) {
+        cl_prefix = "${rbe_bin_dir}/rewrapper -cfg=${rbe_cc_cfg_file} -exec_root=${rbe_exec_root} "
+      } else {
+        cl_prefix = ""
+      }
+    } else if (toolchain_uses_goma) {
+      cl_prefix = "${goma_dir}/gomacc${_exe} "
+    } else if (toolchain_cc_wrapper != "" && toolchain_is_clang) {
+      cl_prefix = toolchain_cc_wrapper + " "
+    } else {
+      cl_prefix = ""
+    }
+
+    cl = "${cl_prefix}${invoker.cl}"
+    if (host_os == "win") {
+      # Flip the slashes so that copy/paste of the command works.
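+      # (Illustrative only, with an assumed path: "../clang/bin/clang-cl"
+      # becomes "..\clang\bin\clang-cl"; the example value is not from this
+      # change.)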
+ cl = string_replace(cl, "/", "\\") + } + + # Make these apply to all tools below. + lib_switch = "" + lib_dir_switch = "/LIBPATH:" + + # Object files go in this directory. + object_subdir = "{{target_out_dir}}/{{label_name}}" + + env = invoker.environment + + if (use_lld) { + # lld-link includes a replacement for lib.exe that can produce thin + # archives and understands bitcode (for lto builds). + link = "${_clang_bin_path}/lld-link${_exe}" + if (host_os == "win") { + # Flip the slashes so that copy/paste of the commands works. + link = string_replace(link, "/", "\\") + } + lib = "$link /lib" + if (host_os != "win") { + # See comment adding --rsp-quoting to $cl above for more information. + link = "$link --rsp-quoting=posix" + } + } else { + lib = "lib.exe" + link = "link.exe" + } + + # If possible, pass system includes as flags to the compiler. When that's + # not possible, load a full environment file (containing %INCLUDE% and + # %PATH%) -- e.g. 32-bit MSVS builds require %PATH% to be set and just + # passing in a list of include directories isn't enough. + if (defined(invoker.sys_include_flags)) { + env_wrapper = "" + sys_include_flags = + "${invoker.sys_include_flags} " # Note trailing space. + } else { + # clang-cl doesn't need this env hoop, so omit it there. + assert(!toolchain_is_clang) + env_wrapper = "ninja -t msvc -e $env -- " # Note trailing space. + sys_include_flags = "" + } + + if (host_os != "win" || (use_lld && defined(invoker.sys_lib_flags))) { + linker_wrapper = "" + sys_lib_flags = "${invoker.sys_lib_flags} " # Note trailing space. + } else { + # link.exe must be run under a wrapper to set up the environment + # (it needs %LIB% set to find libraries), and to work around its bugs. + # Note trailing space: + linker_wrapper = + "\"$python_path\" $_tool_wrapper_path link-wrapper $env False " + sys_lib_flags = "" + } + + if (defined(toolchain_args.use_clang_coverage)) { + toolchain_use_clang_coverage = toolchain_args.use_clang_coverage + } else { + toolchain_use_clang_coverage = use_clang_coverage + } + + if (toolchain_use_clang_coverage) { + assert(toolchain_is_clang, + "use_clang_coverage should only be used with Clang") + if (defined(toolchain_args.coverage_instrumentation_input_file)) { + toolchain_coverage_instrumentation_input_file = + toolchain_args.coverage_instrumentation_input_file + } else { + toolchain_coverage_instrumentation_input_file = + coverage_instrumentation_input_file + } + + coverage_wrapper = + rebase_path("//build/toolchain/clang_code_coverage_wrapper.py", + root_build_dir) + coverage_wrapper = coverage_wrapper + " --target-os=" + target_os + if (toolchain_coverage_instrumentation_input_file != "") { + coverage_wrapper = + coverage_wrapper + " --files-to-instrument=" + + rebase_path(toolchain_coverage_instrumentation_input_file, + root_build_dir) + } + coverage_wrapper = "\"$python_path\" " + coverage_wrapper + " " + } else { + coverage_wrapper = "" + } + + # Disabled with cc_wrapper because of + # https://github.com/mozilla/sccache/issues/1013 + if (toolchain_is_clang && toolchain_cc_wrapper == "") { + # This flag omits system includes from /showIncludes output, to reduce + # the amount of data to parse and store in .ninja_deps. We do this on + # non-Windows too, and already make sure rebuilds after winsdk/libc++/ + # clang header updates happen via changing command line flags. 
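+      # (For reference, /showIncludes dependency lines have the form
+      # "Note: including file: <path>", which ninja consumes via
+      # depsformat = "msvc"; the sample text is an illustration, not part
+      # of this change.)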
+      show_includes = "/showIncludes:user"
+    } else {
+      show_includes = "/showIncludes"
+    }
+
+    tool("cc") {
+      precompiled_header_type = "msvc"
+      pdbname = "{{target_out_dir}}/{{label_name}}_c.pdb"
+
+      # Label names may have spaces in them so the pdbname must be quoted. The
+      # source and output don't need to be quoted because GN knows they're a
+      # full file name and will quote automatically when necessary.
+      depsformat = "msvc"
+      description = "CC {{output}}"
+      outputs = [ "$object_subdir/{{source_name_part}}.obj" ]
+
+      # Note that the code coverage wrapper script assumes that {{source}}
+      # comes immediately after /c.
+      command = "$coverage_wrapper$env_wrapper$cl /c {{source}} /nologo $show_includes $sys_include_flags{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} /Fo{{output}} /Fd\"$pdbname\""
+    }
+
+    tool("cxx") {
+      precompiled_header_type = "msvc"
+
+      # The PDB name needs to be different between C and C++ compiled files.
+      pdbname = "{{target_out_dir}}/{{label_name}}_cc.pdb"
+
+      # See comment in CC tool about quoting.
+      depsformat = "msvc"
+      description = "CXX {{output}}"
+      outputs = [ "$object_subdir/{{source_name_part}}.obj" ]
+
+      # Note that the code coverage wrapper script assumes that {{source}}
+      # comes immediately after /c.
+      command = "$coverage_wrapper$env_wrapper$cl /c {{source}} /Fo{{output}} /nologo $show_includes $sys_include_flags{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} /Fd\"$pdbname\""
+    }
+
+    tool("rc") {
+      command = "\"$python_path\" $_tool_wrapper_path rc-wrapper $env rc.exe /nologo $sys_include_flags{{defines}} {{include_dirs}} /fo{{output}} {{source}}"
+      depsformat = "msvc"
+      outputs = [ "$object_subdir/{{source_name_part}}.res" ]
+      description = "RC {{output}}"
+    }
+
+    tool("asm") {
+      is_msvc_assembler = true
+
+      if (toolchain_args.current_cpu == "arm64") {
+        if (toolchain_is_clang) {
+          ml = "${cl_prefix}${_clang_bin_path}/clang-cl${_exe} --target=arm64-windows"
+          if (host_os == "win") {
+            # Flip the slashes so that copy/paste of the command works.
+            ml = string_replace(ml, "/", "\\")
+          }
+          ml += " -c -o{{output}}"
+          is_msvc_assembler = false
+        } else {
+          # Only affects Arm builds with is_clang = false, implemented for
+          # building V8 for Windows on Arm systems with the MSVC toolchain.
+          ml = "armasm64.exe"
+        }
+      } else {
+        if (toolchain_is_clang && !disable_llvm_ml) {
+          prefix = rebase_path("$clang_base_path/bin", root_build_dir)
+          ml = "$prefix/llvm-ml${_exe}"
+          if (toolchain_args.current_cpu == "x64") {
+            ml += " -m64"
+          } else {
+            ml += " -m32"
+          }
+        } else {
+          if (toolchain_args.current_cpu == "x64") {
+            ml = "ml64.exe"
+          } else {
+            ml = "ml.exe"
+          }
+        }
+      }
+
+      if (is_msvc_assembler) {
+        ml += " /nologo /Fo{{output}}"
+
+        # Suppress final-stage linking on x64/x86 builds. (Armasm64 does not
+        # require /c because it doesn't support linking.)
+        if (toolchain_args.current_cpu != "arm64") {
+          ml += " /c"
+        }
+        if (use_lld && (!toolchain_is_clang || disable_llvm_ml)) {
+          # Wrap ml(64).exe with a script that makes its output deterministic.
+          # It's lld-only because the script zaps the obj timestamp, which
+          # link.exe /incremental looks at.
+          ml_py = rebase_path("//build/toolchain/win/ml.py", root_build_dir)
+          ml = "\"$python_path\" $ml_py $ml"
+        }
+      }
+      if (toolchain_args.current_cpu != "arm64" || toolchain_is_clang) {
+        # TODO(thakis): Stop using asm-wrapper when using clang.
+ command = "\"$python_path\" $_tool_wrapper_path asm-wrapper $env $ml {{defines}} {{include_dirs}} {{asmflags}} {{source}}" + } else { + # armasm64.exe does not support definitions passed via the command + # line. (Fortunately, they're not needed for compiling the V8 + # snapshot, which is the only time this assembler is required.) + command = "\"$python_path\" $_tool_wrapper_path asm-wrapper $env $ml {{include_dirs}} {{asmflags}} {{source}}" + } + + description = "ASM {{output}}" + outputs = [ "$object_subdir/{{source_name_part}}.obj" ] + } + + if (toolchain_has_rust) { + rustc_wrapper = rebase_path("//build/rust/rustc_wrapper.py") + rustc = rebase_path("${rust_sysroot}/bin/rustc", root_build_dir) + rust_sysroot_relative_to_out = rebase_path(rust_sysroot, root_out_dir) + rustc_windows_args = " -Clinker=$link $rustc_common_args" + + tool("rust_staticlib") { + libname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + rspfile = "$libname.rsp" + depfile = "$libname.d" + + default_output_extension = ".lib" + output_prefix = "lib" + default_output_dir = "{{root_out_dir}}" + description = "RUST(STATICLIB) {{output}}" + outputs = [ libname ] + + rspfile_content = "{{rustdeps}} {{externs}}" + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- $rustc_windows_args --emit=dep-info=$depfile,link -o $libname LDFLAGS RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + + tool("rust_rlib") { + # We must always prefix with `lib` even if the library already starts + # with that prefix or else our stdlib is unable to find libc.rlib (or + # actually liblibc.rlib). + rlibname = + "{{output_dir}}/lib{{target_output_name}}{{output_extension}}" + depfile = "$rlibname.d" + + # Do not use rsp files in this (common) case because they occupy the + # ninja main thread, and {{rlibs}} have shorter command lines than + # fully linked targets. + + default_output_extension = ".rlib" + + # This is prefixed unconditionally in `rlibname`. + # output_prefix = "lib" + default_output_dir = "{{root_out_dir}}" + description = "RUST {{output}}" + outputs = [ rlibname ] + + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile -- $rustc_windows_args --emit=dep-info=$depfile,link -o $rlibname {{rustdeps}} {{externs}} LDFLAGS RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + + tool("rust_bin") { + exename = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + pdbname = "$exename.pdb" + rspfile = "$exename.rsp" + depfile = "$exename.d" + pool = "//build/toolchain:link_pool($default_toolchain)" + + default_output_extension = ".exe" + default_output_dir = "{{root_out_dir}}" + description = "RUST(BIN) {{output}}" + outputs = [ + # The first entry here is used for dependency tracking. + exename, + pdbname, + ] + runtime_outputs = outputs + + rspfile_content = "{{rustdeps}} {{externs}}" + command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- $rustc_windows_args --emit=dep-info=$depfile,link -o $exename LDFLAGS {{ldflags}} $sys_lib_flags /PDB:$pdbname RUSTENV {{rustenv}}" + rust_sysroot = rust_sysroot_relative_to_out + } + + tool("rust_cdylib") { + # E.g. "foo.dll": + dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}" + libname = "$dllname.lib" # e.g. 
foo.dll.lib
+        pdbname = "$dllname.pdb"
+        rspfile = "$dllname.rsp"
+        depfile = "$dllname.d"
+        pool = "//build/toolchain:link_pool($default_toolchain)"
+
+        default_output_extension = ".dll"
+        default_output_dir = "{{root_out_dir}}"
+        description = "RUST(CDYLIB) {{output}}"
+        outputs = [
+          # The first entry here is used for dependency tracking. Dylibs are
+          # linked into other targets and that linking must be done through
+          # the .lib file, not the .dll file. So the .lib file is the primary
+          # output here.
+          libname,
+          dllname,
+          pdbname,
+        ]
+        runtime_outputs = [
+          dllname,
+          pdbname,
+        ]
+
+        rspfile_content = "{{rustdeps}} {{externs}}"
+        command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- $rustc_windows_args --emit=dep-info=$depfile,link -o $dllname LDFLAGS {{ldflags}} $sys_lib_flags /PDB:$pdbname /IMPLIB:$libname RUSTENV {{rustenv}}"
+        rust_sysroot = rust_sysroot_relative_to_out
+
+        # Since the above command only updates the .lib file when it changes,
+        # ask Ninja to check if the timestamp actually changed to know if
+        # downstream dependencies should be recompiled.
+        restat = true
+      }
+
+      tool("rust_macro") {
+        # E.g. "foo.dll":
+        dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+        pdbname = "$dllname.pdb"
+        rspfile = "$dllname.rsp"
+        depfile = "$dllname.d"
+        pool = "//build/toolchain:link_pool($default_toolchain)"
+
+        default_output_extension = ".dll"
+        default_output_dir = "{{root_out_dir}}"
+        description = "RUST(MACRO) {{output}}"
+        outputs = [
+          # The first entry here is used for dependency tracking. Proc macros
+          # are consumed as dlls directly, loaded at runtime, so the dll is the
+          # primary output here. If we make a .lib file the primary output, we
+          # end up trying to load the .lib file as a procmacro, which fails.
+          #
+          # Since depending on a macro target for linking would fail (it would
+          # try to link the primary .dll target) we omit the .lib here entirely.
+          dllname,
+          pdbname,
+        ]
+        runtime_outputs = outputs
+
+        rspfile_content = "{{rustdeps}} {{externs}}"
+        command = "\"$python_path\" \"$rustc_wrapper\" --rustc=$rustc --depfile=$depfile --rsp=$rspfile -- $rustc_windows_args --emit=dep-info=$depfile,link -o $dllname LDFLAGS {{ldflags}} $sys_lib_flags /PDB:$pdbname RUSTENV {{rustenv}}"
+        rust_sysroot = rust_sysroot_relative_to_out
+
+        # Since the above command only updates the .lib file when it changes,
+        # ask Ninja to check if the timestamp actually changed to know if
+        # downstream dependencies should be recompiled.
+        restat = true
+      }
+    }
+
+    tool("alink") {
+      rspfile = "{{output}}.rsp"
+      command =
+          "$linker_wrapper$lib /OUT:{{output}} /nologo {{arflags}} @$rspfile"
+      description = "LIB {{output}}"
+      outputs = [
+        # Ignore {{output_extension}} and always use .lib, there's no reason to
+        # allow targets to override this extension on Windows.
+        "{{output_dir}}/{{target_output_name}}.lib",
+      ]
+      default_output_extension = ".lib"
+      default_output_dir = "{{target_out_dir}}"
+
+      # The use of inputs_newline is to work around a fixed per-line buffer
+      # size in the linker.
+      rspfile_content = "{{inputs_newline}}"
+    }
+
+    tool("solink") {
+      # E.g. "foo.dll":
+      dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+      libname = "${dllname}.lib"  # e.g. foo.dll.lib
+      pdbname = "${dllname}.pdb"
+      rspfile = "${dllname}.rsp"
+      pool = "//build/toolchain:link_pool($default_toolchain)"
+
+      command = "$linker_wrapper$link /OUT:$dllname /nologo ${sys_lib_flags}/IMPLIB:$libname /DLL /PDB:$pdbname @$rspfile"
+
+      default_output_extension = ".dll"
+      default_output_dir = "{{root_out_dir}}"
+      description = "LINK(DLL) {{output}}"
+      outputs = [
+        dllname,
+        libname,
+        pdbname,
+      ]
+      link_output = libname
+      depend_output = libname
+      runtime_outputs = [
+        dllname,
+        pdbname,
+      ]
+
+      # Since the above command only updates the .lib file when it changes,
+      # ask Ninja to check if the timestamp actually changed to know if
+      # downstream dependencies should be recompiled.
+      restat = true
+
+      # The use of inputs_newline is to work around a fixed per-line buffer
+      # size in the linker.
+      rspfile_content =
+          "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}} {{rlibs}}"
+    }
+
+    tool("solink_module") {
+      # E.g. "foo.dll":
+      dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+      pdbname = "${dllname}.pdb"
+      rspfile = "${dllname}.rsp"
+      pool = "//build/toolchain:link_pool($default_toolchain)"
+
+      command = "$linker_wrapper$link /OUT:$dllname /nologo ${sys_lib_flags}/DLL /PDB:$pdbname @$rspfile"
+
+      default_output_extension = ".dll"
+      default_output_dir = "{{root_out_dir}}"
+      description = "LINK_MODULE(DLL) {{output}}"
+      outputs = [
+        dllname,
+        pdbname,
+      ]
+      runtime_outputs = outputs
+
+      # The use of inputs_newline is to work around a fixed per-line buffer
+      # size in the linker.
+      rspfile_content =
+          "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}} {{rlibs}}"
+    }
+
+    tool("link") {
+      exename = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+      pdbname = "$exename.pdb"
+      rspfile = "$exename.rsp"
+      pool = "//build/toolchain:link_pool($default_toolchain)"
+
+      command = "$linker_wrapper$link /OUT:$exename /nologo ${sys_lib_flags} /PDB:$pdbname @$rspfile"
+
+      default_output_extension = ".exe"
+      default_output_dir = "{{root_out_dir}}"
+      description = "LINK {{output}}"
+      outputs = [
+        exename,
+        pdbname,
+      ]
+      runtime_outputs = outputs
+
+      # The use of inputs_newline is to work around a fixed per-line buffer
+      # size in the linker.
+      rspfile_content =
+          "{{inputs_newline}} {{libs}} {{solibs}} {{ldflags}} {{rlibs}}"
+    }
+
+    # These two are really entirely generic, but have to be repeated in
+    # each toolchain because GN doesn't allow a template to be used here.
+    # See //build/toolchain/toolchain.gni for details.
+    tool("stamp") {
+      command = stamp_command
+      description = stamp_description
+      pool = "//build/toolchain:action_pool($default_toolchain)"
+    }
+    tool("copy") {
+      command = copy_command
+      description = copy_description
+      pool = "//build/toolchain:action_pool($default_toolchain)"
+    }
+
+    tool("action") {
+      pool = "//build/toolchain:action_pool($default_toolchain)"
+    }
+  }
+}
+
+# Makes a single MSVC toolchain, or possibly two if we
+# need an additional toolchain without sanitizers enabled.
+template("msvc_toolchain") {
+  single_msvc_toolchain(target_name) {
+    assert(defined(invoker.toolchain_args),
+           "Toolchains must declare toolchain_args")
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "visibility",
+                             "test_only",
+                           ])
+
+    # No need to forward visibility and test_only as they apply to targets,
+    # not toolchains, but presubmit checks require that we explicitly
+    # exclude them.
+  }
+
+  if (using_sanitizer) {
+    # Make an additional toolchain with no sanitizers.
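+    # (For example, assuming an invocation named "win_clang_x64", this also
+    # defines a "win_clang_x64_no_sanitizers" toolchain; the name is
+    # illustrative.)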
+ single_msvc_toolchain("${target_name}_no_sanitizers") { + assert(defined(invoker.toolchain_args), + "Toolchains must declare toolchain_args") + forward_variables_from(invoker, + "*", + [ + "toolchain_args", + "visibility", + "test_only", + ]) + toolchain_args = { + # Populate toolchain args from the invoker. + forward_variables_from(invoker.toolchain_args, "*") + toolchain_disables_sanitizers = true + } + } + } +} + +template("win_toolchains") { + assert(defined(invoker.toolchain_arch)) + toolchain_arch = invoker.toolchain_arch + + if (toolchain_arch == "x86") { + win_toolchain_data = win_toolchain_data_x86 + } else if (toolchain_arch == "x64") { + win_toolchain_data = win_toolchain_data_x64 + } else if (toolchain_arch == "arm64") { + win_toolchain_data = win_toolchain_data_arm64 + } else { + error("Unsupported toolchain_arch, add it to win_toolchain_data.gni") + } + + # The toolchain using MSVC only makes sense when not doing cross builds. + # Chromium exclusively uses the win_clang_ toolchain below, but V8 and + # WebRTC still use this MSVC toolchain in some cases. + if (host_os == "win") { + if (defined(invoker.cl_toolchain_prefix)) { + cl_toolchain_prefix = invoker.cl_toolchain_prefix + } else { + cl_toolchain_prefix = "" + } + msvc_toolchain(cl_toolchain_prefix + target_name) { + environment = "environment." + toolchain_arch + cl = "\"${win_toolchain_data.vc_bin_dir}/cl.exe\"" + + toolchain_args = { + if (defined(invoker.toolchain_args)) { + forward_variables_from(invoker.toolchain_args, "*") + } + is_clang = false + use_clang_coverage = false + current_os = "win" + current_cpu = toolchain_arch + } + } + } + + if (defined(invoker.clang_toolchain_prefix)) { + clang_toolchain_prefix = invoker.clang_toolchain_prefix + } else { + clang_toolchain_prefix = "win_clang_" + } + msvc_toolchain(clang_toolchain_prefix + target_name) { + environment = "environment." + toolchain_arch + cl = "${_clang_bin_path}/clang-cl${_exe}" + _clang_lib_dir = + rebase_path("$clang_base_path/lib/clang/$clang_version/lib/windows", + root_build_dir) + if (host_os == "win") { + # And to match the other -libpath flags. + _clang_lib_dir = string_replace(_clang_lib_dir, "/", "\\") + } + + sys_include_flags = "${win_toolchain_data.include_flags_imsvc}" + if (use_lld) { + sys_lib_flags = + "-libpath:$_clang_lib_dir ${win_toolchain_data.libpath_lldlink_flags}" + + # TODO(thakis): Remove once crbug.com/1300005 is fixed + assert(toolchain_arch == "x64" || toolchain_arch == "x86" || + toolchain_arch == "arm" || toolchain_arch == "arm64", + "Only supports x64, x86, arm and arm64 CPUs") + if (toolchain_arch == "x64") { + sys_lib_flags += " /MACHINE:X64" + } else if (toolchain_arch == "x86") { + sys_lib_flags += " /MACHINE:X86" + } else if (toolchain_arch == "arm") { + sys_lib_flags += " /MACHINE:ARM" + } else if (toolchain_arch == "arm64") { + sys_lib_flags += " /MACHINE:ARM64" + } + } + + toolchain_args = { + if (defined(invoker.toolchain_args)) { + forward_variables_from(invoker.toolchain_args, "*") + } + is_clang = true + current_os = "win" + current_cpu = toolchain_arch + } + } +} diff --git a/toolchain/win/win_toolchain_data.gni b/toolchain/win/win_toolchain_data.gni new file mode 100644 index 000000000000..505d0ce5049f --- /dev/null +++ b/toolchain/win/win_toolchain_data.gni @@ -0,0 +1,43 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import("//build/config/win/visual_studio_version.gni") + +declare_args() { + win_toolchain_data_x86 = + exec_script("//build/toolchain/win/setup_toolchain.py", + [ + visual_studio_path, + windows_sdk_path, + visual_studio_runtime_dirs, + "win", + "x86", + "environment.x86", + ], + "scope") + + win_toolchain_data_x64 = + exec_script("//build/toolchain/win/setup_toolchain.py", + [ + visual_studio_path, + windows_sdk_path, + visual_studio_runtime_dirs, + "win", + "x64", + "environment.x64", + ], + "scope") + + win_toolchain_data_arm64 = + exec_script("//build/toolchain/win/setup_toolchain.py", + [ + visual_studio_path, + windows_sdk_path, + visual_studio_runtime_dirs, + "win", + "arm64", + "environment.arm64", + ], + "scope") +} diff --git a/toolchain/wrapper_utils.py b/toolchain/wrapper_utils.py new file mode 100644 index 000000000000..f01e159fd69c --- /dev/null +++ b/toolchain/wrapper_utils.py @@ -0,0 +1,99 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Helper functions for gcc_toolchain.gni wrappers.""" + +import gzip +import os +import re +import subprocess +import shlex +import shutil +import sys +import threading + +import whole_archive + +_BAT_PREFIX = 'cmd /c call ' + + +def _GzipThenDelete(src_path, dest_path): + # Results for Android map file with GCC on a z620: + # Uncompressed: 207MB + # gzip -9: 16.4MB, takes 8.7 seconds. + # gzip -1: 21.8MB, takes 2.0 seconds. + # Piping directly from the linker via -print-map (or via -Map with a fifo) + # adds a whopping 30-45 seconds! + with open(src_path, 'rb') as f_in, gzip.GzipFile(dest_path, 'wb', 1) as f_out: + shutil.copyfileobj(f_in, f_out) + os.unlink(src_path) + + +def CommandToRun(command): + """Generates commands compatible with Windows. + + When running on a Windows host and using a toolchain whose tools are + actually wrapper scripts (i.e. .bat files on Windows) rather than binary + executables, the |command| to run has to be prefixed with this magic. + The GN toolchain definitions take care of that for when GN/Ninja is + running the tool directly. When that command is passed in to this + script, it appears as a unitary string but needs to be split up so that + just 'cmd' is the actual command given to Python's subprocess module. + + Args: + command: List containing the UNIX style |command|. + + Returns: + A list containing the Windows version of the |command|. + """ + if command[0].startswith(_BAT_PREFIX): + command = command[0].split(None, 3) + command[1:] + return command + + +def RunLinkWithOptionalMapFile(command, env=None, map_file=None): + """Runs the given command, adding in -Wl,-Map when |map_file| is given. + + Also takes care of gzipping when |map_file| ends with .gz. + + Args: + command: List of arguments comprising the command. + env: Environment variables. + map_file: Path to output map_file. + + Returns: + The exit code of running |command|. + """ + tmp_map_path = None + if map_file and map_file.endswith('.gz'): + tmp_map_path = map_file + '.tmp' + command.append('-Wl,-Map,' + tmp_map_path) + elif map_file: + command.append('-Wl,-Map,' + map_file) + + # We want to link rlibs as --whole-archive if they are part of a unit test + # target. This is determined by switch `-LinkWrapper,add-whole-archive`. 
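+  # (Sketch of the intended rewrite, assuming whole_archive.py's behavior:
+  #   ['cc', 'foo.rlib'] -> ['cc', '-Wl,--whole-archive', 'foo.rlib',
+  #                          '-Wl,--no-whole-archive']
+  #  -- illustrative values, not the exact output.)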
+ command = whole_archive.wrap_with_whole_archive(command) + + result = subprocess.call(command, env=env) + + if tmp_map_path and result == 0: + threading.Thread( + target=lambda: _GzipThenDelete(tmp_map_path, map_file)).start() + elif tmp_map_path and os.path.exists(tmp_map_path): + os.unlink(tmp_map_path) + + return result + + +def CaptureCommandStderr(command, env=None): + """Returns the stderr of a command. + + Args: + command: A list containing the command and arguments. + env: Environment variables for the new process. + """ + child = subprocess.Popen(command, stderr=subprocess.PIPE, env=env) + _, stderr = child.communicate() + return child.returncode, stderr diff --git a/toolchain/zos/BUILD.gn b/toolchain/zos/BUILD.gn new file mode 100644 index 000000000000..3af5f8033d82 --- /dev/null +++ b/toolchain/zos/BUILD.gn @@ -0,0 +1,174 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is based on gcc_toolchain.gni and customized for z/OS. + +import("//build/toolchain/gcc_toolchain.gni") + +toolchain("s390x") { + cc = "xlclang" + cxx = "xlclang++" + asm = "xlclang" + ar = "ar" + ld = cxx + + toolchain_args = { + current_cpu = "s390x" + current_os = "zos" + } + + rebuild_string = "" + default_shlib_extension = ".so" + default_shlib_subdir = "" + extra_cflags = "" + extra_cppflags = "" + extra_cxxflags = "" + extra_asmflags = "" + extra_ldflags = "" + + # These library switches can apply to all tools below. + lib_switch = "-l" + lib_dir_switch = "-L" + + # Object files go in this directory. + object_subdir = "{{target_out_dir}}/{{label_name}}" + + tool("cc") { + depfile = "{{output}}.d" + command = "$cc -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}}${extra_cflags} -c {{source}} -o {{output}}" + depsformat = "gcc" + description = "CC {{output}}" + outputs = [ "$object_subdir/{{source_name_part}}.o" ] + } + + tool("cxx") { + depfile = "{{output}}.d" + command = "$cxx -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}}${extra_cppflags}${extra_cxxflags} -c {{source}} -o {{output}}" + depsformat = "gcc" + description = "CXX {{output}}" + outputs = [ "$object_subdir/{{source_name_part}}.o" ] + } + + tool("asm") { + # Just use the C compiler to compile assembly. + depfile = "{{output}}.d" + command = "$asm -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{asmflags}}${extra_asmflags} -c {{source}} -o {{output}}" + depsformat = "gcc" + description = "ASM {{output}}" + outputs = [ "$object_subdir/{{source_name_part}}.o" ] + } + + tool("alink") { + command = "$ar {{arflags}} -r -c -s {{output}} {{inputs}}" + + # Remove the output file first so that ar doesn't try to modify the + # existing file. + command = "rm -f {{output}} && $command" + + # Almost all targets build with //build/config/compiler:thin_archive which + # adds -T to arflags. + description = "AR {{output}}" + outputs = [ "{{output_dir}}/{{target_output_name}}{{output_extension}}" ] + + # Shared libraries go in the target out directory by default so we can + # generate different targets with the same name and not have them collide. + default_output_dir = "{{target_out_dir}}" + default_output_extension = ".a" + output_prefix = "lib" + } + + tool("solink") { + soname = "{{target_output_name}}{{output_extension}}" # e.g. "libfoo.so". + sofile = "{{output_dir}}/$soname" # Possibly including toolchain dir. 
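+    # (The .x file below is the z/OS side deck; consumers link against it
+    # rather than the .so, per the link_output/depend_output settings further
+    # down -- an aside inferred from the surrounding definitions.)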
+ xfile = "{{output_dir}}/{{target_output_name}}.x" + rspfile = sofile + ".rsp" + + # These variables are not built into GN but are helpers that + # implement (1) linking to produce a .so, (2) extracting the symbols + # from that file (3) if the extracted list differs from the existing + # .TOC file, overwrite it, otherwise, don't change it. + link_command = "$ld -Wl,DLL {{ldflags}}${extra_ldflags} -o \"$sofile\" `cat $rspfile`" + + solink_wrapper = + rebase_path("//build/toolchain/gcc_link_wrapper.py", root_build_dir) + command = "$python_path \"$solink_wrapper\" --output=\"$sofile\" -- $link_command" + + rspfile_content = "{{inputs}} {{solibs}} {{libs}}" + + description = "SOLINK $sofile" + + # Use this for {{output_extension}} expansions unless a target manually + # overrides it (in which case {{output_extension}} will be what the target + # specifies). + default_output_extension = default_shlib_extension + + default_output_dir = "{{root_out_dir}}${default_shlib_subdir}" + + output_prefix = "lib" + + # Since the above commands only updates the .TOC file when it changes, ask + # Ninja to check if the timestamp actually changed to know if downstream + # dependencies should be recompiled. + restat = true + + # Tell GN about the output files. It will link to the sofile but use the + # tocfile for dependency management. + outputs = [ xfile ] + outputs += [ sofile ] + + link_output = xfile + depend_output = xfile + } + + tool("solink_module") { + soname = "{{target_output_name}}{{output_extension}}" # e.g. "libfoo.so". + sofile = "{{output_dir}}/$soname" + xfile = "{{output_dir}}/{{target_output_name}}.x" + + rspfile = sofile + ".rsp" + + command = "$ld {{ldflags}}${extra_ldflags} -o \"$sofile\" `cat $rspfile`" + + rspfile_content = "{{inputs}} {{solibs}} {{libs}}" + + description = "SOLINK_MODULE $sofile" + + default_output_dir = "{{root_out_dir}}${default_shlib_subdir}" + + output_prefix = "lib" + outputs = [ xfile ] + outputs += [ sofile ] + } + + tool("link") { + exename = "{{target_output_name}}{{output_extension}}" + outfile = "{{output_dir}}/$exename" + rspfile = "$outfile.rsp" + + default_output_dir = "{{root_out_dir}}" + + link_command = "$ld {{ldflags}}${extra_ldflags} -o \"$outfile\" `cat $rspfile` {{solibs}} {{libs}}" + + link_wrapper = + rebase_path("//build/toolchain/gcc_link_wrapper.py", root_build_dir) + + command = "$python_path \"$link_wrapper\" --output=\"$outfile\" -- $link_command" + + description = "LINK $outfile" + rspfile_content = "{{inputs}}" + outputs = [ outfile ] + } + + # These two are really entirely generic, but have to be repeated in + # each toolchain because GN doesn't allow a template to be used here. + # See //build/toolchain/toolchain.gni for details. + tool("stamp") { + command = stamp_command + description = stamp_description + } + tool("copy") { + command = copy_command + description = copy_description + } +} diff --git a/tree_truth.sh b/tree_truth.sh new file mode 100755 index 000000000000..00150f0740ff --- /dev/null +++ b/tree_truth.sh @@ -0,0 +1,102 @@ +#!/bin/bash +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# Script for printing recent commits in a buildbot run. + +# Return the sha1 of the given tag. If not present, return "". +# $1: path to repo +# $2: tag name +tt_sha1_for_tag() { + oneline=$(cd $1 && git log -1 $2 --format='%H' 2>/dev/null) + if [ $? 
-eq 0 ] ; then + echo $oneline + fi +} + +# Return the sha1 of HEAD, or "" +# $1: path to repo +tt_sha1_for_head() { + ( cd $1 && git log HEAD -n1 --format='%H' | cat ) +} + +# For the given repo, set tag to HEAD. +# $1: path to repo +# $2: tag name +tt_tag_head() { + ( cd $1 && git tag -f $2 ) +} + +# For the given repo, delete the tag. +# $1: path to repo +# $2: tag name +tt_delete_tag() { + ( cd $1 && git tag -d $2 ) +} + +# For the given repo, set tag to "three commits ago" (for testing). +# $1: path to repo +# $2: tag name +tt_tag_three_ago() { + local sh=$(cd $1 && git log --pretty=oneline -n 3 | tail -1 | awk '{print $1}') + ( cd $1 && git tag -f $2 $sh ) +} + +# List the commits between the given tag and HEAD. +# If the tag does not exist, only list the last few. +# If the tag is at HEAD, list nothing. +# Output format has distinct build steps for repos with changes. +# $1: path to repo +# $2: tag name +# $3: simple/short repo name to use for display +tt_list_commits() { + local tag_sha1=$(tt_sha1_for_tag $1 $2) + local head_sha1=$(tt_sha1_for_head $1) + local display_name=$(echo $3 | sed 's#/#_#g') + if [ "${tag_sha1}" = "${head_sha1}" ] ; then + return + fi + if [ "${tag_sha1}" = "" ] ; then + echo "@@@BUILD_STEP Recent commits in repo $display_name@@@" + echo "NOTE: git tag was not found so we have no baseline." + echo "Here are some recent commits, but they may not be new for this build." + ( cd $1 && git log -n 10 --stat | cat) + else + echo "@@@BUILD_STEP New commits in repo $display_name@@@" + ( cd $1 && git log -n 500 $2..HEAD --stat | cat) + fi +} + +# Clean out the tree truth tags in all repos. For testing. +tt_clean_all() { + for project in $@; do + tt_delete_tag $CHROME_SRC/../$project tree_truth + done +} + +# Print tree truth for all clank repos. +tt_print_all() { + for project in $@; do + local full_path=$CHROME_SRC/../$project + tt_list_commits $full_path tree_truth $project + tt_tag_head $full_path tree_truth + done +} + +# Print a summary of the last 10 commits for each repo. +tt_brief_summary() { + echo "@@@BUILD_STEP Brief summary of recent CLs in every branch@@@" + for project in $@; do + echo $project: + local full_path=$CHROME_SRC/../$project + (cd $full_path && git log -n 10 --format=" %H %s %an, %ad" | cat) + echo "=================================================================" + done +} + +CHROME_SRC=$1 +shift +PROJECT_LIST=$@ +tt_brief_summary $PROJECT_LIST +tt_print_all $PROJECT_LIST diff --git a/update-linux-sandbox.sh b/update-linux-sandbox.sh new file mode 100755 index 000000000000..1d2442483c5d --- /dev/null +++ b/update-linux-sandbox.sh @@ -0,0 +1,82 @@ +#!/bin/sh + +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +BUILDTYPE="${BUILDTYPE:-Debug}" +CHROME_SRC_DIR="${CHROME_SRC_DIR:-$(dirname -- $(readlink -fn -- "$0"))/..}" +CHROME_OUT_DIR="${CHROME_SRC_DIR}/${CHROMIUM_OUT_DIR:-out}/${BUILDTYPE}" +CHROME_SANDBOX_BUILD_PATH="${CHROME_OUT_DIR}/chrome_sandbox" +CHROME_SANDBOX_INST_PATH="/usr/local/sbin/chrome-devel-sandbox" +CHROME_SANDBOX_INST_DIR=$(dirname -- "$CHROME_SANDBOX_INST_PATH") + +TARGET_DIR_TYPE=$(stat -f -c %t -- "${CHROME_SANDBOX_INST_DIR}" 2>/dev/null) +if [ $? -ne 0 ]; then + echo "Could not get status of ${CHROME_SANDBOX_INST_DIR}" + exit 1 +fi + +# Make sure the path is not on NFS. +if [ "${TARGET_DIR_TYPE}" = "6969" ]; then + echo "Please make sure ${CHROME_SANDBOX_INST_PATH} is not on NFS!" 
+  exit 1
+fi
+
+installsandbox() {
+  echo "(using sudo so you may be asked for your password)"
+  sudo -- cp "${CHROME_SANDBOX_BUILD_PATH}" \
+    "${CHROME_SANDBOX_INST_PATH}" &&
+  sudo -- chown root:root "${CHROME_SANDBOX_INST_PATH}" &&
+  sudo -- chmod 4755 "${CHROME_SANDBOX_INST_PATH}"
+  return $?
+}
+
+if [ ! -d "${CHROME_OUT_DIR}" ]; then
+  echo -n "${CHROME_OUT_DIR} does not exist. Use \"BUILDTYPE=Release ${0}\" "
+  echo "if you are building in Release mode"
+  exit 1
+fi
+
+if [ ! -f "${CHROME_SANDBOX_BUILD_PATH}" ]; then
+  echo "Could not find ${CHROME_SANDBOX_BUILD_PATH}"
+  echo -n "BUILDTYPE is $BUILDTYPE, use \"BUILDTYPE=<value> ${0}\" to override "
+  echo "after you build the chrome_sandbox target"
+  exit 1
+fi
+
+if readelf -d "${CHROME_SANDBOX_BUILD_PATH}" | \
+    grep "(RPATH)" > /dev/null 2>&1; then
+  echo "Build requires is_component_build=false in ${CHROME_OUT_DIR}/args.gn."
+  exit 1
+fi
+
+if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then
+  echo -n "Could not find ${CHROME_SANDBOX_INST_PATH}, "
+  echo "installing it now."
+  installsandbox
+fi
+
+if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then
+  echo "Failed to install ${CHROME_SANDBOX_INST_PATH}"
+  exit 1
+fi
+
+CURRENT_API=$("${CHROME_SANDBOX_BUILD_PATH}" --get-api)
+INSTALLED_API=$("${CHROME_SANDBOX_INST_PATH}" --get-api)
+
+if [ "${CURRENT_API}" != "${INSTALLED_API}" ]; then
+  echo "Your installed setuid sandbox is too old, installing it now."
+  if ! installsandbox; then
+    echo "Failed to install ${CHROME_SANDBOX_INST_PATH}"
+    exit 1
+  fi
+else
+  echo "Your setuid sandbox is up to date"
+  if [ "${CHROME_DEVEL_SANDBOX}" != "${CHROME_SANDBOX_INST_PATH}" ]; then
+    echo -n "Make sure you have \"export "
+    echo -n "CHROME_DEVEL_SANDBOX=${CHROME_SANDBOX_INST_PATH}\" "
+    echo "somewhere in your .bashrc"
+    echo "This variable is currently: ${CHROME_DEVEL_SANDBOX:-empty}"
+  fi
+fi
diff --git a/util/BUILD.gn b/util/BUILD.gn
new file mode 100644
index 000000000000..a96d326776a9
--- /dev/null
+++ b/util/BUILD.gn
@@ -0,0 +1,37 @@
+# Copyright 2013 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/util/lastchange.gni")
+
+action("chromium_git_revision") {
+  script = "version.py"
+
+  template_file = "chromium_git_revision.h.in"
+  inputs = [
+    lastchange_file,
+    template_file,
+  ]
+
+  output_file = "$target_gen_dir/chromium_git_revision.h"
+  outputs = [ output_file ]
+
+  args = [
+    # LASTCHANGE contains "<build hash>-<ref>". The user agent only wants the
+    # "<build hash>" bit, so chop off everything after it.
+    "-e",
+    "LASTCHANGE=LASTCHANGE[:LASTCHANGE.find('-')]",
+    "-f",
+    rebase_path(lastchange_file, root_build_dir),
+    rebase_path(template_file, root_build_dir),
+    rebase_path(output_file, root_build_dir),
+  ]
+}
+
+group("test_results") {
+  data = [
+    "//.vpython3",
+    "//build/util/lib/__init__.py",
+    "//build/util/lib/results/",
+  ]
+}
diff --git a/util/LASTCHANGE.dummy b/util/LASTCHANGE.dummy
new file mode 100644
index 000000000000..21bb3c33c745
--- /dev/null
+++ b/util/LASTCHANGE.dummy
@@ -0,0 +1 @@
+LASTCHANGE=0000000000000000000000000000000000000000-0000000000000000000000000000000000000000
diff --git a/util/PRESUBMIT.py b/util/PRESUBMIT.py
new file mode 100644
index 000000000000..88fd9bf5b323
--- /dev/null
+++ b/util/PRESUBMIT.py
@@ -0,0 +1,64 @@
+# Copyright 2019 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+ +import re +"""Presubmit for build/util""" + + +USE_PYTHON3 = True + + +def _GetFilesToSkip(input_api): + files_to_skip = [] + affected_files = input_api.change.AffectedFiles() + version_script_change = next( + (f for f in affected_files + if re.search('\\/version\\.py$|\\/version_test\\.py$', f.LocalPath())), + None) + + if version_script_change is None: + files_to_skip.append('version_test\\.py$') + + android_chrome_version_script_change = next( + (f for f in affected_files if re.search( + '\\/android_chrome_version\\.py$|' + '\\/android_chrome_version_test\\.py$', f.LocalPath())), None) + + if android_chrome_version_script_change is None: + files_to_skip.append('android_chrome_version_test\\.py$') + + return files_to_skip + + +def _GetPythonUnitTests(input_api, output_api): + # No need to test if files are unchanged + files_to_skip = _GetFilesToSkip(input_api) + + return input_api.canned_checks.GetUnitTestsRecursively( + input_api, + output_api, + input_api.PresubmitLocalPath(), + files_to_check=['.*_test\\.py$'], + files_to_skip=files_to_skip, + run_on_python2=False, + run_on_python3=True, + skip_shebang_check=True) + + +def CommonChecks(input_api, output_api): + """Presubmit checks run on both upload and commit. + """ + checks = [] + checks.extend(_GetPythonUnitTests(input_api, output_api)) + return input_api.RunTests(checks, False) + + +def CheckChangeOnUpload(input_api, output_api): + """Presubmit checks on CL upload.""" + return CommonChecks(input_api, output_api) + + +def CheckChangeOnCommit(input_api, output_api): + """Presubmit checks on commit.""" + return CommonChecks(input_api, output_api) diff --git a/util/action_remote.py b/util/action_remote.py new file mode 100755 index 000000000000..ea2e132442db --- /dev/null +++ b/util/action_remote.py @@ -0,0 +1,146 @@ +#!/usr/bin/env python3 +# Copyright 2022 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Wrapper script to run action remotely through rewrapper with gn. + +Also includes Chromium-specific input processors which don't make sense to +be reclient inbuilt input processors.""" + +import argparse +import json +import os +import subprocess +import sys +from enum import Enum + +_THIS_DIR = os.path.realpath(os.path.dirname(__file__)) +_SRC_DIR = os.path.dirname(os.path.dirname(_THIS_DIR)) +_MOJOM_DIR = os.path.join(_SRC_DIR, 'mojo', 'public', 'tools', 'mojom') + + +class CustomProcessor(Enum): + mojom_parser = 'mojom_parser' + + def __str__(self): + return self.value + + +def _process_build_metadata_json(bm_file, input_roots, output_root, re_outputs, + processed_inputs): + """Recursively find mojom_parser inputs from a build_metadata file.""" + # Import Mojo-specific dep here so non-Mojo remote actions don't need it. + if _MOJOM_DIR not in sys.path: + sys.path.insert(0, _MOJOM_DIR) + from mojom_parser import RebaseAbsolutePath + + if bm_file in processed_inputs: + return + + processed_inputs.add(bm_file) + + bm_dir = os.path.dirname(bm_file) + + with open(bm_file) as f: + bm = json.load(f) + + # All sources and corresponding module files are inputs. 
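+  # (Assumed shape of a build_metadata file, for illustration only:
+  #   {"sources": ["foo.mojom"], "deps": ["../bar/bar_build_metadata.json"]}
+  #  -- the real schema is defined by the mojom build rules, not here.)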
+ for s in bm["sources"]: + src = os.path.normpath(os.path.join(bm_dir, s)) + if src not in processed_inputs and os.path.exists(src): + processed_inputs.add(src) + src_module = os.path.join( + output_root, + RebaseAbsolutePath(os.path.abspath(src), input_roots) + "-module") + if src_module in re_outputs: + continue + if src_module not in processed_inputs and os.path.exists(src_module): + processed_inputs.add(src_module) + + # Recurse into build_metadata deps. + for d in bm["deps"]: + dep = os.path.normpath(os.path.join(bm_dir, d)) + _process_build_metadata_json(dep, input_roots, output_root, re_outputs, + processed_inputs) + + +def _get_mojom_parser_inputs(exec_root, output_files, extra_args): + """Get mojom inputs by walking generated build_metadata files. + + This is less complexity and disk I/O compared to parsing mojom files for + imports and finding all imports. + + Start from the root build_metadata file passed to mojom_parser's + --check-imports flag. + """ + argparser = argparse.ArgumentParser() + argparser.add_argument('--check-imports', dest='check_imports', required=True) + argparser.add_argument('--output-root', dest='output_root', required=True) + argparser.add_argument('--input-root', + default=[], + action='append', + dest='input_root_paths') + mojom_parser_args, _ = argparser.parse_known_args(args=extra_args) + + input_roots = list(map(os.path.abspath, mojom_parser_args.input_root_paths)) + output_root = os.path.abspath(mojom_parser_args.output_root) + processed_inputs = set() + _process_build_metadata_json(mojom_parser_args.check_imports, input_roots, + output_root, output_files, processed_inputs) + + # Rebase paths onto rewrapper exec root. + return map(lambda dep: os.path.normpath(os.path.relpath(dep, exec_root)), + processed_inputs) + + +def main(): + # Set up argparser with some rewrapper flags. + argparser = argparse.ArgumentParser(description='rewrapper executor for gn', + allow_abbrev=False) + argparser.add_argument('--custom_processor', + type=CustomProcessor, + choices=list(CustomProcessor)) + argparser.add_argument('rewrapper_path') + argparser.add_argument('--input_list_paths') + argparser.add_argument('--output_list_paths') + argparser.add_argument('--exec_root') + parsed_args, extra_args = argparser.parse_known_args() + + # This script expects to be calling rewrapper. + args = [parsed_args.rewrapper_path] + + # Get the output files list. + output_files = set() + with open(parsed_args.output_list_paths, 'r') as file: + for line in file: + output_files.add(line.rstrip('\n')) + + # Scan for and add explicit inputs for rewrapper if necessary. + # These should be in a new input list paths file, as using --inputs can fail + # if the list is extremely large. + if parsed_args.custom_processor == CustomProcessor.mojom_parser: + root, ext = os.path.splitext(parsed_args.input_list_paths) + extra_inputs = _get_mojom_parser_inputs(parsed_args.exec_root, output_files, + extra_args) + extra_input_list_path = '%s__extra%s' % (root, ext) + with open(extra_input_list_path, 'w') as file: + with open(parsed_args.input_list_paths, 'r') as inputs: + file.write(inputs.read()) + file.write("\n".join(extra_inputs)) + args += ["--input_list_paths=%s" % extra_input_list_path] + else: + args += ["--input_list_paths=%s" % parsed_args.input_list_paths] + + # Filter out --custom_processor= which is a flag for this script, + # and filter out --input_list_paths= because we replace it above. + # Pass on the rest of the args to rewrapper. 
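+  # (Illustrative, with assumed flag values: given
+  #   ['--custom_processor=mojom_parser', '--input_list_paths=in.rsp',
+  #    '-cfg=cc.cfg']
+  #  only '-cfg=cc.cfg' survives the two filters below.)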
+ args_rest = filter(lambda arg: '--custom_processor=' not in arg, sys.argv[2:]) + args += filter(lambda arg: '--input_list_paths=' not in arg, args_rest) + + # Run rewrapper. + proc = subprocess.run(args) + return proc.returncode + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/util/android_chrome_version.py b/util/android_chrome_version.py new file mode 100755 index 000000000000..151081af209e --- /dev/null +++ b/util/android_chrome_version.py @@ -0,0 +1,354 @@ +#!/usr/bin/env python3 +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Different build variants of Chrome for Android have different version codes. + +For targets that have the same package name (e.g. Chrome, Chrome Modern, +Monochrome, Trichrome), Play Store considers them the same app and will push the +supported app with the highest version code to devices. Note that Play Store +does not support hosting two different apps with same version code and package +name. + +Each version code generated by this script will be used by one or more APKs. + +Webview channels must have unique version codes for a couple reasons: +a) Play Store does not support having the same version code for different + versions of a package. Without unique codes, promoting a beta apk to stable + would require first removing the beta version. +b) Firebase project support (used by official builders) requires unique + [version code + package name]. + We cannot add new webview package names for new channels because webview + packages are allowlisted by Android as webview providers. + +WEBVIEW_STABLE, WEBVIEW_BETA, WEBVIEW_DEV are all used for standalone webview, +whereas the others are used for various chrome APKs. + +TRICHROME_BETA is used for TrichromeChrome, TrichromeWebView, and +TrichromeLibrary when these are compiled to use the stable package name. Similar +to how WEBVIEW_STABLE/WEBVIEW_BETA work, this allows users to opt into the open +Beta Track for the stable package. When Trichrome is configured to use a +distinct package name for the Beta package, the version code will use TRICHROME +instead of TRICHROME_BETA. + +Note that a package digit of '3' for Webview is reserved for Trichrome Webview. +The same versionCode is used for both Trichrome Chrome and Trichrome Webview. + +Version code values are constructed like this: + + {full BUILD number}{3 digits: PATCH}{1 digit: package}{1 digit: ABIs}. + +For example: + + Build 3721, patch 0, ChromeModern (1), on ARM64 (5): 372100015 + Build 3721, patch 9, Monochrome (2), on ARM (0): 372100920 + +""" + +import argparse +from collections import namedtuple + +# Package name version bits. +_PACKAGE_NAMES = { + 'CHROME': 0, + 'CHROME_MODERN': 10, + 'MONOCHROME': 20, + 'TRICHROME': 30, + 'TRICHROME_BETA': 40, + 'TRICHROME_AUTO': 50, + 'WEBVIEW_STABLE': 0, + 'WEBVIEW_BETA': 10, + 'WEBVIEW_DEV': 20, +} +""" "Next" builds get +500 on their patch number. + +This ensures that they are considered "newer" than any non-next build of the +same branch number; this is a workaround for Android requiring a total ordering +of versions when we only really have a partial ordering. This assumes that the +actual patch number will never reach 500, which has never even come close in +the past. +""" +_NEXT_BUILD_VERSION_CODE_DIFF = 50000 +"""List of version numbers to be created for each build configuration. 
+Tuple format: + + (version code name), (package name), (supported ABIs) + +Here, (supported ABIs) is referring to the combination of browser ABI and +webview library ABI present in a particular APK. For example, 64_32 implies a +64-bit browser with an extra 32-bit Webview library. See also +_ABIS_TO_DIGIT_MASK. +""" +_APKS = { + '32': [ + ('CHROME', 'CHROME', '32'), + ('CHROME_MODERN', 'CHROME_MODERN', '32'), + ('MONOCHROME', 'MONOCHROME', '32'), + ('TRICHROME', 'TRICHROME', '32'), + ('TRICHROME_BETA', 'TRICHROME_BETA', '32'), + ('WEBVIEW_STABLE', 'WEBVIEW_STABLE', '32'), + ('WEBVIEW_BETA', 'WEBVIEW_BETA', '32'), + ('WEBVIEW_DEV', 'WEBVIEW_DEV', '32'), + ], + '64': [ + ('CHROME', 'CHROME', '64'), + ('CHROME_MODERN', 'CHROME_MODERN', '64'), + ('MONOCHROME', 'MONOCHROME', '32_64'), + ('MONOCHROME_32', 'MONOCHROME', '32'), + ('MONOCHROME_32_64', 'MONOCHROME', '32_64'), + ('MONOCHROME_64_32', 'MONOCHROME', '64_32'), + ('MONOCHROME_64', 'MONOCHROME', '64'), + ('TRICHROME', 'TRICHROME', '32_64'), + ('TRICHROME_32', 'TRICHROME', '32'), + ('TRICHROME_32_64', 'TRICHROME', '32_64'), + ('TRICHROME_64_32', 'TRICHROME', '64_32'), + ('TRICHROME_64_32_HIGH', 'TRICHROME', '64_32_high'), + ('TRICHROME_64', 'TRICHROME', '64'), + ('TRICHROME_AUTO_64_32', 'TRICHROME_AUTO', '64_32'), + ('TRICHROME_BETA', 'TRICHROME_BETA', '32_64'), + ('TRICHROME_32_BETA', 'TRICHROME_BETA', '32'), + ('TRICHROME_32_64_BETA', 'TRICHROME_BETA', '32_64'), + ('TRICHROME_64_32_BETA', 'TRICHROME_BETA', '64_32'), + ('TRICHROME_64_32_HIGH_BETA', 'TRICHROME_BETA', '64_32_high'), + ('TRICHROME_64_BETA', 'TRICHROME_BETA', '64'), + ('WEBVIEW_STABLE', 'WEBVIEW_STABLE', '32_64'), + ('WEBVIEW_BETA', 'WEBVIEW_BETA', '32_64'), + ('WEBVIEW_DEV', 'WEBVIEW_DEV', '32_64'), + ('WEBVIEW_32_STABLE', 'WEBVIEW_STABLE', '32'), + ('WEBVIEW_32_BETA', 'WEBVIEW_BETA', '32'), + ('WEBVIEW_32_DEV', 'WEBVIEW_DEV', '32'), + ('WEBVIEW_64_STABLE', 'WEBVIEW_STABLE', '64'), + ('WEBVIEW_64_BETA', 'WEBVIEW_BETA', '64'), + ('WEBVIEW_64_DEV', 'WEBVIEW_DEV', '64'), + ] +} + +# Splits input build config architecture to manufacturer and bitness. +_ARCH_TO_MFG_AND_BITNESS = { + 'arm': ('arm', '32'), + 'arm64': ('arm', '64'), + 'x86': ('intel', '32'), + 'x64': ('intel', '64'), +} + +# Expose the available choices to other scripts. +ARCH_CHOICES = _ARCH_TO_MFG_AND_BITNESS.keys() +""" +The architecture preference is encoded into the version_code for devices +that support multiple architectures. (exploiting play store logic that pushes +apk with highest version code) + +Detail: +Many Android devices support multiple architectures, and can run applications +built for any of them; the Play Store considers all of the supported +architectures compatible and does not, itself, have any preference for which +is "better". The common cases here: + +- All production arm64 devices can also run arm +- All production x64 devices can also run x86 +- Pretty much all production x86/x64 devices can also run arm (via a binary + translator) + +Since the Play Store has no particular preferences, you have to encode your own +preferences into the ordering of the version codes. 
There's a few relevant +things here: + +- For any android app, it's theoretically preferable to ship a 64-bit version to + 64-bit devices if it exists, because the 64-bit architectures are supposed to + be "better" than their 32-bit predecessors (unfortunately this is not always + true due to the effect on memory usage, but we currently deal with this by + simply not shipping a 64-bit version *at all* on the configurations where we + want the 32-bit version to be used). +- For any android app, it's definitely preferable to ship an x86 version to x86 + devices if it exists instead of an arm version, because running things through + the binary translator is a performance hit. +- For WebView, Monochrome, and Trichrome specifically, they are a special class + of APK called "multiarch" which means that they actually need to *use* more + than one architecture at runtime (rather than simply being compatible with + more than one). The 64-bit builds of these multiarch APKs contain both 32-bit + and 64-bit code, so that Webview is available for both ABIs. If you're + multiarch you *must* have a version that supports both 32-bit and 64-bit + version on a 64-bit device, otherwise it won't work properly. So, the 64-bit + version needs to be a higher versionCode, as otherwise a 64-bit device would + prefer the 32-bit version that does not include any 64-bit code, and fail. +""" + + +def _GetAbisToDigitMask(build_number): + """Return the correct digit mask based on build number. + + Updated from build 5750: Some intel devices advertise support for arm, + so arm codes must be lower than x86 codes to prevent providing an + arm-optimized build to intel devices. + + Cherry-picked to 5735 to support releasing the new + version code schema earlier. + + Returns: + A dictionary of architecture mapped to bitness + mapped to version code suffix. + """ + + if build_number < 5750 and build_number != 5735: + return { + 'arm': { + '32': 0, + '32_64': 3, + '64_32': 4, + '64': 5, + '64_32_high': 9, + }, + 'intel': { + '32': 1, + '32_64': 6, + '64_32': 7, + '64': 8, + }, + } + return { + 'arm': { + '32': 0, + '32_64': 1, + '64_32': 2, + '64_32_high': 3, + '64': 4, + }, + 'intel': { + '32': 6, + '32_64': 7, + '64_32': 8, + '64': 9, + }, + } + + +VersionCodeComponents = namedtuple('VersionCodeComponents', [ + 'build_number', + 'patch_number', + 'package_name', + 'abi', + 'is_next_build', +]) + + +def TranslateVersionCode(version_code, is_webview=False): + """Translates a version code to its component parts. + + Returns: + A 5-tuple (VersionCodeComponents) with the form: + - Build number - integer + - Patch number - integer + - Package name - string + - ABI - string : if the build is 32_64 or 64_32 or 64, that is just + appended to 'arm' or 'x86' with an underscore + - Whether the build is a "next" build - boolean + + So, for build 100.0.5678.99, built for Monochrome on arm 64_32, not a next + build, you should get: + 5678, 99, 'MONOCHROME', 'arm_64_32', False + """ + if len(version_code) == 9: + build_number = int(version_code[:4]) + else: + # At one branch per day, we'll hit 5 digits in the year 2035. + build_number = int(version_code[:5]) + + is_next_build = False + patch_number_plus_extra = int(version_code[-5:]) + if patch_number_plus_extra >= _NEXT_BUILD_VERSION_CODE_DIFF: + is_next_build = True + patch_number_plus_extra -= _NEXT_BUILD_VERSION_CODE_DIFF + patch_number = patch_number_plus_extra // 100 + + # From branch 3992 the name and abi bits in the version code are swapped. 
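+  # (Worked example, using a value from the unit tests: '484400020' has
+  #  build 4844 >= 3992, so abi_digit = 0 and package_digit = 2, i.e.
+  #  MONOCHROME on 32-bit arm under the pre-5750 mask above.)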
+ if build_number >= 3992: + abi_digit = int(version_code[-1]) + package_digit = int(version_code[-2]) + else: + abi_digit = int(version_code[-2]) + package_digit = int(version_code[-1]) + + # Before branch 4844 we added 5 to the package digit to indicate a 'next' + # build. + if build_number < 4844 and package_digit >= 5: + is_next_build = True + package_digit -= 5 + + for package, number in _PACKAGE_NAMES.items(): + if number == package_digit * 10: + if is_webview == ('WEBVIEW' in package): + package_name = package + break + + for arch, bitness_to_number in _GetAbisToDigitMask(build_number).items(): + for bitness, number in bitness_to_number.items(): + if abi_digit == number: + abi = arch if arch != 'intel' else 'x86' + if bitness != '32': + abi += '_' + bitness + break + + return VersionCodeComponents(build_number, patch_number, package_name, abi, + is_next_build) + + +def GenerateVersionCodes(version_values, arch, is_next_build): + """Build dict of version codes for the specified build architecture. Eg: + + { + 'CHROME_VERSION_CODE': '378100010', + 'MONOCHROME_VERSION_CODE': '378100013', + ... + } + + versionCode values are built like this: + {full BUILD int}{3 digits: PATCH}{1 digit: package}{1 digit: ABIs}. + + MAJOR and MINOR values are not used for generating versionCode. + - MINOR is always 0. It was used for something long ago in Chrome's history + but has not been used since, and has never been nonzero on Android. + - MAJOR is cosmetic and controlled by the release managers. MAJOR and BUILD + always have reasonable sort ordering: for two version codes A and B, it's + always the case that (A.MAJOR < B.MAJOR) implies (A.BUILD < B.BUILD), and + that (A.MAJOR > B.MAJOR) implies (A.BUILD > B.BUILD). This property is just + maintained by the humans who set MAJOR. + + Thus, this method is responsible for the final two digits of versionCode. + """ + + base_version_code = int( + '%s%03d00' % (version_values['BUILD'], int(version_values['PATCH']))) + + if is_next_build: + base_version_code += _NEXT_BUILD_VERSION_CODE_DIFF + + mfg, bitness = _ARCH_TO_MFG_AND_BITNESS[arch] + + version_codes = {} + + abi_to_digit_mask = _GetAbisToDigitMask(int(version_values['BUILD'])) + for apk, package, abis in _APKS[bitness]: + if abis == '64_32_high' and arch != 'arm64': + continue + abi_part = abi_to_digit_mask[mfg][abis] + package_part = _PACKAGE_NAMES[package] + + version_code_name = apk + '_VERSION_CODE' + version_code_val = base_version_code + package_part + abi_part + version_codes[version_code_name] = str(version_code_val) + + return version_codes + + +def main(): + parser = argparse.ArgumentParser(description='Parses version codes.') + parser.add_argument('version_code', help='Version code (e.g. 529700010).') + parser.add_argument('--webview', + action='store_true', + help='Whether this is a webview version code.') + args = parser.parse_args() + print(TranslateVersionCode(args.version_code, is_webview=args.webview)) + + +if __name__ == '__main__': + main() diff --git a/util/android_chrome_version_test.py b/util/android_chrome_version_test.py new file mode 100644 index 000000000000..4ebd007a24de --- /dev/null +++ b/util/android_chrome_version_test.py @@ -0,0 +1,958 @@ +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
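+
+# A worked example of the version code layout these tests exercise (see
+# GenerateVersionCodes): BUILD 4844 with PATCH 0 yields the base code
+# 484400000; MONOCHROME then adds its package offset (20) and the arm 32-bit
+# ABI digit (0), giving the expected '484400020'.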
+ +import unittest + +from android_chrome_version import GenerateVersionCodes +from android_chrome_version import TranslateVersionCode + +EXAMPLE_VERSION_VALUES = { + 'MAJOR': '99', + 'MINOR': '0', + 'BUILD': '4844', + 'PATCH': '0', +} + +EXAMPLE_GROUPED_VERSION_VALUES = { + 'MAJOR': '99', + 'MINOR': '0', + 'BUILD': '5750', + 'PATCH': '0', +} + + +class _VersionTest(unittest.TestCase): + """Unittests for the android_chrome_version module. + """ + + def testGenerateVersionCodesAndroidChrome(self): + """Assert it gives correct values for standard/example inputs""" + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=False) + + chrome_version_code = output['CHROME_VERSION_CODE'] + + self.assertEqual(chrome_version_code, '484400000') + + def testGenerateVersionCodesAndroidChromeModern(self): + """Assert it gives correct values for standard/example inputs""" + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=False) + + chrome_modern_version_code = output['CHROME_MODERN_VERSION_CODE'] + + self.assertEqual(chrome_modern_version_code, '484400010') + + def testGenerateVersionCodesAndroidMonochrome(self): + """Assert it gives correct values for standard/example inputs""" + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=False) + + monochrome_version_code = output['MONOCHROME_VERSION_CODE'] + + self.assertEqual(monochrome_version_code, '484400020') + + def testGenerateVersionCodesAndroidTrichrome(self): + """Assert it gives correct values for standard/example inputs""" + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=False) + + trichrome_version_code = output['TRICHROME_VERSION_CODE'] + + self.assertEqual(trichrome_version_code, '484400030') + + def testGenerateVersionCodesAndroidWebviewStable(self): + """Assert it gives correct values for standard/example inputs""" + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=False) + + webview_stable_version_code = output['WEBVIEW_STABLE_VERSION_CODE'] + + self.assertEqual(webview_stable_version_code, '484400000') + + def testGenerateVersionCodesAndroidWebviewBeta(self): + """Assert it gives correct values for standard/example inputs""" + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=False) + + webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE'] + + self.assertEqual(webview_beta_version_code, '484400010') + + def testGenerateVersionCodesAndroidWebviewDev(self): + """Assert it gives correct values for standard/example inputs""" + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=False) + + webview_dev_version_code = output['WEBVIEW_DEV_VERSION_CODE'] + + self.assertEqual(webview_dev_version_code, '484400020') + + def testGenerateVersionCodesAndroidNextBuild(self): + """Assert it handles "next" builds correctly""" + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=True) + + # Get just a sample of values + chrome_version_code = output['CHROME_VERSION_CODE'] + monochrome_version_code = output['MONOCHROME_VERSION_CODE'] + webview_stable_version_code = output['WEBVIEW_STABLE_VERSION_CODE'] + webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE'] + + self.assertEqual(chrome_version_code, '484450000') + self.assertEqual(monochrome_version_code, '484450020') + self.assertEqual(webview_stable_version_code, '484450000') + self.assertEqual(webview_beta_version_code, 
'484450010') + + def testGenerateVersionCodesAndroidArchArm(self): + """Assert it handles different architectures correctly. + + Version codes for different builds need to be distinct and maintain a + certain ordering. + See docs in android_chrome_version._ABIS_TO_BIT_MASK for + reasoning. + """ + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=False) + arch_chrome_version_code = output['CHROME_VERSION_CODE'] + + self.assertEqual(arch_chrome_version_code, '484400000') + + def testGenerateVersionCodesAndroidArchX86(self): + """Assert it handles different architectures correctly. + + Version codes for different builds need to be distinct and maintain a + certain ordering. + See docstring on android_chrome_version._ABIS_TO_BIT_MASK for + reasoning. + """ + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='x86', + is_next_build=False) + arch_chrome_version_code = output['CHROME_VERSION_CODE'] + + self.assertEqual(arch_chrome_version_code, '484400001') + + def testGenerateVersionCodesAndroidArchArm64(self): + """Assert it handles different architectures correctly. + + Version codes for different builds need to be distinct and maintain a + certain ordering. + See docstring on android_chrome_version._ABIS_TO_BIT_MASK for + reasoning. + """ + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm64', + is_next_build=False) + arch_chrome_version_code = output['CHROME_VERSION_CODE'] + + self.assertEqual(arch_chrome_version_code, '484400005') + + def testGenerateVersionCodesAndroidArchArm64Variants(self): + """Assert it handles 64-bit-specific additional version codes correctly. + + Some additional version codes are generated for 64-bit architectures. + See docstring on android_chrome_version.ARCH64_APK_VARIANTS for more info. 
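+
+    For the example BUILD 4844 this exercises the pre-5750 arm digit mask
+    (32 -> 0, 32_64 -> 3, 64_32 -> 4, 64 -> 5, 64_32_high -> 9); the expected
+    values below carry those digits in their last position.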
+ """ + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm64', + is_next_build=False) + arch_monochrome_version_code = output['MONOCHROME_VERSION_CODE'] + arch_monochrome_32_version_code = output['MONOCHROME_32_VERSION_CODE'] + arch_monochrome_32_64_version_code = output['MONOCHROME_32_64_VERSION_CODE'] + arch_monochrome_64_32_version_code = output['MONOCHROME_64_32_VERSION_CODE'] + arch_monochrome_64_version_code = output['MONOCHROME_64_VERSION_CODE'] + arch_trichrome_version_code = output['TRICHROME_VERSION_CODE'] + arch_trichrome_32_version_code = output['TRICHROME_32_VERSION_CODE'] + arch_trichrome_32_64_version_code = output['TRICHROME_32_64_VERSION_CODE'] + arch_trichrome_64_32_version_code = output['TRICHROME_64_32_VERSION_CODE'] + arch_trichrome_64_32_high_version_code = output[ + 'TRICHROME_64_32_HIGH_VERSION_CODE'] + arch_trichrome_64_version_code = output['TRICHROME_64_VERSION_CODE'] + arch_trichrome_auto_64_32_version_code = output[ + 'TRICHROME_AUTO_64_32_VERSION_CODE'] + + self.assertEqual(arch_monochrome_32_version_code, '484400020') + self.assertEqual(arch_monochrome_32_64_version_code, '484400023') + self.assertEqual(arch_monochrome_version_code, '484400023') + self.assertEqual(arch_monochrome_64_32_version_code, '484400024') + self.assertEqual(arch_monochrome_64_version_code, '484400025') + self.assertEqual(arch_trichrome_32_version_code, '484400030') + self.assertEqual(arch_trichrome_32_64_version_code, '484400033') + self.assertEqual(arch_trichrome_version_code, '484400033') + self.assertEqual(arch_trichrome_64_32_version_code, '484400034') + self.assertEqual(arch_trichrome_64_32_high_version_code, '484400039') + self.assertEqual(arch_trichrome_64_version_code, '484400035') + self.assertEqual(arch_trichrome_auto_64_32_version_code, '484400054') + + def testGenerateVersionCodesAndroidArchX64(self): + """Assert it handles different architectures correctly. + + Version codes for different builds need to be distinct and maintain a + certain ordering. + See docstring on android_chrome_version._ABIS_TO_BIT_MASK for + reasoning. + """ + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='x64', + is_next_build=False) + arch_chrome_version_code = output['CHROME_VERSION_CODE'] + + self.assertEqual(arch_chrome_version_code, '484400008') + + def testGenerateVersionCodesAndroidArchX64Variants(self): + """Assert it handles 64-bit-specific additional version codes correctly. + + Some additional version codes are generated for 64-bit architectures. + See docstring on android_chrome_version.ARCH64_APK_VARIANTS for more info. 
+ """ + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='x64', + is_next_build=False) + arch_monochrome_32_version_code = output['MONOCHROME_32_VERSION_CODE'] + arch_monochrome_32_64_version_code = output['MONOCHROME_32_64_VERSION_CODE'] + arch_monochrome_version_code = output['MONOCHROME_VERSION_CODE'] + arch_monochrome_64_32_version_code = output['MONOCHROME_64_32_VERSION_CODE'] + arch_monochrome_64_version_code = output['MONOCHROME_64_VERSION_CODE'] + arch_trichrome_32_version_code = output['TRICHROME_32_VERSION_CODE'] + arch_trichrome_32_64_version_code = output['TRICHROME_32_64_VERSION_CODE'] + arch_trichrome_version_code = output['TRICHROME_VERSION_CODE'] + arch_trichrome_64_32_version_code = output['TRICHROME_64_32_VERSION_CODE'] + arch_trichrome_64_version_code = output['TRICHROME_64_VERSION_CODE'] + arch_trichrome_auto_64_32_version_code = output[ + 'TRICHROME_AUTO_64_32_VERSION_CODE'] + + self.assertEqual(arch_monochrome_32_version_code, '484400021') + self.assertEqual(arch_monochrome_32_64_version_code, '484400026') + self.assertEqual(arch_monochrome_version_code, '484400026') + self.assertEqual(arch_monochrome_64_32_version_code, '484400027') + self.assertEqual(arch_monochrome_64_version_code, '484400028') + self.assertEqual(arch_trichrome_32_version_code, '484400031') + self.assertEqual(arch_trichrome_32_64_version_code, '484400036') + self.assertEqual(arch_trichrome_version_code, '484400036') + self.assertEqual(arch_trichrome_64_32_version_code, '484400037') + self.assertEqual(arch_trichrome_64_version_code, '484400038') + self.assertEqual(arch_trichrome_auto_64_32_version_code, '484400057') + + def testGenerateVersionCodesAndroidArchOrderArm(self): + """Assert it handles different architectures correctly. + + Version codes for different builds need to be distinct and maintain a + certain ordering. + See docstring on android_chrome_version._ABIS_TO_BIT_MASK for + reasoning. + + Test arm-related values. + """ + arm_output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=False) + arm64_output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm64', + is_next_build=False) + + arm_chrome_version_code = arm_output['CHROME_VERSION_CODE'] + arm64_chrome_version_code = arm64_output['CHROME_VERSION_CODE'] + + self.assertLess(arm_chrome_version_code, arm64_chrome_version_code) + + def testGenerateVersionCodesAndroidArchOrderX86(self): + """Assert it handles different architectures correctly. + + Version codes for different builds need to be distinct and maintain a + certain ordering. + See docstring on android_chrome_version._ABIS_TO_BIT_MASK for + reasoning. + + Test x86-related values. + """ + x86_output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='x86', + is_next_build=False) + x64_output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='x64', + is_next_build=False) + + x86_chrome_version_code = x86_output['CHROME_VERSION_CODE'] + x64_chrome_version_code = x64_output['CHROME_VERSION_CODE'] + + self.assertLess(x86_chrome_version_code, x64_chrome_version_code) + + def testGenerateVersionCodesAndroidWebviewChannelOrderBeta(self): + """Assert webview beta channel is higher than stable. + + The channel-specific version codes for standalone webview needs to follow + the order stable < beta < dev. + + This allows that if a user opts into beta track, they will always have the + beta apk, including any finch experiments targeted at beta users, even when + beta and stable channels are otherwise on the same version. 
+ """ + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=False) + + webview_stable_version_code = output['WEBVIEW_STABLE_VERSION_CODE'] + webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE'] + + self.assertGreater(webview_beta_version_code, webview_stable_version_code) + + def testGenerateVersionCodesAndroidWebviewChannelOrderDev(self): + """Assert webview dev channel is higher than beta. + + The channel-specific version codes for standalone webview needs to follow + the order stable < beta < dev. + + This allows that if a user opts into dev track, they will always have the + dev apk, including any finch experiments targeted at dev users, even when + dev and beta channels are otherwise on the same version. + """ + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=False) + + webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE'] + webview_dev_version_code = output['WEBVIEW_DEV_VERSION_CODE'] + + self.assertGreater(webview_dev_version_code, webview_beta_version_code) + + def testGenerateVersionCodesTrichromeChannelOrderBeta(self): + """Assert Trichrome beta channel is higher than stable. + + When Trichrome channels are compiled to use the stable channel's package + name, their version codes need to follow the order stable < beta. + + This allows that if a user opts into beta track, they will always have the + beta apk, including any finch experiments targeted at beta users, even when + beta and stable channels are otherwise on the same version. + """ + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=False) + + trichrome_stable_version_code = output['TRICHROME_VERSION_CODE'] + trichrome_beta_version_code = output['TRICHROME_BETA_VERSION_CODE'] + + self.assertGreater(trichrome_beta_version_code, + trichrome_stable_version_code) + + +class _VersionGroupedTest(unittest.TestCase): + """Unittests for the android_chrome_version module (grouped). 
+ """ + def testGenerateVersionCodesAndroidChrome(self): + """Assert it gives correct values for standard/example inputs""" + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + + chrome_version_code = output['CHROME_VERSION_CODE'] + + self.assertEqual(chrome_version_code, '575000000') + + def testGenerateVersionCodesAndroidChromeModern(self): + """Assert it gives correct values for standard/example inputs""" + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + + chrome_modern_version_code = output['CHROME_MODERN_VERSION_CODE'] + + self.assertEqual(chrome_modern_version_code, '575000010') + + def testGenerateVersionCodesAndroidMonochrome(self): + """Assert it gives correct values for standard/example inputs""" + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + + monochrome_version_code = output['MONOCHROME_VERSION_CODE'] + + self.assertEqual(monochrome_version_code, '575000020') + + def testGenerateVersionCodesAndroidTrichrome(self): + """Assert it gives correct values for standard/example inputs""" + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + + trichrome_version_code = output['TRICHROME_VERSION_CODE'] + + self.assertEqual(trichrome_version_code, '575000030') + + def testGenerateVersionCodesAndroidWebviewStable(self): + """Assert it gives correct values for standard/example inputs""" + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + + webview_stable_version_code = output['WEBVIEW_STABLE_VERSION_CODE'] + + self.assertEqual(webview_stable_version_code, '575000000') + + def testGenerateVersionCodesAndroidWebviewBeta(self): + """Assert it gives correct values for standard/example inputs""" + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + + webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE'] + + self.assertEqual(webview_beta_version_code, '575000010') + + def testGenerateVersionCodesAndroidWebviewDev(self): + """Assert it gives correct values for standard/example inputs""" + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + + webview_dev_version_code = output['WEBVIEW_DEV_VERSION_CODE'] + + self.assertEqual(webview_dev_version_code, '575000020') + + def testGenerateVersionCodesAndroidNextBuild(self): + """Assert it handles "next" builds correctly""" + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=True) + + # Get just a sample of values + chrome_version_code = output['CHROME_VERSION_CODE'] + monochrome_version_code = output['MONOCHROME_VERSION_CODE'] + webview_stable_version_code = output['WEBVIEW_STABLE_VERSION_CODE'] + webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE'] + + self.assertEqual(chrome_version_code, '575050000') + self.assertEqual(monochrome_version_code, '575050020') + self.assertEqual(webview_stable_version_code, '575050000') + self.assertEqual(webview_beta_version_code, '575050010') + + def testGenerateVersionCodesAndroidArchArm(self): + """Assert it handles different architectures correctly. + + Version codes for different builds need to be distinct and maintain a + certain ordering. + See docs in android_chrome_version._ABIS_TO_BIT_MASK for + reasoning. 
+ """ + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + arch_chrome_version_code = output['CHROME_VERSION_CODE'] + + self.assertEqual(arch_chrome_version_code, '575000000') + + def testGenerateVersionCodesAndroidArchX86(self): + """Assert it handles different architectures correctly. + + Version codes for different builds need to be distinct and maintain a + certain ordering. + See docstring on android_chrome_version._ABIS_TO_BIT_MASK for + reasoning. + """ + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='x86', + is_next_build=False) + arch_chrome_version_code = output['CHROME_VERSION_CODE'] + + self.assertEqual(arch_chrome_version_code, '575000006') + + def testGenerateVersionCodesAndroidArchArm64(self): + """Assert it handles different architectures correctly. + + Version codes for different builds need to be distinct and maintain a + certain ordering. + See docstring on android_chrome_version._ABIS_TO_BIT_MASK for + reasoning. + """ + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm64', + is_next_build=False) + arch_chrome_version_code = output['CHROME_VERSION_CODE'] + + self.assertEqual(arch_chrome_version_code, '575000004') + + def testGenerateVersionCodesAndroidArchArm64Variants(self): + """Assert it handles 64-bit-specific additional version codes correctly. + + Some additional version codes are generated for 64-bit architectures. + See docstring on android_chrome_version.ARCH64_APK_VARIANTS for more info. + """ + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm64', + is_next_build=False) + arch_monochrome_version_code = output['MONOCHROME_VERSION_CODE'] + arch_monochrome_32_version_code = output['MONOCHROME_32_VERSION_CODE'] + arch_monochrome_32_64_version_code = output['MONOCHROME_32_64_VERSION_CODE'] + arch_monochrome_64_32_version_code = output['MONOCHROME_64_32_VERSION_CODE'] + arch_monochrome_64_version_code = output['MONOCHROME_64_VERSION_CODE'] + arch_trichrome_version_code = output['TRICHROME_VERSION_CODE'] + arch_trichrome_32_version_code = output['TRICHROME_32_VERSION_CODE'] + arch_trichrome_32_64_version_code = output['TRICHROME_32_64_VERSION_CODE'] + arch_trichrome_64_32_version_code = output['TRICHROME_64_32_VERSION_CODE'] + arch_trichrome_64_version_code = output['TRICHROME_64_VERSION_CODE'] + arch_trichrome_auto_64_32_version_code = output[ + 'TRICHROME_AUTO_64_32_VERSION_CODE'] + + self.assertEqual(arch_monochrome_32_version_code, '575000020') + self.assertEqual(arch_monochrome_32_64_version_code, '575000021') + self.assertEqual(arch_monochrome_version_code, '575000021') + self.assertEqual(arch_monochrome_64_32_version_code, '575000022') + self.assertEqual(arch_monochrome_64_version_code, '575000024') + self.assertEqual(arch_trichrome_32_version_code, '575000030') + self.assertEqual(arch_trichrome_32_64_version_code, '575000031') + self.assertEqual(arch_trichrome_version_code, '575000031') + self.assertEqual(arch_trichrome_64_32_version_code, '575000032') + self.assertEqual(arch_trichrome_64_version_code, '575000034') + self.assertEqual(arch_trichrome_auto_64_32_version_code, '575000052') + + def testGenerateVersionCodesAndroidArchX64(self): + """Assert it handles different architectures correctly. + + Version codes for different builds need to be distinct and maintain a + certain ordering. + See docstring on android_chrome_version._ABIS_TO_BIT_MASK for + reasoning. 
+ """ + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='x64', + is_next_build=False) + arch_chrome_version_code = output['CHROME_VERSION_CODE'] + + self.assertEqual(arch_chrome_version_code, '575000009') + + def testGenerateVersionCodesAndroidArchX64Variants(self): + """Assert it handles 64-bit-specific additional version codes correctly. + + Some additional version codes are generated for 64-bit architectures. + See docstring on android_chrome_version.ARCH64_APK_VARIANTS for more info. + """ + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='x64', + is_next_build=False) + arch_monochrome_32_version_code = output['MONOCHROME_32_VERSION_CODE'] + arch_monochrome_32_64_version_code = output['MONOCHROME_32_64_VERSION_CODE'] + arch_monochrome_version_code = output['MONOCHROME_VERSION_CODE'] + arch_monochrome_64_32_version_code = output['MONOCHROME_64_32_VERSION_CODE'] + arch_monochrome_64_version_code = output['MONOCHROME_64_VERSION_CODE'] + arch_trichrome_32_version_code = output['TRICHROME_32_VERSION_CODE'] + arch_trichrome_32_64_version_code = output['TRICHROME_32_64_VERSION_CODE'] + arch_trichrome_version_code = output['TRICHROME_VERSION_CODE'] + arch_trichrome_64_32_version_code = output['TRICHROME_64_32_VERSION_CODE'] + arch_trichrome_64_version_code = output['TRICHROME_64_VERSION_CODE'] + arch_trichrome_auto_64_32_version_code = output[ + 'TRICHROME_AUTO_64_32_VERSION_CODE'] + + self.assertEqual(arch_monochrome_32_version_code, '575000026') + self.assertEqual(arch_monochrome_32_64_version_code, '575000027') + self.assertEqual(arch_monochrome_version_code, '575000027') + self.assertEqual(arch_monochrome_64_32_version_code, '575000028') + self.assertEqual(arch_monochrome_64_version_code, '575000029') + self.assertEqual(arch_trichrome_32_version_code, '575000036') + self.assertEqual(arch_trichrome_32_64_version_code, '575000037') + self.assertEqual(arch_trichrome_version_code, '575000037') + self.assertEqual(arch_trichrome_64_32_version_code, '575000038') + self.assertEqual(arch_trichrome_64_version_code, '575000039') + self.assertEqual(arch_trichrome_auto_64_32_version_code, '575000058') + + def testGenerateVersionCodesAndroidArchOrderArm(self): + """Assert it handles different architectures correctly. + + Version codes for different builds need to be distinct and maintain a + certain ordering. + See docstring on android_chrome_version._ABIS_TO_BIT_MASK for + reasoning. + + Test arm-related values. + """ + arm_output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + arm64_output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm64', + is_next_build=False) + + arm_chrome_version_code = arm_output['CHROME_VERSION_CODE'] + arm64_chrome_version_code = arm64_output['CHROME_VERSION_CODE'] + + self.assertLess(arm_chrome_version_code, arm64_chrome_version_code) + + def testGenerateVersionCodesAndroidArchOrderX86(self): + """Assert it handles different architectures correctly. + + Version codes for different builds need to be distinct and maintain a + certain ordering. + See docstring on android_chrome_version._ABIS_TO_BIT_MASK for + reasoning. + + Test x86-related values. 
+ """ + x86_output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='x86', + is_next_build=False) + x64_output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='x64', + is_next_build=False) + + x86_chrome_version_code = x86_output['CHROME_VERSION_CODE'] + x64_chrome_version_code = x64_output['CHROME_VERSION_CODE'] + + self.assertLess(x86_chrome_version_code, x64_chrome_version_code) + + def testGenerateVersionCodesAndroidWebviewChannelOrderBeta(self): + """Assert webview beta channel is higher than stable. + + The channel-specific version codes for standalone webview needs to follow + the order stable < beta < dev. + + This allows that if a user opts into beta track, they will always have the + beta apk, including any finch experiments targeted at beta users, even when + beta and stable channels are otherwise on the same version. + """ + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + + webview_stable_version_code = output['WEBVIEW_STABLE_VERSION_CODE'] + webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE'] + + self.assertGreater(webview_beta_version_code, webview_stable_version_code) + + def testGenerateVersionCodesAndroidWebviewChannelOrderDev(self): + """Assert webview dev channel is higher than beta. + + The channel-specific version codes for standalone webview needs to follow + the order stable < beta < dev. + + This allows that if a user opts into dev track, they will always have the + dev apk, including any finch experiments targeted at dev users, even when + dev and beta channels are otherwise on the same version. + """ + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + + webview_beta_version_code = output['WEBVIEW_BETA_VERSION_CODE'] + webview_dev_version_code = output['WEBVIEW_DEV_VERSION_CODE'] + + self.assertGreater(webview_dev_version_code, webview_beta_version_code) + + def testGenerateVersionCodesTrichromeChannelOrderBeta(self): + """Assert Trichrome beta channel is higher than stable. + + When Trichrome channels are compiled to use the stable channel's package + name, their version codes need to follow the order stable < beta. + + This allows that if a user opts into beta track, they will always have the + beta apk, including any finch experiments targeted at beta users, even when + beta and stable channels are otherwise on the same version. 
+ """ + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + + trichrome_stable_version_code = output['TRICHROME_VERSION_CODE'] + trichrome_beta_version_code = output['TRICHROME_BETA_VERSION_CODE'] + + self.assertGreater(trichrome_beta_version_code, + trichrome_stable_version_code) + + +class _VersionCodeTest(unittest.TestCase): + def testGenerateThenTranslate(self): + """Assert it gives correct values for a version code that we generated.""" + output = GenerateVersionCodes(EXAMPLE_VERSION_VALUES, + arch='arm', + is_next_build=False) + + version_code = output['MONOCHROME_VERSION_CODE'] + + build, patch, package, abi, is_next_build = TranslateVersionCode( + version_code) + self.assertEqual(build, int(EXAMPLE_VERSION_VALUES['BUILD'])) + self.assertEqual(patch, int(EXAMPLE_VERSION_VALUES['PATCH'])) + self.assertEqual(package, 'MONOCHROME') + self.assertEqual(abi, 'arm') + self.assertEqual(is_next_build, False) + + def testPre3992Translate(self): + """Test for an old build when the abi and apk bits were swapped.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '378100010') + self.assertEqual(build, 3781) + self.assertEqual(patch, 0) + self.assertEqual(package, 'CHROME') + self.assertEqual(abi, 'x86') + self.assertEqual(is_next_build, False) + + def testNextBuildTranslate(self): + """Test for a build with next.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '499961210') + self.assertEqual(build, 4999) + self.assertEqual(patch, 112) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'arm') + self.assertEqual(is_next_build, True) + + def testPre4844NextBuildTranslate(self): + """Test for a build with next when we added 50 to version code.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '400011260') + self.assertEqual(build, 4000) + self.assertEqual(patch, 112) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'arm') + self.assertEqual(is_next_build, True) + + def testPre3992NextBuildTranslate(self): + """Test for a build with next when we added 5 to version code.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '300011206') + self.assertEqual(build, 3000) + self.assertEqual(patch, 112) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'arm') + self.assertEqual(is_next_build, True) + + def testArm_64BuildTranslate(self): + """Test for a build with arm_64.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '499911215') + self.assertEqual(build, 4999) + self.assertEqual(patch, 112) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'arm_64') + self.assertEqual(is_next_build, False) + + def testArm_32_64Translate(self): + """Test for a build with arm_32_64.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '499900013') + self.assertEqual(build, 4999) + self.assertEqual(patch, 0) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'arm_32_64') + self.assertEqual(is_next_build, False) + + def testArm_64_32Translate(self): + """Test for a build with Trichrome and arm_64_32.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '499900034') + self.assertEqual(build, 4999) + self.assertEqual(patch, 0) + self.assertEqual(package, 'TRICHROME') + self.assertEqual(abi, 'arm_64_32') + self.assertEqual(is_next_build, False) + + def testArm_Auto_64_32Translate(self): + """Test for an auto build with Trichrome and 
arm_64_32.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '499900054') + self.assertEqual(build, 4999) + self.assertEqual(patch, 0) + self.assertEqual(package, 'TRICHROME_AUTO') + self.assertEqual(abi, 'arm_64_32') + self.assertEqual(is_next_build, False) + + def testArm_64_32HighTranslate(self): + """Test for a build with Trichrome and arm_64_32_high.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '534613739') + self.assertEqual(build, 5346) + self.assertEqual(patch, 137) + self.assertEqual(package, 'TRICHROME') + self.assertEqual(abi, 'arm_64_32_high') + self.assertEqual(is_next_build, False) + + def testX86_64Translate(self): + """Test for a build with x86_64.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '499900018') + self.assertEqual(build, 4999) + self.assertEqual(patch, 0) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'x86_64') + self.assertEqual(is_next_build, False) + + def testX86_32_64Translate(self): + """Test for a build with x86_32_64.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '499900016') + self.assertEqual(build, 4999) + self.assertEqual(patch, 0) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'x86_32_64') + self.assertEqual(is_next_build, False) + + def testX86_64_32Translate(self): + """Test for a build with x86_64_32.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '499900017') + self.assertEqual(build, 4999) + self.assertEqual(patch, 0) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'x86_64_32') + self.assertEqual(is_next_build, False) + + def testX86_Auto_64_32Translate(self): + """Test for an auto build with x86_64_32.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '499900057') + self.assertEqual(build, 4999) + self.assertEqual(patch, 0) + self.assertEqual(package, 'TRICHROME_AUTO') + self.assertEqual(abi, 'x86_64_32') + self.assertEqual(is_next_build, False) + + def testWebviewTranslate(self): + """Test for a build with Webview.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '499900000', is_webview=True) + self.assertEqual(build, 4999) + self.assertEqual(patch, 0) + self.assertEqual(package, 'WEBVIEW_STABLE') + self.assertEqual(abi, 'arm') + self.assertEqual(is_next_build, False) + + +class _VersionCodeGroupedTest(unittest.TestCase): + def testGenerateThenTranslate(self): + """Assert it gives correct values for a version code that we generated.""" + output = GenerateVersionCodes(EXAMPLE_GROUPED_VERSION_VALUES, + arch='arm', + is_next_build=False) + + version_code = output['MONOCHROME_VERSION_CODE'] + + build, patch, package, abi, is_next_build = TranslateVersionCode( + version_code) + self.assertEqual(build, int(EXAMPLE_GROUPED_VERSION_VALUES['BUILD'])) + self.assertEqual(patch, int(EXAMPLE_GROUPED_VERSION_VALUES['PATCH'])) + self.assertEqual(package, 'MONOCHROME') + self.assertEqual(abi, 'arm') + self.assertEqual(is_next_build, False) + + def testNextBuildTranslate(self): + """Test for a build with next.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '575061210') + self.assertEqual(build, 5750) + self.assertEqual(patch, 112) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'arm') + self.assertEqual(is_next_build, True) + + def testArm_64BuildTranslate(self): + """Test for a build with arm_64.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + 
'575011214') + self.assertEqual(build, 5750) + self.assertEqual(patch, 112) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'arm_64') + self.assertEqual(is_next_build, False) + + def testArm_32_64Translate(self): + """Test for a build with arm_32_64.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '575000011') + self.assertEqual(build, 5750) + self.assertEqual(patch, 0) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'arm_32_64') + self.assertEqual(is_next_build, False) + + def testArm_64_32Translate(self): + """Test for a build with Trichrome and arm_64_32.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '575000032') + self.assertEqual(build, 5750) + self.assertEqual(patch, 0) + self.assertEqual(package, 'TRICHROME') + self.assertEqual(abi, 'arm_64_32') + self.assertEqual(is_next_build, False) + + def testArm_Auto_64_32Translate(self): + """Test for an auto build with Trichrome and arm_64_32.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '575000052') + self.assertEqual(build, 5750) + self.assertEqual(patch, 0) + self.assertEqual(package, 'TRICHROME_AUTO') + self.assertEqual(abi, 'arm_64_32') + self.assertEqual(is_next_build, False) + + def testArm_64_32HighTranslate(self): + """Test for a build with Trichrome and arm_64_32_high.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '534613739') + self.assertEqual(build, 5346) + self.assertEqual(patch, 137) + self.assertEqual(package, 'TRICHROME') + self.assertEqual(abi, 'arm_64_32_high') + self.assertEqual(is_next_build, False) + + def testX86_64Translate(self): + """Test for a build with x86_64.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '575000019') + self.assertEqual(build, 5750) + self.assertEqual(patch, 0) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'x86_64') + self.assertEqual(is_next_build, False) + + def testX86_32_64Translate(self): + """Test for a build with x86_32_64.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '575000017') + self.assertEqual(build, 5750) + self.assertEqual(patch, 0) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'x86_32_64') + self.assertEqual(is_next_build, False) + + def testX86_64_32Translate(self): + """Test for a build with x86_64_32.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '575000018') + self.assertEqual(build, 5750) + self.assertEqual(patch, 0) + self.assertEqual(package, 'CHROME_MODERN') + self.assertEqual(abi, 'x86_64_32') + self.assertEqual(is_next_build, False) + + def testX86_Auto_64_32Translate(self): + """Test for an auto build with x86_64_32.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '575000058') + self.assertEqual(build, 5750) + self.assertEqual(patch, 0) + self.assertEqual(package, 'TRICHROME_AUTO') + self.assertEqual(abi, 'x86_64_32') + self.assertEqual(is_next_build, False) + + def testWebviewTranslate(self): + """Test for a build with Webview.""" + build, patch, package, abi, is_next_build = TranslateVersionCode( + '575000000', is_webview=True) + self.assertEqual(build, 5750) + self.assertEqual(patch, 0) + self.assertEqual(package, 'WEBVIEW_STABLE') + self.assertEqual(abi, 'arm') + self.assertEqual(is_next_build, False) + + +if __name__ == '__main__': + unittest.main() diff --git a/util/branding.gni b/util/branding.gni new file mode 100644 index 000000000000..1d4ffd68a75d --- /dev/null 
+++ b/util/branding.gni @@ -0,0 +1,45 @@ +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This exposes the Chrome branding as GN variables for use in build files. +# +# PREFER NOT TO USE THESE. The GYP build uses this kind of thing extensively. +# However, it is far better to write an action to generate a file at +# build-time with the information you need. This allows better dependency +# checking and GN will run faster. +# +# These values should only be used if you REALLY need to depend on them at +# build-time, for example, in the computation of output file names. + +import("//build/config/chrome_build.gni") + +_branding_dictionary_template = + "full_name = \"@PRODUCT_FULLNAME@\" " + + "short_name = \"@PRODUCT_SHORTNAME@\" " + + "bundle_id = \"@MAC_BUNDLE_ID@\" " + + "creator_code = \"@MAC_CREATOR_CODE@\" " + + "installer_full_name = \"@PRODUCT_INSTALLER_FULLNAME@\" " + + "installer_short_name = \"@PRODUCT_INSTALLER_SHORTNAME@\" " + + "team_id = \"@MAC_TEAM_ID@\" " + +_result = exec_script("version.py", + [ + "-f", + rebase_path(branding_file_path, root_build_dir), + "-t", + _branding_dictionary_template, + ], + "scope", + [ branding_file_path ]) + +chrome_product_full_name = _result.full_name +chrome_product_short_name = _result.short_name +chrome_product_installer_full_name = _result.installer_full_name +chrome_product_installer_short_name = _result.installer_short_name + +if (is_mac) { + chrome_mac_bundle_id = _result.bundle_id + chrome_mac_creator_code = _result.creator_code + chrome_mac_team_id = _result.team_id +} diff --git a/util/chromium_git_revision.h.in b/util/chromium_git_revision.h.in new file mode 100644 index 000000000000..365961cdc943 --- /dev/null +++ b/util/chromium_git_revision.h.in @@ -0,0 +1,8 @@ +// Copyright 2021 The Chromium Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// chromium_git_revision.h is generated from chromium_git_revision.h.in. Edit +// the source! + +#define CHROMIUM_GIT_REVISION "@@LASTCHANGE@" diff --git a/util/generate_wrapper.gni b/util/generate_wrapper.gni new file mode 100644 index 000000000000..e2ceccc9e3f0 --- /dev/null +++ b/util/generate_wrapper.gni @@ -0,0 +1,98 @@ +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Wraps a target and any of its arguments to an executable script. +# +# Many executable targets have build-time-constant arguments. This +# template allows those to be wrapped into a single, user- or bot-friendly +# script at build time. +# +# Paths to be wrapped should be relative to root_build_dir and should be +# wrapped in "@WrappedPath(...)"; see Example below. +# +# Variables: +# generator_script: Path to the script to use to perform the wrapping. +# Defaults to //build/util/generate_wrapper.py. Generally should only +# be set by other templates. +# wrapper_script: Output path. +# executable: Path to the executable to wrap. Can be a script or a +# build product. Paths can be relative to the containing gn file +# or source-absolute. +# executable_args: List of arguments to write into the wrapper. 
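+#
+# At runtime the generated wrapper rewrites each @WrappedPath(...) argument to
+# a path relative to the wrapper script's own location (see
+# generate_wrapper.py), so the wrapped arguments stay valid no matter which
+# directory the wrapper is invoked from.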
+# +# Example wrapping a checked-in script: +# generate_wrapper("sample_wrapper") { +# executable = "//for/bar/sample.py" +# wrapper_script = "$root_build_dir/bin/run_sample" +# +# _sample_argument_path = "//sample/$target_cpu/lib/sample_lib.so" +# _rebased_sample_argument_path = rebase_path( +# _sample_argument_path, +# root_build_dir) +# executable_args = [ +# "--sample-lib", "@WrappedPath(${_rebased_sample_argument_path})", +# ] +# } +# +# Example wrapping a build product: +# generate_wrapper("sample_wrapper") { +# executable = "$root_build_dir/sample_build_product" +# wrapper_script = "$root_build_dir/bin/run_sample_build_product" +# } +template("generate_wrapper") { + action(target_name) { + if (defined(invoker.generator_script)) { + script = invoker.generator_script + } else { + script = "//build/util/generate_wrapper.py" + } + _wrapper_script = invoker.wrapper_script + if (is_win) { + _wrapper_script += ".bat" + } + + data = [ + _wrapper_script, + "//.vpython3", + ] + if (defined(invoker.data)) { + data += invoker.data + } + outputs = [ _wrapper_script ] + + _rebased_executable_to_wrap = + rebase_path(invoker.executable, root_build_dir) + _rebased_wrapper_script = rebase_path(_wrapper_script, root_build_dir) + if (is_win) { + _script_language = "batch" + } else { + _script_language = "bash" + } + args = [ + "--executable", + "@WrappedPath(${_rebased_executable_to_wrap})", + "--wrapper-script", + _rebased_wrapper_script, + "--output-directory", + rebase_path(root_build_dir, root_build_dir), + "--script-language", + _script_language, + ] + + if (defined(invoker.executable_args)) { + args += [ "--" ] + invoker.executable_args + } + + forward_variables_from(invoker, TESTONLY_AND_VISIBILITY) + forward_variables_from(invoker, + "*", + TESTONLY_AND_VISIBILITY + [ + "data", + "executable", + "executable_args", + "generator_script", + "wrapper_script", + ]) + } +} diff --git a/util/generate_wrapper.py b/util/generate_wrapper.py new file mode 100755 index 000000000000..b45f5f3bf9af --- /dev/null +++ b/util/generate_wrapper.py @@ -0,0 +1,251 @@ +#!/usr/bin/env python3 +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Wraps an executable and any provided arguments into an executable script.""" + +import argparse +import os +import sys +import textwrap + + +# The bash template passes the python script into vpython via stdin. +# The interpreter doesn't know about the script, so we have bash +# inject the script location. +BASH_TEMPLATE = textwrap.dedent("""\ + #!/usr/bin/env vpython3 + _SCRIPT_LOCATION = __file__ + {script} + """) + + +# The batch template reruns the batch script with vpython, with the -x +# flag instructing the interpreter to ignore the first line. The interpreter +# knows about the (batch) script in this case, so it can get the file location +# directly. +BATCH_TEMPLATE = textwrap.dedent("""\ + @SETLOCAL ENABLEDELAYEDEXPANSION \ + & vpython3.bat -x "%~f0" %* \ + & EXIT /B !ERRORLEVEL! 
+ _SCRIPT_LOCATION = __file__ + {script} + """) + + +SCRIPT_TEMPLATES = { + 'bash': BASH_TEMPLATE, + 'batch': BATCH_TEMPLATE, +} + + +PY_TEMPLATE = textwrap.dedent("""\ + import os + import re + import shlex + import signal + import subprocess + import sys + import time + + _WRAPPED_PATH_RE = re.compile(r'@WrappedPath\(([^)]+)\)') + _PATH_TO_OUTPUT_DIR = '{path_to_output_dir}' + _SCRIPT_DIR = os.path.dirname(os.path.realpath(_SCRIPT_LOCATION)) + + + def ExpandWrappedPath(arg): + m = _WRAPPED_PATH_RE.match(arg) + if m: + relpath = os.path.join( + os.path.relpath(_SCRIPT_DIR), _PATH_TO_OUTPUT_DIR, m.group(1)) + npath = os.path.normpath(relpath) + if os.path.sep not in npath: + # If the original path points to something in the current directory, + # returning the normalized version of it can be a problem. + # normpath() strips off the './' part of the path + # ('./foo' becomes 'foo'), which can be a problem if the result + # is passed to something like os.execvp(); in that case + # osexecvp() will search $PATH for the executable, rather than + # just execing the arg directly, and if '.' isn't in $PATH, this + # results in an error. + # + # So, we need to explicitly return './foo' (or '.\\foo' on windows) + # instead of 'foo'. + # + # Hopefully there are no cases where this causes a problem; if + # there are, we will either need to change the interface to + # WrappedPath() somehow to distinguish between the two, or + # somehow ensure that the wrapped executable doesn't hit cases + # like this. + return '.' + os.path.sep + npath + return npath + return arg + + + def ExpandWrappedPaths(args): + for i, arg in enumerate(args): + args[i] = ExpandWrappedPath(arg) + return args + + + def FindIsolatedOutdir(raw_args): + outdir = None + i = 0 + remaining_args = [] + while i < len(raw_args): + if raw_args[i] == '--isolated-outdir' and i < len(raw_args)-1: + outdir = raw_args[i+1] + i += 2 + elif raw_args[i].startswith('--isolated-outdir='): + outdir = raw_args[i][len('--isolated-outdir='):] + i += 1 + else: + remaining_args.append(raw_args[i]) + i += 1 + if not outdir and 'ISOLATED_OUTDIR' in os.environ: + outdir = os.environ['ISOLATED_OUTDIR'] + return outdir, remaining_args + + def InsertWrapperScriptArgs(args): + if '--wrapper-script-args' in args: + idx = args.index('--wrapper-script-args') + args.insert(idx + 1, shlex.join(sys.argv)) + + def FilterIsolatedOutdirBasedArgs(outdir, args): + rargs = [] + i = 0 + while i < len(args): + if 'ISOLATED_OUTDIR' in args[i]: + if outdir: + # Rewrite the arg. + rargs.append(args[i].replace('${{ISOLATED_OUTDIR}}', + outdir).replace( + '$ISOLATED_OUTDIR', outdir)) + i += 1 + else: + # Simply drop the arg. + i += 1 + elif (not outdir and + args[i].startswith('-') and + '=' not in args[i] and + i < len(args) - 1 and + 'ISOLATED_OUTDIR' in args[i+1]): + # Parsing this case is ambiguous; if we're given + # `--foo $ISOLATED_OUTDIR` we can't tell if $ISOLATED_OUTDIR + # is meant to be the value of foo, or if foo takes no argument + # and $ISOLATED_OUTDIR is the first positional arg. + # + # We assume the former will be much more common, and so we + # need to drop --foo and $ISOLATED_OUTDIR. + i += 2 + else: + rargs.append(args[i]) + i += 1 + return rargs + + def ForwardSignals(proc): + def _sig_handler(sig, _): + if proc.poll() is not None: + return + # SIGBREAK is defined only for win32. 
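+        # Note: CTRL_BREAK_EVENT can only be delivered to a process group,
+        # which is why Popen() below starts the child with
+        # CREATE_NEW_PROCESS_GROUP (see its comment and crbug.com/733612#c6).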
+ # pylint: disable=no-member + if sys.platform == 'win32' and sig == signal.SIGBREAK: + print("Received signal(%d), sending CTRL_BREAK_EVENT to process %d" % (sig, proc.pid)) + proc.send_signal(signal.CTRL_BREAK_EVENT) + else: + print("Forwarding signal(%d) to process %d" % (sig, proc.pid)) + proc.send_signal(sig) + # pylint: enable=no-member + if sys.platform == 'win32': + signal.signal(signal.SIGBREAK, _sig_handler) # pylint: disable=no-member + else: + signal.signal(signal.SIGTERM, _sig_handler) + signal.signal(signal.SIGINT, _sig_handler) + + def Popen(*args, **kwargs): + assert 'creationflags' not in kwargs + if sys.platform == 'win32': + # Necessary for signal handling. See crbug.com/733612#c6. + kwargs['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP + return subprocess.Popen(*args, **kwargs) + + def RunCommand(cmd): + process = Popen(cmd) + ForwardSignals(process) + while process.poll() is None: + time.sleep(0.1) + return process.returncode + + + def main(raw_args): + executable_path = ExpandWrappedPath('{executable_path}') + outdir, remaining_args = FindIsolatedOutdir(raw_args) + args = {executable_args} + InsertWrapperScriptArgs(args) + args = FilterIsolatedOutdirBasedArgs(outdir, args) + executable_args = ExpandWrappedPaths(args) + cmd = [executable_path] + executable_args + remaining_args + if executable_path.endswith('.py'): + cmd = [sys.executable] + cmd + return RunCommand(cmd) + + + if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) + """) + + +def Wrap(args): + """Writes a wrapped script according to the provided arguments. + + Arguments: + args: an argparse.Namespace object containing command-line arguments + as parsed by a parser returned by CreateArgumentParser. + """ + path_to_output_dir = os.path.relpath( + args.output_directory, + os.path.dirname(args.wrapper_script)) + + with open(args.wrapper_script, 'w') as wrapper_script: + py_contents = PY_TEMPLATE.format( + path_to_output_dir=path_to_output_dir, + executable_path=str(args.executable), + executable_args=str(args.executable_args)) + template = SCRIPT_TEMPLATES[args.script_language] + wrapper_script.write(template.format(script=py_contents)) + os.chmod(args.wrapper_script, 0o750) + + return 0 + + +def CreateArgumentParser(): + """Creates an argparse.ArgumentParser instance.""" + parser = argparse.ArgumentParser() + parser.add_argument( + '--executable', + help='Executable to wrap.') + parser.add_argument( + '--wrapper-script', + help='Path to which the wrapper script will be written.') + parser.add_argument( + '--output-directory', + help='Path to the output directory.') + parser.add_argument( + '--script-language', + choices=SCRIPT_TEMPLATES.keys(), + help='Language in which the wrapper script will be written.') + parser.add_argument( + 'executable_args', nargs='*', + help='Arguments to wrap into the executable.') + return parser + + +def main(raw_args): + parser = CreateArgumentParser() + args = parser.parse_args(raw_args) + return Wrap(args) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/util/java_action.gni b/util/java_action.gni new file mode 100644 index 000000000000..81f6da5e18b9 --- /dev/null +++ b/util/java_action.gni @@ -0,0 +1,99 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +jarrunner = "//build/util/java_action.py" + +# Declare a target that runs a java command a single time. 
+#
+# This target type allows you to run a java command a single time to produce
+# one or more output files. If you want to run a java command for each of a
+# set of input files, see "java_action_foreach".
+#
+# See "gn help action" for more information on how to use this target. This
+# template is based on the "action" and supports the same variables.
+template("java_action") {
+  assert(defined(invoker.script),
+         "Need script in $target_name listing the .jar file to run.")
+  assert(defined(invoker.outputs),
+         "Need outputs in $target_name listing the generated outputs.")
+
+  jarscript = invoker.script
+  action(target_name) {
+    script = jarrunner
+
+    inputs = [ jarscript ]
+    if (defined(invoker.inputs)) {
+      inputs += invoker.inputs
+    }
+
+    args = [
+      "-jar",
+      rebase_path(jarscript, root_build_dir),
+    ]
+    if (defined(invoker.args)) {
+      args += invoker.args
+    }
+
+    forward_variables_from(invoker,
+                           [
+                             "console",
+                             "data",
+                             "data_deps",
+                             "depfile",
+                             "deps",
+                             "outputs",
+                             "sources",
+                             "testonly",
+                             "visibility",
+                           ])
+  }
+}
+
+# Declare a target that runs a java command over a set of files.
+#
+# This target type allows you to run a java command once-per-file over a set of
+# sources. If you want to run a java command once that takes many files as
+# input, see "java_action".
+#
+# See "gn help action_foreach" for more information on how to use this target.
+# This template is based on the "action_foreach" and supports the same
+# variables.
+template("java_action_foreach") {
+  assert(defined(invoker.script),
+         "Need script in $target_name listing the .jar file to run.")
+  assert(defined(invoker.outputs),
+         "Need outputs in $target_name listing the generated outputs.")
+  assert(defined(invoker.sources),
+         "Need sources in $target_name listing the target inputs.")
+
+  jarscript = invoker.script
+  action_foreach(target_name) {
+    script = jarrunner
+
+    inputs = [ jarscript ]
+    if (defined(invoker.inputs)) {
+      inputs += invoker.inputs
+    }
+
+    args = [
+      "-jar",
+      rebase_path(jarscript, root_build_dir),
+    ]
+    if (defined(invoker.args)) {
+      args += invoker.args
+    }
+
+    forward_variables_from(invoker,
+                           [
+                             "console",
+                             "data",
+                             "data_deps",
+                             "depfile",
+                             "deps",
+                             "outputs",
+                             "sources",
+                             "testonly",
+                             "visibility",
+                           ])
+  }
+}
diff --git a/util/java_action.py b/util/java_action.py
new file mode 100755
index 000000000000..6382dc23bf30
--- /dev/null
+++ b/util/java_action.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python3
+# Copyright 2015 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wrapper script to run a java command as a GN action."""
+
+import os
+import subprocess
+import sys
+
+EXIT_SUCCESS = 0
+EXIT_FAILURE = 1
+
+
+def IsExecutable(path):
+  """Returns whether the file at |path| exists and is executable.
+
+  Args:
+    path: absolute or relative path to test.
+
+  Returns:
+    True if the file at |path| exists and is executable, False otherwise.
+  """
+  return os.path.isfile(path) and os.access(path, os.X_OK)
+
+
+def FindCommand(command):
+  """Looks up |command| in PATH.
+
+  Args:
+    command: name of the command to look up; if command is a relative or
+      absolute path (i.e. contains some path separator) then only that
+      path will be tested.
+
+  Returns:
+    Full path to command or None if the command was not found.
+
+    On Windows, this respects the PATHEXT environment variable when the
+    command name does not have an extension.
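+
+    For example (illustrative paths only), on a Linux host FindCommand('java')
+    might return '/usr/bin/java', while FindCommand('nonexistent-tool')
+    returns None.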
+ """ + fpath, _ = os.path.split(command) + if fpath: + if IsExecutable(command): + return command + + if sys.platform == 'win32': + # On Windows, if the command does not have an extension, cmd.exe will + # try all extensions from PATHEXT when resolving the full path. + command, ext = os.path.splitext(command) + if not ext: + exts = os.environ['PATHEXT'].split(os.path.pathsep) + else: + exts = [ext] + else: + exts = [''] + + for path in os.environ['PATH'].split(os.path.pathsep): + for ext in exts: + path = os.path.join(path, command) + ext + if IsExecutable(path): + return path + + return None + + +def main(): + java_path = FindCommand('java') + if not java_path: + sys.stderr.write('java: command not found\n') + sys.exit(EXIT_FAILURE) + + args = sys.argv[1:] + if len(args) < 2 or args[0] != '-jar': + sys.stderr.write('usage: %s -jar JARPATH [java_args]...\n' % sys.argv[0]) + sys.exit(EXIT_FAILURE) + + return subprocess.check_call([java_path] + args) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/util/lastchange.gni b/util/lastchange.gni new file mode 100644 index 000000000000..909b9789ff73 --- /dev/null +++ b/util/lastchange.gni @@ -0,0 +1,16 @@ +# Copyright 2018 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This file is used to inject fixed dummy commit for commit independent +# reproducible binaries. + +declare_args() { + use_dummy_lastchange = false +} + +if (use_dummy_lastchange) { + lastchange_file = "//build/util/LASTCHANGE.dummy" +} else { + lastchange_file = "//build/util/LASTCHANGE" +} diff --git a/util/lastchange.py b/util/lastchange.py new file mode 100755 index 000000000000..98a6360b4687 --- /dev/null +++ b/util/lastchange.py @@ -0,0 +1,344 @@ +#!/usr/bin/env python3 +# Copyright 2012 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" +lastchange.py -- Chromium revision fetching utility. +""" + +import argparse +import collections +import datetime +import logging +import os +import subprocess +import sys + +VersionInfo = collections.namedtuple("VersionInfo", + ("revision_id", "revision", "timestamp")) + +class GitError(Exception): + pass + +# This function exists for compatibility with logic outside this +# repository that uses this file as a library. +# TODO(eliribble) remove this function after it has been ported into +# the repositories that depend on it +def RunGitCommand(directory, command): + """ + Launches git subcommand. + + Errors are swallowed. + + Returns: + A process object or None. + """ + command = ['git'] + command + # Force shell usage under cygwin. This is a workaround for + # mysterious loss of cwd while invoking cygwin's git. + # We can't just pass shell=True to Popen, as under win32 this will + # cause CMD to be used, while we explicitly want a cygwin shell. + if sys.platform == 'cygwin': + command = ['sh', '-c', ' '.join(command)] + try: + proc = subprocess.Popen(command, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=directory, + shell=(sys.platform=='win32')) + return proc + except OSError as e: + logging.error('Command %r failed: %s' % (' '.join(command), e)) + return None + + +def _RunGitCommand(directory, command): + """Launches git subcommand. + + Returns: + The stripped stdout of the git command. + Raises: + GitError on failure, including a nonzero return code. + """ + command = ['git'] + command + # Force shell usage under cygwin. 
This is a workaround for + # mysterious loss of cwd while invoking cygwin's git. + # We can't just pass shell=True to Popen, as under win32 this will + # cause CMD to be used, while we explicitly want a cygwin shell. + if sys.platform == 'cygwin': + command = ['sh', '-c', ' '.join(command)] + try: + logging.info("Executing '%s' in %s", ' '.join(command), directory) + proc = subprocess.Popen(command, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=directory, + shell=(sys.platform=='win32')) + stdout, stderr = tuple(x.decode(encoding='utf_8') + for x in proc.communicate()) + stdout = stdout.strip() + logging.debug("returncode: %d", proc.returncode) + logging.debug("stdout: %s", stdout) + logging.debug("stderr: %s", stderr) + if proc.returncode != 0 or not stdout: + raise GitError(( + "Git command '{}' in {} failed: " + "rc={}, stdout='{}' stderr='{}'").format( + " ".join(command), directory, proc.returncode, stdout, stderr)) + return stdout + except OSError as e: + raise GitError("Git command 'git {}' in {} failed: {}".format( + " ".join(command), directory, e)) + + +def GetMergeBase(directory, ref): + """ + Return the merge-base of HEAD and ref. + + Args: + directory: The directory containing the .git directory. + ref: The ref to use to find the merge base. + Returns: + The git commit SHA of the merge-base as a string. + """ + logging.debug("Calculating merge base between HEAD and %s in %s", + ref, directory) + command = ['merge-base', 'HEAD', ref] + return _RunGitCommand(directory, command) + + +def FetchGitRevision(directory, commit_filter, start_commit="HEAD"): + """ + Fetch the Git hash (and Cr-Commit-Position if any) for a given directory. + + Args: + directory: The directory containing the .git directory. + commit_filter: A filter to supply to grep to filter commits + start_commit: A commit identifier. The result of this function + will be limited to only consider commits before the provided + commit. + Returns: + A VersionInfo object. On error all values will be 0. + """ + hash_ = '' + + git_args = ['log', '-1', '--format=%H %ct'] + if commit_filter is not None: + git_args.append('--grep=' + commit_filter) + + git_args.append(start_commit) + + output = _RunGitCommand(directory, git_args) + hash_, commit_timestamp = output.split() + if not hash_: + return VersionInfo('0', '0', 0) + + revision = hash_ + output = _RunGitCommand(directory, ['cat-file', 'commit', hash_]) + for line in reversed(output.splitlines()): + if line.startswith('Cr-Commit-Position:'): + pos = line.rsplit()[-1].strip() + logging.debug("Found Cr-Commit-Position '%s'", pos) + revision = "{}-{}".format(hash_, pos) + break + return VersionInfo(hash_, revision, int(commit_timestamp)) + + +def GetHeaderGuard(path): + """ + Returns the header #define guard for the given file path. + This treats everything after the last instance of "src/" as being a + relevant part of the guard. If there is no "src/", then the entire path + is used. + """ + src_index = path.rfind('src/') + if src_index != -1: + guard = path[src_index + 4:] + else: + guard = path + guard = guard.upper() + return guard.replace('/', '_').replace('.', '_').replace('\\', '_') + '_' + + +def GetHeaderContents(path, define, version): + """ + Returns what the contents of the header file should be that indicate the given + revision. 
+ """ + header_guard = GetHeaderGuard(path) + + header_contents = """/* Generated by lastchange.py, do not edit.*/ + +#ifndef %(header_guard)s +#define %(header_guard)s + +#define %(define)s "%(version)s" + +#endif // %(header_guard)s +""" + header_contents = header_contents % { 'header_guard': header_guard, + 'define': define, + 'version': version } + return header_contents + + +def GetGitTopDirectory(source_dir): + """Get the top git directory - the directory that contains the .git directory. + + Args: + source_dir: The directory to search. + Returns: + The output of "git rev-parse --show-toplevel" as a string + """ + return _RunGitCommand(source_dir, ['rev-parse', '--show-toplevel']) + + +def WriteIfChanged(file_name, contents): + """ + Writes the specified contents to the specified file_name + iff the contents are different than the current contents. + Returns if new data was written. + """ + try: + old_contents = open(file_name, 'r').read() + except EnvironmentError: + pass + else: + if contents == old_contents: + return False + os.unlink(file_name) + open(file_name, 'w').write(contents) + return True + + +def main(argv=None): + if argv is None: + argv = sys.argv + + parser = argparse.ArgumentParser(usage="lastchange.py [options]") + parser.add_argument("-m", "--version-macro", + help=("Name of C #define when using --header. Defaults to " + "LAST_CHANGE.")) + parser.add_argument("-o", + "--output", + metavar="FILE", + help=("Write last change to FILE. " + "Can be combined with other file-output-related " + "options to write multiple files.")) + parser.add_argument("--header", + metavar="FILE", + help=("Write last change to FILE as a C/C++ header. " + "Can be combined with other file-output-related " + "options to write multiple files.")) + parser.add_argument("--revision", + metavar="FILE", + help=("Write last change to FILE as a one-line revision. " + "Can be combined with other file-output-related " + "options to write multiple files.")) + parser.add_argument("--merge-base-ref", + default=None, + help=("Only consider changes since the merge " + "base between HEAD and the provided ref")) + parser.add_argument("--revision-id-only", action='store_true', + help=("Output the revision as a VCS revision ID only (in " + "Git, a 40-character commit hash, excluding the " + "Cr-Commit-Position).")) + parser.add_argument("--revision-id-prefix", + metavar="PREFIX", + help=("Adds a string prefix to the VCS revision ID.")) + parser.add_argument("--print-only", action="store_true", + help=("Just print the revision string. Overrides any " + "file-output-related options.")) + parser.add_argument("-s", "--source-dir", metavar="DIR", + help="Use repository in the given directory.") + parser.add_argument("--filter", metavar="REGEX", + help=("Only use log entries where the commit message " + "matches the supplied filter regex. 
Defaults to " + "'^Change-Id:' to suppress local commits."), + default='^Change-Id:') + + args, extras = parser.parse_known_args(argv[1:]) + + logging.basicConfig(level=logging.WARNING) + + out_file = args.output + header = args.header + revision = args.revision + commit_filter=args.filter + + while len(extras) and out_file is None: + if out_file is None: + out_file = extras.pop(0) + if extras: + sys.stderr.write('Unexpected arguments: %r\n\n' % extras) + parser.print_help() + sys.exit(2) + + source_dir = args.source_dir or os.path.dirname(os.path.abspath(__file__)) + try: + git_top_dir = GetGitTopDirectory(source_dir) + except GitError as e: + logging.error("Failed to get git top directory from '%s': %s", + source_dir, e) + return 2 + + if args.merge_base_ref: + try: + merge_base_sha = GetMergeBase(git_top_dir, args.merge_base_ref) + except GitError as e: + logging.error("You requested a --merge-base-ref value of '%s' but no " + "merge base could be found between it and HEAD. Git " + "reports: %s", args.merge_base_ref, e) + return 3 + else: + merge_base_sha = 'HEAD' + + try: + version_info = FetchGitRevision(git_top_dir, commit_filter, merge_base_sha) + except GitError as e: + logging.error("Failed to get version info: %s", e) + logging.info(("Falling back to a version of 0.0.0 to allow script to " + "finish. This is normal if you are bootstrapping a new environment " + "or do not have a git repository for any other reason. If not, this " + "could represent a serious error.")) + version_info = VersionInfo('0', '0', 0) + + revision_string = version_info.revision + if args.revision_id_only: + revision_string = version_info.revision_id + + if args.revision_id_prefix: + revision_string = args.revision_id_prefix + revision_string + + if args.print_only: + print(revision_string) + else: + lastchange_year = datetime.datetime.utcfromtimestamp( + version_info.timestamp).year + contents_lines = [ + "LASTCHANGE=%s" % revision_string, + "LASTCHANGE_YEAR=%s" % lastchange_year, + ] + contents = '\n'.join(contents_lines) + '\n' + if not out_file and not header and not revision: + sys.stdout.write(contents) + else: + if out_file: + committime_file = out_file + '.committime' + out_changed = WriteIfChanged(out_file, contents) + if out_changed or not os.path.exists(committime_file): + with open(committime_file, 'w') as timefile: + timefile.write(str(version_info.timestamp)) + if header: + WriteIfChanged(header, + GetHeaderContents(header, args.version_macro, + revision_string)) + if revision: + WriteIfChanged(revision, revision_string) + + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/util/lib/__init__.py b/util/lib/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/util/lib/common/PRESUBMIT.py b/util/lib/common/PRESUBMIT.py new file mode 100644 index 000000000000..b0477fd589a5 --- /dev/null +++ b/util/lib/common/PRESUBMIT.py @@ -0,0 +1,24 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
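For orientation, a minimal sketch of driving the lastchange.py utility above as a library rather than through its command line, assuming //build/util is importable on sys.path and the current directory is inside a git checkout (both are assumptions, not part of this patch):

    import lastchange

    # Find the enclosing checkout, then fetch HEAD's hash, its
    # Cr-Commit-Position-qualified revision string, and its commit timestamp.
    top = lastchange.GetGitTopDirectory('.')
    info = lastchange.FetchGitRevision(top, commit_filter=None)
    print(info.revision_id, info.revision, info.timestamp)

Outside a checkout, GetGitTopDirectory raises GitError; main() catches that and falls back to a dummy VersionInfo('0', '0', 0) so bootstrapping environments still work.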
+
+
+USE_PYTHON3 = True
+
+
+def _RunTests(input_api, output_api):
+  return (input_api.canned_checks.RunUnitTestsInDirectory(
+      input_api,
+      output_api,
+      '.',
+      files_to_check=[r'.+_test.py$'],
+      run_on_python2=False,
+      skip_shebang_check=True))
+
+
+def CheckChangeOnUpload(input_api, output_api):
+  return _RunTests(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+  return _RunTests(input_api, output_api)
diff --git a/util/lib/common/__init__.py b/util/lib/common/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/util/lib/common/chrome_test_server_spawner.py b/util/lib/common/chrome_test_server_spawner.py
new file mode 100644
index 000000000000..62788f2b0291
--- /dev/null
+++ b/util/lib/common/chrome_test_server_spawner.py
@@ -0,0 +1,485 @@
+# Copyright 2017 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A "Test Server Spawner" that handles spawning and killing per-test test
+servers.
+
+It's used to accept requests from the device to spawn and kill instances of the
+chrome test server on the host.
+"""
+# pylint: disable=W0702
+
+import json
+import logging
+import os
+import select
+import struct
+import subprocess
+import sys
+import threading
+import time
+
+from six.moves import BaseHTTPServer, urllib
+
+
+SERVER_TYPES = {
+    'http': '',
+    'ftp': '-f',
+    'ws': '--websocket',
+}
+
+
+_DIR_SOURCE_ROOT = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir,
+                 os.pardir))
+
+
+_logger = logging.getLogger(__name__)
+
+
+# Paths that are needed to import necessary modules when launching a
+# testserver.
+os.environ['PYTHONPATH'] = os.environ.get('PYTHONPATH', '') + (
+    ':%s:%s' % (os.path.join(_DIR_SOURCE_ROOT, 'third_party'),
+                os.path.join(_DIR_SOURCE_ROOT, 'net', 'tools', 'testserver')))
+
+
+def _GetServerTypeCommandLine(server_type):
+  """Returns the command-line switch for the given server type.
+
+  Args:
+    server_type: the server type to be used (e.g. 'http').
+
+  Returns:
+    A string containing the command-line argument.
+  """
+  if server_type not in SERVER_TYPES:
+    raise NotImplementedError('Unknown server type: %s' % server_type)
+  return SERVER_TYPES[server_type]
+
+
+class PortForwarder:
+  def Map(self, port_pairs):
+    pass
+
+  def GetDevicePortForHostPort(self, host_port):
+    """Returns the device port that corresponds to a given host port."""
+    return host_port
+
+  def WaitHostPortAvailable(self, port):
+    """Returns True if |port| is available."""
+    return True
+
+  def WaitPortNotAvailable(self, port):
+    """Returns True if |port| is not available."""
+    return True
+
+  def WaitDevicePortReady(self, port):
+    """Returns whether the provided port is used."""
+    return True
+
+  def Unmap(self, device_port):
+    """Unmaps the specified port."""
+    pass
+
+
+class TestServerThread(threading.Thread):
+  """A thread to run the test server in a separate process."""
+
+  def __init__(self, ready_event, arguments, port_forwarder):
+    """Initializes TestServerThread with the following arguments.
+
+    Args:
+      ready_event: event which will be set when the test server is ready.
+      arguments: dictionary of arguments to run the test server.
+      port_forwarder: an instance of PortForwarder used to map the test
+          server's host ports for the device.
+ """ + threading.Thread.__init__(self) + self.wait_event = threading.Event() + self.stop_event = threading.Event() + self.ready_event = ready_event + self.ready_event.clear() + self.arguments = arguments + self.port_forwarder = port_forwarder + self.test_server_process = None + self.is_ready = False + self.host_port = 0 + self.host_ocsp_port = 0 + assert isinstance(self.host_port, int) + # The forwarder device port now is dynamically allocated. + self.forwarder_device_port = 0 + self.forwarder_ocsp_device_port = 0 + self.process = None + self.command_line = [] + + def _WaitToStartAndGetPortFromTestServer(self, pipe_in): + """Waits for the Python test server to start and gets the port it is using. + + The port information is passed by the Python test server with a pipe given + by |pipe_in|. It is written as a result to |self.host_port|. + + Returns: + Whether the port used by the test server was successfully fetched. + """ + (in_fds, _, _) = select.select([pipe_in], [], []) + if len(in_fds) == 0: + _logger.error('Failed to wait to the Python test server to be started.') + return False + # First read the data length as an unsigned 4-byte value. This + # is _not_ using network byte ordering since the Python test server packs + # size as native byte order and all Chromium platforms so far are + # configured to use little-endian. + # TODO(jnd): Change the Python test server and local_test_server_*.cc to + # use a unified byte order (either big-endian or little-endian). + data_length = os.read(pipe_in, struct.calcsize('=L')) + if data_length: + (data_length,) = struct.unpack('=L', data_length) + assert data_length + if not data_length: + _logger.error('Failed to get length of server data.') + return False + server_data_json = os.read(pipe_in, data_length) + if not server_data_json: + _logger.error('Failed to get server data.') + return False + _logger.info('Got port json data: %s', server_data_json) + + parsed_server_data = None + try: + parsed_server_data = json.loads(server_data_json) + except ValueError: + pass + + if not isinstance(parsed_server_data, dict): + _logger.error('Failed to parse server_data: %s' % server_data_json) + return False + + if not isinstance(parsed_server_data.get('port'), int): + _logger.error('Failed to get port information from the server data.') + return False + + self.host_port = parsed_server_data['port'] + self.host_ocsp_port = parsed_server_data.get('ocsp_port', 0) + + return self.port_forwarder.WaitPortNotAvailable(self.host_port) + + def _GenerateCommandLineArguments(self, pipe_out): + """Generates the command line to run the test server. + + Note that all options are processed by following the definitions in + testserver.py. + """ + if self.command_line: + return + + args_copy = dict(self.arguments) + + # Translate the server type. + type_cmd = _GetServerTypeCommandLine(args_copy.pop('server-type')) + if type_cmd: + self.command_line.append(type_cmd) + + # Use a pipe to get the port given by the Python test server. + self.command_line.append('--startup-pipe=%d' % pipe_out) + + # Pass the remaining arguments as-is. 
+ for key, values in args_copy.items(): + if not isinstance(values, list): + values = [values] + for value in values: + if value is None: + self.command_line.append('--%s' % key) + else: + self.command_line.append('--%s=%s' % (key, value)) + + def _CloseUnnecessaryFDsForTestServerProcess(self, pipe_out): + # This is required to avoid subtle deadlocks that could be caused by the + # test server child process inheriting undesirable file descriptors such as + # file lock file descriptors. Note stdin, stdout, and stderr (0-2) are left + # alone and redirected with subprocess.Popen. It is important to leave those + # fds filled, or the test server will accidentally open other fds at those + # numbers. + for fd in range(3, 1024): + if fd != pipe_out: + try: + os.close(fd) + except: + pass + + def run(self): + _logger.info('Start running the thread!') + self.wait_event.clear() + + # Set up a pipe for the server to report when it has started. + pipe_in, pipe_out = os.pipe() + + # TODO(crbug.com/941669): Remove if condition after python3 migration. + if hasattr(os, 'set_inheritable'): + os.set_inheritable(pipe_out, True) + + try: + self._GenerateCommandLineArguments(pipe_out) + # TODO(crbug.com/941669): When this script is ported to Python 3, replace + # 'vpython3' below with sys.executable. + command = [ + 'vpython3', + os.path.join(_DIR_SOURCE_ROOT, 'net', 'tools', 'testserver', + 'testserver.py') + ] + self.command_line + _logger.info('Running: %s', command) + + # Disable PYTHONUNBUFFERED because it has a bad interaction with the + # testserver. Remove once this interaction is fixed. + unbuf = os.environ.pop('PYTHONUNBUFFERED', None) + + # Pass _DIR_SOURCE_ROOT as the child's working directory so that relative + # paths in the arguments are resolved correctly. devnull can be replaced + # with subprocess.DEVNULL in Python 3. + with open(os.devnull, 'r+b') as devnull: + self.process = subprocess.Popen( + command, + preexec_fn=lambda: self._CloseUnnecessaryFDsForTestServerProcess( + pipe_out), + stdin=devnull, + # Preserve stdout and stderr from the test server. + stdout=None, + stderr=None, + cwd=_DIR_SOURCE_ROOT, + close_fds=False) + + # Close pipe_out early. If self.process crashes, this will be visible + # in _WaitToStartAndGetPortFromTestServer's select loop. + os.close(pipe_out) + pipe_out = -1 + if unbuf: + os.environ['PYTHONUNBUFFERED'] = unbuf + self.is_ready = self._WaitToStartAndGetPortFromTestServer(pipe_in) + + if self.is_ready: + port_map = [(0, self.host_port)] + if self.host_ocsp_port: + port_map.extend([(0, self.host_ocsp_port)]) + self.port_forwarder.Map(port_map) + + self.forwarder_device_port = \ + self.port_forwarder.GetDevicePortForHostPort(self.host_port) + if self.host_ocsp_port: + self.forwarder_ocsp_device_port = \ + self.port_forwarder.GetDevicePortForHostPort(self.host_ocsp_port) + + # Check whether the forwarder is ready on the device. + self.is_ready = self.forwarder_device_port and \ + self.port_forwarder.WaitDevicePortReady(self.forwarder_device_port) + + # Wake up the request handler thread. + self.ready_event.set() + # Keep thread running until Stop() gets called. + self.stop_event.wait() + if self.process.poll() is None: + self.process.kill() + # Wait for process to actually terminate. 
+ # (crbug.com/946475) + self.process.wait() + + self.port_forwarder.Unmap(self.forwarder_device_port) + self.process = None + self.is_ready = False + finally: + if pipe_in >= 0: + os.close(pipe_in) + if pipe_out >= 0: + os.close(pipe_out) + _logger.info('Test-server has died.') + self.wait_event.set() + + def Stop(self): + """Blocks until the loop has finished. + + Note that this must be called in another thread. + """ + if not self.process: + return + self.stop_event.set() + self.wait_event.wait() + + +class SpawningServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler): + """A handler used to process http GET/POST request.""" + + def _SendResponse(self, response_code, response_reason, additional_headers, + contents): + """Generates a response sent to the client from the provided parameters. + + Args: + response_code: number of the response status. + response_reason: string of reason description of the response. + additional_headers: dict of additional headers. Each key is the name of + the header, each value is the content of the header. + contents: string of the contents we want to send to client. + """ + self.send_response(response_code, response_reason) + self.send_header('Content-Type', 'text/html') + # Specify the content-length as without it the http(s) response will not + # be completed properly (and the browser keeps expecting data). + self.send_header('Content-Length', len(contents)) + for header_name in additional_headers: + self.send_header(header_name, additional_headers[header_name]) + self.end_headers() + self.wfile.write(contents.encode('utf8')) + self.wfile.flush() + + def _StartTestServer(self): + """Starts the test server thread.""" + _logger.info('Handling request to spawn a test server.') + content_type = self.headers.get('content-type') + if content_type != 'application/json': + raise Exception('Bad content-type for start request.') + content_length = self.headers.get('content-length') + if not content_length: + content_length = 0 + try: + content_length = int(content_length) + except: + raise Exception('Bad content-length for start request.') + _logger.info(content_length) + test_server_argument_json = self.rfile.read(content_length) + _logger.info(test_server_argument_json) + + if len(self.server.test_servers) >= self.server.max_instances: + self._SendResponse(400, 'Invalid request', {}, + 'Too many test servers running') + return + + ready_event = threading.Event() + new_server = TestServerThread(ready_event, + json.loads(test_server_argument_json), + self.server.port_forwarder) + new_server.setDaemon(True) + new_server.start() + ready_event.wait() + if new_server.is_ready: + response = {'port': new_server.forwarder_device_port, + 'message': 'started'}; + if new_server.forwarder_ocsp_device_port: + response['ocsp_port'] = new_server.forwarder_ocsp_device_port + self._SendResponse(200, 'OK', {}, json.dumps(response)) + _logger.info('Test server is running on port %d forwarded to %d.' 
%
+                   (new_server.forwarder_device_port, new_server.host_port))
+      port = new_server.forwarder_device_port
+      assert port not in self.server.test_servers
+      self.server.test_servers[port] = new_server
+    else:
+      new_server.Stop()
+      self._SendResponse(500, 'Test Server Error.', {}, '')
+      _logger.info('Encountered a problem while starting a test server.')
+
+  def _KillTestServer(self, params):
+    """Stops the test server instance."""
+    try:
+      port = int(params['port'][0])
+    except (KeyError, IndexError, ValueError):
+      port = None
+    if port is None or port <= 0:
+      self._SendResponse(400, 'Invalid request.', {}, 'port must be specified')
+      return
+
+    if port not in self.server.test_servers:
+      self._SendResponse(400, 'Invalid request.', {},
+                         "testserver isn't running on port %d" % port)
+      return
+
+    server = self.server.test_servers.pop(port)
+
+    _logger.info('Handling request to kill a test server on port: %d.', port)
+    server.Stop()
+
+    # Make sure the status of test server is correct before sending response.
+    if self.server.port_forwarder.WaitHostPortAvailable(port):
+      self._SendResponse(200, 'OK', {}, 'killed')
+      _logger.info('Test server on port %d is killed', port)
+    else:
+      # We expect the port to be free, but nothing stops the system from
+      # binding something else to that port, so don't throw error.
+      # (crbug.com/946475)
+      self._SendResponse(200, 'OK', {}, '')
+      _logger.warning('Port %s is not free after killing test server.', port)
+
+  def log_message(self, format, *args):
+    # Suppress the default HTTP logging behavior if the logging level is
+    # higher than INFO.
+    if _logger.getEffectiveLevel() <= logging.INFO:
+      BaseHTTPServer.BaseHTTPRequestHandler.log_message(self, format, *args)
+
+  def do_POST(self):
+    parsed_path = urllib.parse.urlparse(self.path)
+    action = parsed_path.path
+    _logger.info('Action for POST method is: %s.', action)
+    if action == '/start':
+      self._StartTestServer()
+    else:
+      self._SendResponse(400, 'Unknown request.', {}, '')
+      _logger.info('Encountered unknown request: %s.', action)
+
+  def do_GET(self):
+    parsed_path = urllib.parse.urlparse(self.path)
+    action = parsed_path.path
+    params = urllib.parse.parse_qs(parsed_path.query, keep_blank_values=1)
+    _logger.info('Action for GET method is: %s.', action)
+    for param in params:
+      _logger.info('%s=%s', param, params[param][0])
+    if action == '/kill':
+      self._KillTestServer(params)
+    elif action == '/ping':
+      # The ping handler is used to check whether the spawner server is ready
+      # to serve the requests. We don't need to test the status of the test
+      # server when handling ping request.
+      self._SendResponse(200, 'OK', {}, 'ready')
+      _logger.info('Handled ping request and sent response.')
+    else:
+      self._SendResponse(400, 'Unknown request', {}, '')
+      _logger.info('Encountered unknown request: %s.', action)
+
+
+class SpawningServer(object):
+  """The class used to start/stop an HTTP server."""
+
+  def __init__(self, test_server_spawner_port, port_forwarder, max_instances):
+    self.server = BaseHTTPServer.HTTPServer(('', test_server_spawner_port),
+                                            SpawningServerRequestHandler)
+    self.server_port = self.server.server_port
+    _logger.info('Started test server spawner on port: %d.', self.server_port)
+
+    self.server.port_forwarder = port_forwarder
+    self.server.test_servers = {}
+    self.server.max_instances = max_instances
+
+  def _Listen(self):
+    _logger.info('Starting test server spawner.')
+    self.server.serve_forever()
+
+  def Start(self):
+    """Starts the test server spawner."""
+    listener_thread = threading.Thread(target=self._Listen)
+    listener_thread.setDaemon(True)
+    listener_thread.start()
+
+  def Stop(self):
+    """Stops the test server spawner.
+
+    Also cleans the server state.
+    """
+    self.CleanupState()
+    self.server.shutdown()
+
+  def CleanupState(self):
+    """Cleans up the spawning server state.
+
+    This should be called if the test server spawner is reused,
+    to avoid sharing the test server instance.
+    """
+    if self.server.test_servers:
+      _logger.warning('Not all test servers were stopped.')
+      for port in self.server.test_servers:
+        _logger.warning('Stopping test server on port %d', port)
+        self.server.test_servers[port].Stop()
+    self.server.test_servers = {}
diff --git a/util/lib/common/perf_result_data_type.py b/util/lib/common/perf_result_data_type.py
new file mode 100644
index 000000000000..a75c916d793c
--- /dev/null
+++ b/util/lib/common/perf_result_data_type.py
@@ -0,0 +1,20 @@
+# Copyright 2013 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+DEFAULT = 'default'
+UNIMPORTANT = 'unimportant'
+HISTOGRAM = 'histogram'
+UNIMPORTANT_HISTOGRAM = 'unimportant-histogram'
+INFORMATIONAL = 'informational'
+
+ALL_TYPES = [DEFAULT, UNIMPORTANT, HISTOGRAM, UNIMPORTANT_HISTOGRAM,
+             INFORMATIONAL]
+
+
+def IsValidType(datatype):
+  return datatype in ALL_TYPES
+
+
+def IsHistogram(datatype):
+  return (datatype == HISTOGRAM or datatype == UNIMPORTANT_HISTOGRAM)
diff --git a/util/lib/common/perf_tests_results_helper.py b/util/lib/common/perf_tests_results_helper.py
new file mode 100644
index 000000000000..8246e206c0b1
--- /dev/null
+++ b/util/lib/common/perf_tests_results_helper.py
@@ -0,0 +1,201 @@
+# Copyright 2013 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
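perf_result_data_type above is a plain enumeration module. As a short illustrative sketch (the assertions are examples, not part of this patch), callers validate a result type before emitting a perf line and branch on the histogram types, which the helper below summarizes with a geometric rather than arithmetic mean:

    import perf_result_data_type

    # Every perf line must carry one of the known result types.
    assert perf_result_data_type.IsValidType(perf_result_data_type.DEFAULT)

    # Histogram-typed results take the geometric-mean path in
    # PrintPerfResult below.
    assert perf_result_data_type.IsHistogram(
        perf_result_data_type.UNIMPORTANT_HISTOGRAM)
    assert not perf_result_data_type.IsHistogram(perf_result_data_type.DEFAULT)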
+ + +import re +import sys + +import json +import logging +import math + +import perf_result_data_type + + +# Mapping from result type to test output +RESULT_TYPES = {perf_result_data_type.UNIMPORTANT: 'RESULT ', + perf_result_data_type.DEFAULT: '*RESULT ', + perf_result_data_type.INFORMATIONAL: '', + perf_result_data_type.UNIMPORTANT_HISTOGRAM: 'HISTOGRAM ', + perf_result_data_type.HISTOGRAM: '*HISTOGRAM '} + + +def _EscapePerfResult(s): + """Escapes |s| for use in a perf result.""" + return re.sub('[\:|=/#&,]', '_', s) + + +def FlattenList(values): + """Returns a simple list without sub-lists.""" + ret = [] + for entry in values: + if isinstance(entry, list): + ret.extend(FlattenList(entry)) + else: + ret.append(entry) + return ret + + +def GeomMeanAndStdDevFromHistogram(histogram_json): + histogram = json.loads(histogram_json) + # Handle empty histograms gracefully. + if not 'buckets' in histogram: + return 0.0, 0.0 + count = 0 + sum_of_logs = 0 + for bucket in histogram['buckets']: + if 'high' in bucket: + bucket['mean'] = (bucket['low'] + bucket['high']) / 2.0 + else: + bucket['mean'] = bucket['low'] + if bucket['mean'] > 0: + sum_of_logs += math.log(bucket['mean']) * bucket['count'] + count += bucket['count'] + + if count == 0: + return 0.0, 0.0 + + sum_of_squares = 0 + geom_mean = math.exp(sum_of_logs / count) + for bucket in histogram['buckets']: + if bucket['mean'] > 0: + sum_of_squares += (bucket['mean'] - geom_mean) ** 2 * bucket['count'] + return geom_mean, math.sqrt(sum_of_squares / count) + + +def _ValueToString(v): + # Special case for floats so we don't print using scientific notation. + if isinstance(v, float): + return '%f' % v + else: + return str(v) + + +def _MeanAndStdDevFromList(values): + avg = None + sd = None + if len(values) > 1: + try: + value = '[%s]' % ','.join([_ValueToString(v) for v in values]) + avg = sum([float(v) for v in values]) / len(values) + sqdiffs = [(float(v) - avg) ** 2 for v in values] + variance = sum(sqdiffs) / (len(values) - 1) + sd = math.sqrt(variance) + except ValueError: + value = ', '.join(values) + else: + value = values[0] + return value, avg, sd + + +def PrintPages(page_list): + """Prints list of pages to stdout in the format required by perf tests.""" + print('Pages: [%s]' % ','.join([_EscapePerfResult(p) for p in page_list])) + + +def PrintPerfResult(measurement, trace, values, units, + result_type=perf_result_data_type.DEFAULT, + print_to_stdout=True): + """Prints numerical data to stdout in the format required by perf tests. + + The string args may be empty but they must not contain any colons (:) or + equals signs (=). + This is parsed by the buildbot using: + http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/slave/process_log_utils.py + + Args: + measurement: A description of the quantity being measured, e.g. "vm_peak". + On the dashboard, this maps to a particular graph. Mandatory. + trace: A description of the particular data point, e.g. "reference". + On the dashboard, this maps to a particular "line" in the graph. + Mandatory. + values: A list of numeric measured values. An N-dimensional list will be + flattened and treated as a simple list. + units: A description of the units of measure, e.g. "bytes". + result_type: Accepts values of perf_result_data_type.ALL_TYPES. + print_to_stdout: If True, prints the output in stdout instead of returning + the output to caller. + + Returns: + String of the formated perf result. 
+ """ + assert perf_result_data_type.IsValidType(result_type), \ + 'result type: %s is invalid' % result_type + + trace_name = _EscapePerfResult(trace) + + if (result_type == perf_result_data_type.UNIMPORTANT or + result_type == perf_result_data_type.DEFAULT or + result_type == perf_result_data_type.INFORMATIONAL): + assert isinstance(values, list) + assert '/' not in measurement + flattened_values = FlattenList(values) + assert len(flattened_values) + value, avg, sd = _MeanAndStdDevFromList(flattened_values) + output = '%s%s: %s%s%s %s' % ( + RESULT_TYPES[result_type], + _EscapePerfResult(measurement), + trace_name, + # Do not show equal sign if the trace is empty. Usually it happens when + # measurement is enough clear to describe the result. + '= ' if trace_name else '', + value, + units) + else: + assert perf_result_data_type.IsHistogram(result_type) + assert isinstance(values, list) + # The histograms can only be printed individually, there's no computation + # across different histograms. + assert len(values) == 1 + value = values[0] + output = '%s%s: %s= %s %s' % ( + RESULT_TYPES[result_type], + _EscapePerfResult(measurement), + trace_name, + value, + units) + avg, sd = GeomMeanAndStdDevFromHistogram(value) + + if avg: + output += '\nAvg %s: %f%s' % (measurement, avg, units) + if sd: + output += '\nSd %s: %f%s' % (measurement, sd, units) + if print_to_stdout: + print(output) + sys.stdout.flush() + return output + + +def ReportPerfResult(chart_data, graph_title, trace_title, value, units, + improvement_direction='down', important=True): + """Outputs test results in correct format. + + If chart_data is None, it outputs data in old format. If chart_data is a + dictionary, formats in chartjson format. If any other format defaults to + old format. + + Args: + chart_data: A dictionary corresponding to perf results in the chartjson + format. + graph_title: A string containing the name of the chart to add the result + to. + trace_title: A string containing the name of the trace within the chart + to add the result to. + value: The value of the result being reported. + units: The units of the value being reported. + improvement_direction: A string denoting whether higher or lower is + better for the result. Either 'up' or 'down'. + important: A boolean denoting whether the result is important or not. + """ + if chart_data and isinstance(chart_data, dict): + chart_data['charts'].setdefault(graph_title, {}) + chart_data['charts'][graph_title][trace_title] = { + 'type': 'scalar', + 'value': value, + 'units': units, + 'improvement_direction': improvement_direction, + 'important': important + } + else: + PrintPerfResult(graph_title, trace_title, [value], units) diff --git a/util/lib/common/unittest_util.py b/util/lib/common/unittest_util.py new file mode 100644 index 000000000000..4779c7d05bfa --- /dev/null +++ b/util/lib/common/unittest_util.py @@ -0,0 +1,155 @@ +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Utilities for dealing with the python unittest module.""" + +import fnmatch +import re +import sys +import unittest + + +class _TextTestResult(unittest._TextTestResult): + """A test result class that can print formatted text results to a stream. + + Results printed in conformance with gtest output format, like: + [ RUN ] autofill.AutofillTest.testAutofillInvalid: "test desc." + [ OK ] autofill.AutofillTest.testAutofillInvalid + [ RUN ] autofill.AutofillTest.testFillProfile: "test desc." 
+ [ OK ] autofill.AutofillTest.testFillProfile + [ RUN ] autofill.AutofillTest.testFillProfileCrazyCharacters: "Test." + [ OK ] autofill.AutofillTest.testFillProfileCrazyCharacters + """ + def __init__(self, stream, descriptions, verbosity): + unittest._TextTestResult.__init__(self, stream, descriptions, verbosity) + self._fails = set() + + def _GetTestURI(self, test): + return '%s.%s.%s' % (test.__class__.__module__, + test.__class__.__name__, + test._testMethodName) + + def getDescription(self, test): + return '%s: "%s"' % (self._GetTestURI(test), test.shortDescription()) + + def startTest(self, test): + unittest.TestResult.startTest(self, test) + self.stream.writeln('[ RUN ] %s' % self.getDescription(test)) + + def addSuccess(self, test): + unittest.TestResult.addSuccess(self, test) + self.stream.writeln('[ OK ] %s' % self._GetTestURI(test)) + + def addError(self, test, err): + unittest.TestResult.addError(self, test, err) + self.stream.writeln('[ ERROR ] %s' % self._GetTestURI(test)) + self._fails.add(self._GetTestURI(test)) + + def addFailure(self, test, err): + unittest.TestResult.addFailure(self, test, err) + self.stream.writeln('[ FAILED ] %s' % self._GetTestURI(test)) + self._fails.add(self._GetTestURI(test)) + + def getRetestFilter(self): + return ':'.join(self._fails) + + +class TextTestRunner(unittest.TextTestRunner): + """Test Runner for displaying test results in textual format. + + Results are displayed in conformance with google test output. + """ + + def __init__(self, verbosity=1): + unittest.TextTestRunner.__init__(self, stream=sys.stderr, + verbosity=verbosity) + + def _makeResult(self): + return _TextTestResult(self.stream, self.descriptions, self.verbosity) + + +def GetTestsFromSuite(suite): + """Returns all the tests from a given test suite.""" + tests = [] + for x in suite: + if isinstance(x, unittest.TestSuite): + tests += GetTestsFromSuite(x) + else: + tests += [x] + return tests + + +def GetTestNamesFromSuite(suite): + """Returns a list of every test name in the given suite.""" + return map(lambda x: GetTestName(x), GetTestsFromSuite(suite)) + + +def GetTestName(test): + """Gets the test name of the given unittest test.""" + return '.'.join([test.__class__.__module__, + test.__class__.__name__, + test._testMethodName]) + + +def FilterTestSuite(suite, gtest_filter): + """Returns a new filtered tests suite based on the given gtest filter. + + See https://github.com/google/googletest/blob/main/docs/advanced.md + for gtest_filter specification. + """ + return unittest.TestSuite(FilterTests(GetTestsFromSuite(suite), gtest_filter)) + + +def FilterTests(all_tests, gtest_filter): + """Filter a list of tests based on the given gtest filter. + + Args: + all_tests: List of tests (unittest.TestSuite) + gtest_filter: Filter to apply. + + Returns: + Filtered subset of the given list of tests. + """ + test_names = [GetTestName(test) for test in all_tests] + filtered_names = FilterTestNames(test_names, gtest_filter) + return [test for test in all_tests if GetTestName(test) in filtered_names] + + +def FilterTestNames(all_tests, gtest_filter): + """Filter a list of test names based on the given gtest filter. + + See https://github.com/google/googletest/blob/main/docs/advanced.md + for gtest_filter specification. + + Args: + all_tests: List of test names. + gtest_filter: Filter to apply. + + Returns: + Filtered subset of the given list of test names. 
+ """ + pattern_groups = gtest_filter.split('-') + positive_patterns = ['*'] + if pattern_groups[0]: + positive_patterns = pattern_groups[0].split(':') + negative_patterns = [] + if len(pattern_groups) > 1: + negative_patterns = pattern_groups[1].split(':') + + neg_pats = None + if negative_patterns: + neg_pats = re.compile('|'.join(fnmatch.translate(p) for p in + negative_patterns)) + + tests = [] + test_set = set() + for pattern in positive_patterns: + pattern_tests = [ + test for test in all_tests + if (fnmatch.fnmatch(test, pattern) + and not (neg_pats and neg_pats.match(test)) + and test not in test_set)] + tests.extend(pattern_tests) + test_set.update(pattern_tests) + return tests diff --git a/util/lib/common/unittest_util_test.py b/util/lib/common/unittest_util_test.py new file mode 100755 index 000000000000..e775e1a5ebc1 --- /dev/null +++ b/util/lib/common/unittest_util_test.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python3 +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# pylint: disable=protected-access + +import logging +import sys +import unittest +import unittest_util + + +class FilterTestNamesTest(unittest.TestCase): + + possible_list = ["Foo.One", + "Foo.Two", + "Foo.Three", + "Bar.One", + "Bar.Two", + "Bar.Three", + "Quux.One", + "Quux.Two", + "Quux.Three"] + + def testMatchAll(self): + x = unittest_util.FilterTestNames(self.possible_list, "*") + self.assertEquals(x, self.possible_list) + + def testMatchPartial(self): + x = unittest_util.FilterTestNames(self.possible_list, "Foo.*") + self.assertEquals(x, ["Foo.One", "Foo.Two", "Foo.Three"]) + + def testMatchFull(self): + x = unittest_util.FilterTestNames(self.possible_list, "Foo.Two") + self.assertEquals(x, ["Foo.Two"]) + + def testMatchTwo(self): + x = unittest_util.FilterTestNames(self.possible_list, "Bar.*:Foo.*") + self.assertEquals(x, ["Bar.One", + "Bar.Two", + "Bar.Three", + "Foo.One", + "Foo.Two", + "Foo.Three"]) + + def testMatchWithNegative(self): + x = unittest_util.FilterTestNames(self.possible_list, "Bar.*:Foo.*-*.Three") + self.assertEquals(x, ["Bar.One", + "Bar.Two", + "Foo.One", + "Foo.Two"]) + + def testMatchOverlapping(self): + x = unittest_util.FilterTestNames(self.possible_list, "Bar.*:*.Two") + self.assertEquals(x, ["Bar.One", + "Bar.Two", + "Bar.Three", + "Foo.Two", + "Quux.Two"]) + + +if __name__ == '__main__': + logging.getLogger().setLevel(logging.DEBUG) + unittest.main(verbosity=2) diff --git a/util/lib/common/util.py b/util/lib/common/util.py new file mode 100644 index 000000000000..33c58e237b69 --- /dev/null +++ b/util/lib/common/util.py @@ -0,0 +1,151 @@ +# Copyright 2013 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Generic utilities for all python scripts.""" + +import atexit +import httplib +import os +import signal +import stat +import subprocess +import sys +import tempfile +import urlparse + + +def GetPlatformName(): + """Return a string to be used in paths for the platform.""" + if IsWindows(): + return 'win' + if IsMac(): + return 'mac' + if IsLinux(): + return 'linux' + raise NotImplementedError('Unknown platform "%s".' 
% sys.platform)
+
+
+def IsWindows():
+  return sys.platform == 'cygwin' or sys.platform.startswith('win')
+
+
+def IsLinux():
+  return sys.platform.startswith('linux')
+
+
+def IsMac():
+  return sys.platform.startswith('darwin')
+
+
+def _DeleteDir(path):
+  """Deletes a directory recursively, which must exist."""
+  # Don't use shutil.rmtree because it can't delete read-only files on Win.
+  for root, dirs, files in os.walk(path, topdown=False):
+    for name in files:
+      filename = os.path.join(root, name)
+      os.chmod(filename, stat.S_IWRITE)
+      os.remove(filename)
+    for name in dirs:
+      os.rmdir(os.path.join(root, name))
+  os.rmdir(path)
+
+
+def Delete(path):
+  """Deletes the given file or directory (recursively), which must exist."""
+  if os.path.isdir(path):
+    _DeleteDir(path)
+  else:
+    os.remove(path)
+
+
+def MaybeDelete(path):
+  """Deletes the given file or directory (recursively), if it exists."""
+  if os.path.exists(path):
+    Delete(path)
+
+
+def MakeTempDir(parent_dir=None):
+  """Creates a temporary directory and returns an absolute path to it.
+
+  The temporary directory is automatically deleted when the python interpreter
+  exits normally.
+
+  Args:
+    parent_dir: the directory to create the temp dir in. If None, the system
+        temp dir is used.
+
+  Returns:
+    The absolute path to the temporary directory.
+  """
+  path = tempfile.mkdtemp(dir=parent_dir)
+  atexit.register(MaybeDelete, path)
+  return path
+
+
+def Unzip(zip_path, output_dir):
+  """Unzips the given zip file using a system installed unzip tool.
+
+  Args:
+    zip_path: zip file to unzip.
+    output_dir: directory to unzip the contents of the zip file. The directory
+        must exist.
+
+  Raises:
+    RuntimeError if the unzip operation fails.
+  """
+  if IsWindows():
+    unzip_cmd = ['C:\\Program Files\\7-Zip\\7z.exe', 'x', '-y']
+  else:
+    unzip_cmd = ['unzip', '-o']
+  unzip_cmd += [zip_path]
+  if RunCommand(unzip_cmd, output_dir) != 0:
+    raise RuntimeError('Unable to unzip %s to %s' % (zip_path, output_dir))
+
+
+def Kill(pid):
+  """Terminates the given pid."""
+  if IsWindows():
+    subprocess.call(['taskkill.exe', '/T', '/F', '/PID', str(pid)])
+  else:
+    os.kill(pid, signal.SIGTERM)
+
+
+def RunCommand(cmd, cwd=None):
+  """Runs the given command and returns the exit code.
+
+  Args:
+    cmd: list of command arguments.
+    cwd: working directory to execute the command, or None if the current
+        working directory should be used.
+
+  Returns:
+    The exit code of the command.
+  """
+  process = subprocess.Popen(cmd, cwd=cwd)
+  process.wait()
+  return process.returncode
+
+
+def DoesUrlExist(url):
+  """Determines whether a resource exists at the given URL.
+
+  Args:
+    url: URL to be verified.
+
+  Returns:
+    True if url exists, otherwise False.
+  """
+  import socket  # Needed for the error types caught below.
+
+  parsed = urlparse.urlparse(url)
+  conn = httplib.HTTPConnection(parsed.netloc)
+  try:
+    conn.request('HEAD', parsed.path)
+    response = conn.getresponse()
+  except (socket.gaierror, socket.error):
+    return False
+  finally:
+    conn.close()
+  # Follow both permanent (301) and temporary (302) redirects.
+  if response.status == 302 or response.status == 301:
+    return DoesUrlExist(response.getheader('location'))
+  return response.status == 200
diff --git a/util/lib/results/DIR_METADATA b/util/lib/results/DIR_METADATA
new file mode 100644
index 000000000000..aea61c0bffb5
--- /dev/null
+++ b/util/lib/results/DIR_METADATA
@@ -0,0 +1,11 @@
+# Metadata information for this directory.
+# +# For more information on DIR_METADATA files, see: +# https://source.chromium.org/chromium/infra/infra/+/main:go/src/infra/tools/dirmd/README.md +# +# For the schema of this file, see Metadata message: +# https://source.chromium.org/chromium/infra/infra/+/main:go/src/infra/tools/dirmd/proto/dir_metadata.proto + +monorail { + component: "Infra>Client>Chrome" +} diff --git a/util/lib/results/OWNERS b/util/lib/results/OWNERS new file mode 100644 index 000000000000..b2f010585605 --- /dev/null +++ b/util/lib/results/OWNERS @@ -0,0 +1 @@ +bjoyce@chromium.org diff --git a/util/lib/results/__init__.py b/util/lib/results/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/util/lib/results/result_sink.py b/util/lib/results/result_sink.py new file mode 100644 index 000000000000..3996b65d8d4d --- /dev/null +++ b/util/lib/results/result_sink.py @@ -0,0 +1,207 @@ +# Copyright 2020 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +from __future__ import absolute_import +import base64 +import json +import logging +import os + +import six + +import requests # pylint: disable=import-error +from lib.results import result_types + +# Maps result_types to the luci test-result.proto. +# https://godoc.org/go.chromium.org/luci/resultdb/proto/v1#TestStatus +RESULT_MAP = { + result_types.UNKNOWN: 'ABORT', + result_types.PASS: 'PASS', + result_types.FAIL: 'FAIL', + result_types.CRASH: 'CRASH', + result_types.TIMEOUT: 'ABORT', + result_types.SKIP: 'SKIP', + result_types.NOTRUN: 'SKIP', +} + + +def TryInitClient(): + """Tries to initialize a result_sink_client object. + + Assumes that rdb stream is already running. + + Returns: + A ResultSinkClient for the result_sink server else returns None. + """ + try: + with open(os.environ['LUCI_CONTEXT']) as f: + sink = json.load(f)['result_sink'] + return ResultSinkClient(sink) + except KeyError: + return None + + +class ResultSinkClient(object): + """A class to store the sink's post configurations and make post requests. + + This assumes that the rdb stream has been called already and that the + server is listening. + """ + + def __init__(self, context): + base_url = 'http://%s/prpc/luci.resultsink.v1.Sink' % context['address'] + self.test_results_url = base_url + '/ReportTestResults' + self.report_artifacts_url = base_url + '/ReportInvocationLevelArtifacts' + + headers = { + 'Content-Type': 'application/json', + 'Accept': 'application/json', + 'Authorization': 'ResultSink %s' % context['auth_token'], + } + self.session = requests.Session() + self.session.headers.update(headers) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + + def close(self): + """Closes the session backing the sink.""" + self.session.close() + + def Post(self, + test_id, + status, + duration, + test_log, + test_file, + variant=None, + artifacts=None, + failure_reason=None, + html_artifact=None): + """Uploads the test result to the ResultSink server. + + This assumes that the rdb stream has been called already and that + server is ready listening. + + Args: + test_id: A string representing the test's name. + status: A string representing if the test passed, failed, etc... + duration: An int representing time in ms. + test_log: A string representing the test's output. + test_file: A string representing the file location of the test. 
+      variant: An optional dict of variant key value pairs as the
+          additional variant sent from test runners, which can override
+          or add to the variants passed to `rdb stream` command.
+      artifacts: An optional dict of artifacts to attach to the test.
+      failure_reason: An optional string with the reason why the test failed.
+          Should be None if the test did not fail.
+      html_artifact: An optional html-formatted string to prepend to the test's
+          log. Useful to encode clickable URL links in the test log, since
+          that won't be formatted in the test_log.
+
+    Returns:
+      N/A
+    """
+    assert status in RESULT_MAP
+    expected = status in (result_types.PASS, result_types.SKIP)
+    result_db_status = RESULT_MAP[status]
+
+    tr = {
+        'expected':
+        expected,
+        'status':
+        result_db_status,
+        'tags': [
+            {
+                'key': 'test_name',
+                'value': test_id,
+            },
+            {
+                # Status before getting mapped to result_db statuses.
+                'key': 'raw_status',
+                'value': status,
+            }
+        ],
+        'testId':
+        test_id,
+        'testMetadata': {
+            'name': test_id,
+        }
+    }
+
+    if variant:
+      tr['variant'] = {'def': variant}
+
+    artifacts = artifacts or {}
+    tr['summaryHtml'] = html_artifact if html_artifact else ''
+    if test_log:
+      # Upload the original log without any modifications.
+      b64_log = six.ensure_str(base64.b64encode(six.ensure_binary(test_log)))
+      artifacts.update({'Test Log': {'contents': b64_log}})
+      tr['summaryHtml'] += '<text-artifact artifact-id="Test Log" />'
+    if artifacts:
+      tr['artifacts'] = artifacts
+    if failure_reason:
+      tr['failureReason'] = {
+          'primaryErrorMessage': _TruncateToUTF8Bytes(failure_reason, 1024)
+      }
+
+    if duration is not None:
+      # Duration must be formatted to avoid scientific notation in case
+      # number is too small or too large. Result_db takes seconds, not ms.
+      # Need to use float() otherwise it does substitution first then divides.
+      tr['duration'] = '%.9fs' % float(duration / 1000.0)
+
+    if test_file and str(test_file).startswith('//'):
+      tr['testMetadata']['location'] = {
+          'file_name': test_file,
+          'repo': 'https://chromium.googlesource.com/chromium/src',
+      }
+
+    res = self.session.post(url=self.test_results_url,
+                            data=json.dumps({'testResults': [tr]}))
+    res.raise_for_status()
+
+  def ReportInvocationLevelArtifacts(self, artifacts):
+    """Uploads invocation-level artifacts to the ResultSink server.
+
+    This is for artifacts that don't apply to a single test but to the test
+    invocation as a whole (eg: system logs).
+
+    Args:
+      artifacts: A dict of artifacts to attach to the invocation.
+    """
+    req = {'artifacts': artifacts}
+    res = self.session.post(url=self.report_artifacts_url,
+                            data=json.dumps(req))
+    res.raise_for_status()
+
+
+def _TruncateToUTF8Bytes(s, length):
+  """ Truncates a string to a given number of bytes when encoded as UTF-8.
+
+  Ensures the given string does not take more than length bytes when encoded
+  as UTF-8. Adds trailing ellipsis (...) if truncation occurred. A truncated
+  string may end up encoding to a length slightly shorter than length because
+  only whole Unicode codepoints are dropped.
+
+  Args:
+    s: The string to truncate.
+    length: the length (in bytes) to truncate to.
+  """
+  try:
+    encoded = s.encode('utf-8')
+  # When encode throws UnicodeDecodeError in py2, it usually means the str is
+  # already encoded and has non-ascii chars. So skip re-encoding it.
+  except UnicodeDecodeError:
+    encoded = s
+  if len(encoded) > length:
+    # Truncate, leaving space for trailing ellipsis (...).
+    encoded = encoded[:length - 3]
+    # Truncating the string encoded as UTF-8 may have left the final codepoint
+    # only partially present.
Pass 'ignore' to acknowledge and ensure this is + # dropped. + return encoded.decode('utf-8', 'ignore') + "..." + return s diff --git a/util/lib/results/result_sink_test.py b/util/lib/results/result_sink_test.py new file mode 100755 index 000000000000..7d65677776b7 --- /dev/null +++ b/util/lib/results/result_sink_test.py @@ -0,0 +1,138 @@ +#!/usr/bin/env vpython3 +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import json +import os +import sys +import unittest + +# The following non-std imports are fetched via vpython. See the list at +# //.vpython3 +import mock # pylint: disable=import-error +import six + +_BUILD_UTIL_PATH = os.path.abspath( + os.path.join(os.path.dirname(__file__), '..', '..')) +if _BUILD_UTIL_PATH not in sys.path: + sys.path.insert(0, _BUILD_UTIL_PATH) + +from lib.results import result_sink +from lib.results import result_types + +_FAKE_CONTEXT = { + 'address': 'some-ip-address', + 'auth_token': 'some-auth-token', +} + + +class InitClientTest(unittest.TestCase): + @mock.patch.dict(os.environ, {}, clear=True) + def testEmptyClient(self): + # No LUCI_CONTEXT env var should prevent a client from being created. + client = result_sink.TryInitClient() + self.assertIsNone(client) + + @mock.patch.dict(os.environ, {'LUCI_CONTEXT': 'some-file.json'}) + def testBasicClient(self): + luci_context_json = { + 'result_sink': _FAKE_CONTEXT, + } + if six.PY2: + open_builtin_path = '__builtin__.open' + else: + open_builtin_path = 'builtins.open' + with mock.patch(open_builtin_path, + mock.mock_open(read_data=json.dumps(luci_context_json))): + client = result_sink.TryInitClient() + self.assertEqual( + client.test_results_url, + 'http://some-ip-address/prpc/luci.resultsink.v1.Sink/ReportTestResults') + self.assertEqual(client.session.headers['Authorization'], + 'ResultSink some-auth-token') + + @mock.patch('requests.Session') + def testReuseSession(self, mock_session): + client = result_sink.ResultSinkClient(_FAKE_CONTEXT) + client.Post('some-test', result_types.PASS, 0, 'some-test-log', None) + client.Post('some-test', result_types.PASS, 0, 'some-test-log', None) + self.assertEqual(mock_session.call_count, 1) + self.assertEqual(client.session.post.call_count, 2) + + @mock.patch('requests.Session.close') + def testCloseClient(self, mock_close): + client = result_sink.ResultSinkClient(_FAKE_CONTEXT) + client.close() + mock_close.assert_called_once() + + @mock.patch('requests.Session.close') + def testClientAsContextManager(self, mock_close): + with result_sink.ResultSinkClient(_FAKE_CONTEXT) as client: + mock_close.assert_not_called() + mock_close.assert_called_once() + + +class ClientTest(unittest.TestCase): + def setUp(self): + self.client = result_sink.ResultSinkClient(_FAKE_CONTEXT) + + @mock.patch('requests.Session.post') + def testPostPassingTest(self, mock_post): + self.client.Post('some-test', result_types.PASS, 0, 'some-test-log', None) + self.assertEqual( + mock_post.call_args[1]['url'], + 'http://some-ip-address/prpc/luci.resultsink.v1.Sink/ReportTestResults') + data = json.loads(mock_post.call_args[1]['data']) + self.assertEqual(data['testResults'][0]['testId'], 'some-test') + self.assertEqual(data['testResults'][0]['status'], 'PASS') + + @mock.patch('requests.Session.post') + def testPostFailingTest(self, mock_post): + self.client.Post('some-test', + result_types.FAIL, + 0, + 'some-test-log', + None, + failure_reason='omg test failure') + data = 
json.loads(mock_post.call_args[1]['data']) + self.assertEqual(data['testResults'][0]['status'], 'FAIL') + self.assertEqual(data['testResults'][0]['testMetadata']['name'], + 'some-test') + self.assertEqual( + data['testResults'][0]['failureReason']['primaryErrorMessage'], + 'omg test failure') + + @mock.patch('requests.Session.post') + def testPostWithTestFile(self, mock_post): + self.client.Post('some-test', result_types.PASS, 0, 'some-test-log', + '//some/test.cc') + data = json.loads(mock_post.call_args[1]['data']) + self.assertEqual( + data['testResults'][0]['testMetadata']['location']['file_name'], + '//some/test.cc') + self.assertEqual(data['testResults'][0]['testMetadata']['name'], + 'some-test') + self.assertIsNotNone(data['testResults'][0]['summaryHtml']) + + @mock.patch('requests.Session.post') + def testPostWithVariant(self, mock_post): + self.client.Post('some-test', + result_types.PASS, + 0, + 'some-test-log', + None, + variant={ + 'key1': 'value1', + 'key2': 'value2' + }) + data = json.loads(mock_post.call_args[1]['data']) + self.assertEqual(data['testResults'][0]['variant'], + {'def': { + 'key1': 'value1', + 'key2': 'value2' + }}) + + +if __name__ == '__main__': + unittest.main() diff --git a/util/lib/results/result_types.py b/util/lib/results/result_types.py new file mode 100644 index 000000000000..f8b52c173e0d --- /dev/null +++ b/util/lib/results/result_types.py @@ -0,0 +1,25 @@ +# Copyright 2021 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Module containing base test results classes.""" + +# The test passed. +PASS = 'SUCCESS' + +# The test was intentionally skipped. +SKIP = 'SKIPPED' + +# The test failed. +FAIL = 'FAILURE' + +# The test caused the containing process to crash. +CRASH = 'CRASH' + +# The test timed out. +TIMEOUT = 'TIMEOUT' + +# The test ran, but we couldn't determine what happened. +UNKNOWN = 'UNKNOWN' + +# The test did not run. +NOTRUN = 'NOTRUN' diff --git a/util/process_version.gni b/util/process_version.gni new file mode 100644 index 000000000000..cd9671ccdb82 --- /dev/null +++ b/util/process_version.gni @@ -0,0 +1,128 @@ +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/util/lastchange.gni") + +# Runs the version processing script over the given template file to produce +# an output file. This is used for generating various forms of files that +# incorporate the product name and version. +# +# Unlike GYP, this will actually compile the resulting file, so you don't need +# to add it separately to the sources, just depend on the target. +# +# In GYP this is a rule that runs once per ".ver" file. In GN this just +# processes one file per invocation of the template so you may have to have +# multiple targets. +# +# Parameters: +# sources (optional): +# List of file names to read. When converting a GYP target, this should +# list the 'source' (see above) as well as any extra_variable_files. +# The files will be passed to version.py in the order specified here. +# +# output: +# File name of file to write. In GYP this is unspecified and it will +# make up a file name for you based on the input name, and tack on +# "_version.rc" to the end. But in GN you need to specify the full name. +# +# template_file (optional): +# Template file to use (not a list). 
Most Windows users that want to use +# this to process a .rc template should use process_version_rc_template(), +# defined in //chrome/process_version_rc_template.gni, instead. +# +# extra_args (optional): +# Extra arguments to pass to version.py. Any "-f " args should +# use sources instead. +# +# process_only (optional, defaults to false) +# Set to generate only one action that processes the version file and +# doesn't attempt to link the result into a source set. This is for if +# you are processing the version as data only. +# +# visibility (optional) +# +# Example: +# process_version("myversion") { +# sources = [ +# "//chrome/VERSION" +# "myfile.h.in" +# ] +# output = "$target_gen_dir/myfile.h" +# extra_args = [ "-e", "FOO=42" ] +# } +template("process_version") { + assert(defined(invoker.output), "Output must be defined for $target_name") + + process_only = defined(invoker.process_only) && invoker.process_only + + if (process_only) { + action_name = target_name + } else { + action_name = target_name + "_action" + source_set_name = target_name + } + + action(action_name) { + script = "//build/util/version.py" + + inputs = [ lastchange_file ] + if (defined(invoker.inputs)) { + inputs += invoker.inputs + } + if (defined(invoker.template_file)) { + inputs += [ invoker.template_file ] + } + + outputs = [ invoker.output ] + + args = [] + + if (is_official_build) { + args += [ "--official" ] + } + + if (defined(invoker.sources)) { + inputs += invoker.sources + foreach(i, invoker.sources) { + args += [ + "-f", + rebase_path(i, root_build_dir), + ] + } + } + + if (defined(invoker.extra_args)) { + args += invoker.extra_args + } + args += [ + "-o", + rebase_path(invoker.output, root_build_dir), + ] + if (defined(invoker.template_file)) { + args += [ rebase_path(invoker.template_file, root_build_dir) ] + } + + forward_variables_from(invoker, [ "deps" ]) + + if (process_only) { + # When processing only, visibility gets applied to this target. + forward_variables_from(invoker, [ "visibility" ]) + } else { + # When linking the result, only the source set can depend on the action. + visibility = [ ":$source_set_name" ] + } + } + + if (!process_only) { + source_set(source_set_name) { + forward_variables_from(invoker, + [ + "visibility", + "deps", + ]) + sources = get_target_outputs(":$action_name") + public_deps = [ ":$action_name" ] + } + } +} diff --git a/util/version.py b/util/version.py new file mode 100755 index 000000000000..9bf51cd35122 --- /dev/null +++ b/util/version.py @@ -0,0 +1,266 @@ +#!/usr/bin/env python3 +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" +version.py -- Chromium version string substitution utility. +""" + + +import argparse +import os +import sys + +import android_chrome_version + + +def FetchValuesFromFile(values_dict, file_name): + """ + Fetches KEYWORD=VALUE settings from the specified file. + + Everything to the left of the first '=' is the keyword, + everything to the right is the value. No stripping of + white space, so beware. + + The file must exist, otherwise you get the Python exception from open(). + """ + with open(file_name, 'r') as f: + for line in f.readlines(): + key, val = line.rstrip('\r\n').split('=', 1) + values_dict[key] = val + + +def FetchValues(file_list, is_official_build=None): + """ + Returns a dictionary of values to be used for substitution. + + Populates the dictionary with KEYWORD=VALUE settings from the files in + 'file_list'. 
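The parsing contract stated here is easy to miss: everything left of the first '=' is the key, everything right of it is the value, and nothing is stripped. A standalone sketch of that behavior, assuming a hypothetical VERSION file in the usual Chrome layout:

values = {}
with open('VERSION') as f:  # hypothetical path
  for line in f:
    key, val = line.rstrip('\r\n').split('=', 1)
    # No whitespace stripping, so a line 'KEY = 1' keeps both spaces.
    values[key] = val
# For a file containing MAJOR=114, MINOR=0, BUILD=5735, PATCH=331 this
# yields {'MAJOR': '114', 'MINOR': '0', 'BUILD': '5735', 'PATCH': '331'}.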
+ + Explicitly adds the following value from internal calculations: + + OFFICIAL_BUILD + """ + CHROME_BUILD_TYPE = os.environ.get('CHROME_BUILD_TYPE') + if CHROME_BUILD_TYPE == '_official' or is_official_build: + official_build = '1' + else: + official_build = '0' + + values = dict( + OFFICIAL_BUILD = official_build, + ) + + for file_name in file_list: + FetchValuesFromFile(values, file_name) + + script_dirname = os.path.dirname(os.path.realpath(__file__)) + lastchange_filename = os.path.join(script_dirname, "LASTCHANGE") + lastchange_values = {} + FetchValuesFromFile(lastchange_values, lastchange_filename) + + for placeholder_key, placeholder_value in values.items(): + values[placeholder_key] = SubstTemplate(placeholder_value, + lastchange_values) + + return values + + +def SubstTemplate(contents, values): + """ + Returns the template with substituted values from the specified dictionary. + + Keywords to be substituted are surrounded by '@': @KEYWORD@. + + No attempt is made to avoid recursive substitution. The order + of evaluation is random based on the order of the keywords returned + by the Python dictionary. So do NOT substitute a value that + contains any @KEYWORD@ strings expecting them to be recursively + substituted, okay? + """ + for key, val in values.items(): + try: + contents = contents.replace('@' + key + '@', val) + except TypeError: + print(repr(key), repr(val)) + return contents + + +def SubstFile(file_name, values): + """ + Returns the contents of the specified file_name with substituted values. + + Substituted values come from the specified dictionary. + + This is like SubstTemplate, except it operates on a file. + """ + template = open(file_name, 'r').read() + return SubstTemplate(template, values) + + +def WriteIfChanged(file_name, contents): + """ + Writes the specified contents to the specified file_name. + + Does nothing if the contents aren't different than the current contents. + """ + try: + old_contents = open(file_name, 'r').read() + except EnvironmentError: + pass + else: + if contents == old_contents: + return + os.unlink(file_name) + open(file_name, 'w').write(contents) + + +def BuildParser(): + """Build argparse parser, with added arguments.""" + parser = argparse.ArgumentParser() + parser.add_argument('-f', '--file', action='append', default=[], + help='Read variables from FILE.') + parser.add_argument('-i', '--input', default=None, + help='Read strings to substitute from FILE.') + parser.add_argument('-o', '--output', default=None, + help='Write substituted strings to FILE.') + parser.add_argument('-t', '--template', default=None, + help='Use TEMPLATE as the strings to substitute.') + parser.add_argument( + '-e', + '--eval', + action='append', + default=[], + help='Evaluate VAL after reading variables. Can be used ' + 'to synthesize variables. e.g. 
-e \'PATCH_HI=int(' + 'PATCH)//256.') + parser.add_argument( + '-a', + '--arch', + default=None, + choices=android_chrome_version.ARCH_CHOICES, + help='Set which cpu architecture the build is for.') + parser.add_argument('--os', default=None, help='Set the target os.') + parser.add_argument('--official', action='store_true', + help='Whether the current build should be an official ' + 'build, used in addition to the environment ' + 'variable.') + parser.add_argument('--next', + action='store_true', + help='Whether the current build should be a "next" ' + 'build, which targets pre-release versions of Android.') + parser.add_argument('args', nargs=argparse.REMAINDER, + help='For compatibility: INPUT and OUTPUT can be ' + 'passed as positional arguments.') + return parser + + +def BuildEvals(options, parser): + """Construct a dict of passed '-e' arguments for evaluating.""" + evals = {} + for expression in options.eval: + try: + evals.update(dict([expression.split('=', 1)])) + except ValueError: + parser.error('-e requires VAR=VAL') + return evals + + +def ModifyOptionsCompat(options, parser): + """Support compatibility with old versions. + + Specifically, for old versions that considered the first two + positional arguments shorthands for --input and --output. + """ + while len(options.args) and (options.input is None or options.output is None): + if options.input is None: + options.input = options.args.pop(0) + elif options.output is None: + options.output = options.args.pop(0) + if options.args: + parser.error('Unexpected arguments: %r' % options.args) + + +def GenerateValues(options, evals): + """Construct a dict of raw values used to generate output. + + e.g. this could return a dict like + { + 'BUILD': 74, + } + + which would be used to resolve a template like + 'build = "@BUILD@"' into 'build = "74"' + + """ + values = FetchValues(options.file, options.official) + + for key, val in evals.items(): + values[key] = str(eval(val, globals(), values)) + + if options.os == 'android': + android_chrome_version_codes = android_chrome_version.GenerateVersionCodes( + values, options.arch, options.next) + values.update(android_chrome_version_codes) + + return values + + +def GenerateOutputContents(options, values): + """Construct output string (e.g. from template). + + Arguments: + options -- argparse parsed arguments + values -- dict with raw values used to resolve the keywords in a template + string + """ + + if options.template is not None: + return SubstTemplate(options.template, values) + elif options.input: + return SubstFile(options.input, values) + else: + # Generate a default set of version information. + return """MAJOR=%(MAJOR)s +MINOR=%(MINOR)s +BUILD=%(BUILD)s +PATCH=%(PATCH)s +LASTCHANGE=%(LASTCHANGE)s +OFFICIAL_BUILD=%(OFFICIAL_BUILD)s +""" % values + + +def BuildOutput(args): + """Gets all input and output values needed for writing output.""" + # Build argparse parser with arguments + parser = BuildParser() + options = parser.parse_args(args) + + # Get dict of passed '-e' arguments for evaluating + evals = BuildEvals(options, parser) + # For compatibility with interface that considered first two positional + # arguments shorthands for --input and --output. 
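The '-e' help string above is where GenerateValues() gets its synthesized variables: each expression is eval()'d with the values dict as its local namespace, so keys read from '-f' files are visible by name. A small worked sketch (PATCH_HI and PATCH_LO mirror the help string's example):

values = {'PATCH': '331'}
evals = {'PATCH_HI': 'int(PATCH)//256', 'PATCH_LO': 'int(PATCH)%256'}
for key, val in evals.items():
  # Same call shape as GenerateValues(): the dict doubles as eval() locals.
  values[key] = str(eval(val, globals(), values))
# values ends up as {'PATCH': '331', 'PATCH_HI': '1', 'PATCH_LO': '75'}.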
+ ModifyOptionsCompat(options, parser) + + # Get the raw values that will be used the generate the output + values = GenerateValues(options, evals) + # Get the output string + contents = GenerateOutputContents(options, values) + + return {'options': options, 'contents': contents} + + +def main(): + output = BuildOutput(sys.argv[1:]) + + if output['options'].output is not None: + WriteIfChanged(output['options'].output, output['contents']) + else: + print(output['contents']) + + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/util/version_test.py b/util/version_test.py new file mode 100644 index 000000000000..f9b468f0505d --- /dev/null +++ b/util/version_test.py @@ -0,0 +1,164 @@ +# Copyright 2019 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import os +import unittest + +import mock +import version + + +def _ReplaceArgs(args, *replacements): + new_args = args[:] + for flag, val in replacements: + flag_index = args.index(flag) + new_args[flag_index + 1] = val + return new_args + + +class _VersionTest(unittest.TestCase): + """Unittests for the version module. + """ + + _CHROME_VERSION_FILE = os.path.join( + os.path.dirname(__file__), os.pardir, os.pardir, 'chrome', 'VERSION') + + _SCRIPT = os.path.join(os.path.dirname(__file__), 'version.py') + + _EXAMPLE_VERSION = { + 'MAJOR': '74', + 'MINOR': '0', + 'BUILD': '3720', + 'PATCH': '0', + } + + _EXAMPLE_TEMPLATE = ( + 'full = "@MAJOR@.@MINOR@.@BUILD@.@PATCH@" ' + 'major = "@MAJOR@" minor = "@MINOR@" ' + 'build = "@BUILD@" patch = "@PATCH@" version_id = @VERSION_ID@ ') + + _ANDROID_CHROME_VARS = [ + 'chrome_version_code', + 'monochrome_version_code', + 'trichrome_version_code', + 'webview_stable_version_code', + 'webview_beta_version_code', + 'webview_dev_version_code', + ] + + _EXAMPLE_ANDROID_TEMPLATE = ( + _EXAMPLE_TEMPLATE + ''.join( + ['%s = "@%s@" ' % (el, el.upper()) for el in _ANDROID_CHROME_VARS])) + + _EXAMPLE_ARGS = [ + '-f', + _CHROME_VERSION_FILE, + '-t', + _EXAMPLE_TEMPLATE, + ] + + _EXAMPLE_ANDROID_ARGS = _ReplaceArgs(_EXAMPLE_ARGS, + ['-t', _EXAMPLE_ANDROID_TEMPLATE]) + [ + '-a', + 'arm', + '--os', + 'android', + ] + + @staticmethod + def _RunBuildOutput(new_version_values={}, + get_new_args=lambda old_args: old_args): + """Parameterized helper method for running the main testable method in + version.py. 
+ + Keyword arguments: + new_version_values -- dict used to update _EXAMPLE_VERSION + get_new_args -- lambda for updating _EXAMPLE_ANDROID_ARGS + """ + + with mock.patch('version.FetchValuesFromFile') as \ + fetch_values_from_file_mock: + + fetch_values_from_file_mock.side_effect = (lambda values, file : + values.update( + dict(_VersionTest._EXAMPLE_VERSION, **new_version_values))) + + new_args = get_new_args(_VersionTest._EXAMPLE_ARGS) + return version.BuildOutput(new_args) + + def testFetchValuesFromFile(self): + """It returns a dict in correct format - { : }, to verify + assumption of other tests that mock this function + """ + result = {} + version.FetchValuesFromFile(result, self._CHROME_VERSION_FILE) + + for key, val in result.items(): + self.assertIsInstance(key, str) + self.assertIsInstance(val, str) + + def testBuildOutputAndroid(self): + """Assert it gives includes assignments of expected variables""" + output = self._RunBuildOutput( + get_new_args=lambda args: self._EXAMPLE_ANDROID_ARGS) + contents = output['contents'] + + self.assertRegex(contents, r'\bchrome_version_code = "\d+"\s') + self.assertRegex(contents, r'\bmonochrome_version_code = "\d+"\s') + self.assertRegex(contents, r'\btrichrome_version_code = "\d+"\s') + self.assertRegex(contents, r'\bwebview_stable_version_code = "\d+"\s') + self.assertRegex(contents, r'\bwebview_beta_version_code = "\d+"\s') + self.assertRegex(contents, r'\bwebview_dev_version_code = "\d+"\s') + + def testBuildOutputAndroidArchVariantsArm64(self): + """Assert 64-bit-specific version codes""" + new_template = ( + self._EXAMPLE_ANDROID_TEMPLATE + + "monochrome_64_32_version_code = \"@MONOCHROME_64_32_VERSION_CODE@\" " + "monochrome_64_version_code = \"@MONOCHROME_64_VERSION_CODE@\" " + "trichrome_64_32_version_code = \"@TRICHROME_64_32_VERSION_CODE@\" " + "trichrome_64_version_code = \"@TRICHROME_64_VERSION_CODE@\" ") + args_with_template = _ReplaceArgs(self._EXAMPLE_ANDROID_ARGS, + ['-t', new_template]) + new_args = _ReplaceArgs(args_with_template, ['-a', 'arm64']) + output = self._RunBuildOutput(get_new_args=lambda args: new_args) + contents = output['contents'] + + self.assertRegex(contents, r'\bmonochrome_64_32_version_code = "\d+"\s') + self.assertRegex(contents, r'\bmonochrome_64_version_code = "\d+"\s') + self.assertRegex(contents, r'\btrichrome_64_32_version_code = "\d+"\s') + self.assertRegex(contents, r'\btrichrome_64_version_code = "\d+"\s') + + def testBuildOutputAndroidArchVariantsX64(self): + """Assert 64-bit-specific version codes""" + new_template = ( + self._EXAMPLE_ANDROID_TEMPLATE + + "monochrome_64_32_version_code = \"@MONOCHROME_64_32_VERSION_CODE@\" " + "monochrome_64_version_code = \"@MONOCHROME_64_VERSION_CODE@\" " + "trichrome_64_32_version_code = \"@TRICHROME_64_32_VERSION_CODE@\" " + "trichrome_64_version_code = \"@TRICHROME_64_VERSION_CODE@\" ") + args_with_template = _ReplaceArgs(self._EXAMPLE_ANDROID_ARGS, + ['-t', new_template]) + new_args = _ReplaceArgs(args_with_template, ['-a', 'x64']) + output = self._RunBuildOutput(get_new_args=lambda args: new_args) + contents = output['contents'] + + self.assertRegex(contents, r'\bmonochrome_64_32_version_code = "\d+"\s') + self.assertRegex(contents, r'\bmonochrome_64_version_code = "\d+"\s') + self.assertRegex(contents, r'\btrichrome_64_32_version_code = "\d+"\s') + self.assertRegex(contents, r'\btrichrome_64_version_code = "\d+"\s') + + def testBuildOutputAndroidChromeArchInput(self): + """Assert it raises an exception when using an invalid architecture input""" + new_args 
= _ReplaceArgs(self._EXAMPLE_ANDROID_ARGS, ['-a', 'foobar']) + # Mock sys.stderr because argparse will print to stderr when we pass + # the invalid '-a' value. + with self.assertRaises(SystemExit) as cm, mock.patch('sys.stderr'): + self._RunBuildOutput(get_new_args=lambda args: new_args) + + self.assertEqual(cm.exception.code, 2) + + +if __name__ == '__main__': + unittest.main() diff --git a/vs_toolchain.py b/vs_toolchain.py new file mode 100755 index 000000000000..a9cd6f03d921 --- /dev/null +++ b/vs_toolchain.py @@ -0,0 +1,589 @@ +#!/usr/bin/env python3 +# Copyright 2014 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +import collections +import glob +import json +import os +import platform +import re +import shutil +import stat +import subprocess +import sys + +from gn_helpers import ToGNString + +# VS 2022 17.4 with 10.0.22621.0 SDK with ARM64 libraries and UWP support. +# See go/chromium-msvc-toolchain for instructions about how to update the +# toolchain. +# +# When updating the toolchain, consider the following areas impacted by the +# toolchain version: +# +# * //base/win/windows_version.cc NTDDI preprocessor check +# Triggers a compiler error if the available SDK is older than the minimum. +# * SDK_VERSION in this file +# Must match the packaged/required SDK version. +# * SDK_VERSION in build/toolchain/win/setup_toolchain.py. +# * //build/config/win/BUILD.gn NTDDI_VERSION value +# Affects the availability of APIs in the toolchain headers. +# * //docs/windows_build_instructions.md mentions of VS or Windows SDK. +# Keeps the document consistent with the toolchain version. +# * //tools/win/setenv.py +# Add/remove VS versions when upgrading to a new VS version. +# * MSVC_TOOLSET_VERSION in this file +# Maps between Visual Studio version and MSVC toolset +# * MSVS_VERSIONS in this file +# Records the packaged and default version of Visual Studio +TOOLCHAIN_HASH = '27370823e7' +SDK_VERSION = '10.0.22621.0' + +script_dir = os.path.dirname(os.path.realpath(__file__)) +json_data_file = os.path.join(script_dir, 'win_toolchain.json') + +# VS versions are listed in descending order of priority (highest first). +# The first version is assumed by this script to be the one that is packaged, +# which makes a difference for the arm64 runtime. +MSVS_VERSIONS = collections.OrderedDict([ + ('2022', '17.0'), # Default and packaged version of Visual Studio. + ('2019', '16.0'), + ('2017', '15.0'), +]) + +# List of preferred VC toolset version based on MSVS +# Order is not relevant for this dictionary. +MSVC_TOOLSET_VERSION = { + '2022': 'VC143', + '2019': 'VC142', + '2017': 'VC141', +} + +def _HostIsWindows(): + """Returns True if running on a Windows host (including under cygwin).""" + return sys.platform in ('win32', 'cygwin') + +def SetEnvironmentAndGetRuntimeDllDirs(): + """Sets up os.environ to use the depot_tools VS toolchain with gyp, and + returns the location of the VC runtime DLLs so they can be copied into + the output directory after gyp generation. + + Return value is [x64path, x86path, 'Arm64Unused'] or None. arm64path is + generated separately because there are multiple folders for the arm64 VC + runtime. + """ + vs_runtime_dll_dirs = None + depot_tools_win_toolchain = \ + bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1'))) + # When running on a non-Windows host, only do this if the SDK has explicitly + # been downloaded before (in which case json_data_file will exist). 
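Before the code below reads it, it helps to know what win_toolchain.json holds. A sketch of the fields SetEnvironmentAndGetRuntimeDllDirs() consumes, with illustrative values (real files are written by depot_tools' get_toolchain_if_necessary.py):

toolchain_data = {
    'path': 'C:\\vs_files\\27370823e7',                # illustrative paths
    'version': '2022',
    'win_sdk': 'C:\\vs_files\\27370823e7\\win_sdk',
    'wdk': 'C:\\vs_files\\27370823e7\\wdk',
    # Older packages shipped two runtime dirs (x64, x86); newer ones add a
    # third for arm64, which is why the code below pads to length three.
    'runtime_dirs': ['C:\\vs_files\\sys64', 'C:\\vs_files\\sys32'],
}
vs_runtime_dll_dirs = toolchain_data['runtime_dirs']
if len(vs_runtime_dll_dirs) == 2:
  vs_runtime_dll_dirs.append('Arm64Unused')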
+ if ((_HostIsWindows() or os.path.exists(json_data_file)) + and depot_tools_win_toolchain): + if ShouldUpdateToolchain(): + if len(sys.argv) > 1 and sys.argv[1] == 'update': + update_result = Update() + else: + update_result = Update(no_download=True) + if update_result != 0: + raise Exception('Failed to update, error code %d.' % update_result) + with open(json_data_file, 'r') as tempf: + toolchain_data = json.load(tempf) + + toolchain = toolchain_data['path'] + version = toolchain_data['version'] + win_sdk = toolchain_data.get('win_sdk') + wdk = toolchain_data['wdk'] + # TODO(scottmg): The order unfortunately matters in these. They should be + # split into separate keys for x64/x86/arm64. (See CopyDlls call below). + # http://crbug.com/345992 + vs_runtime_dll_dirs = toolchain_data['runtime_dirs'] + # The number of runtime_dirs in the toolchain_data was two (x64/x86) but + # changed to three (x64/x86/arm64) and this code needs to handle both + # possibilities, which can change independently from this code. + if len(vs_runtime_dll_dirs) == 2: + vs_runtime_dll_dirs.append('Arm64Unused') + + os.environ['GYP_MSVS_OVERRIDE_PATH'] = toolchain + + os.environ['WINDOWSSDKDIR'] = win_sdk + os.environ['WDK_DIR'] = wdk + # Include the VS runtime in the PATH in case it's not machine-installed. + runtime_path = os.path.pathsep.join(vs_runtime_dll_dirs) + os.environ['PATH'] = runtime_path + os.path.pathsep + os.environ['PATH'] + elif sys.platform == 'win32' and not depot_tools_win_toolchain: + if not 'GYP_MSVS_OVERRIDE_PATH' in os.environ: + os.environ['GYP_MSVS_OVERRIDE_PATH'] = DetectVisualStudioPath() + + # When using an installed toolchain these files aren't needed in the output + # directory in order to run binaries locally, but they are needed in order + # to create isolates or the mini_installer. Copying them to the output + # directory ensures that they are available when needed. + bitness = platform.architecture()[0] + # When running 64-bit python the x64 DLLs will be in System32 + # ARM64 binaries will not be available in the system directories because we + # don't build on ARM64 machines. + x64_path = 'System32' if bitness == '64bit' else 'Sysnative' + x64_path = os.path.join(os.path.expandvars('%windir%'), x64_path) + vs_runtime_dll_dirs = [x64_path, + os.path.join(os.path.expandvars('%windir%'), + 'SysWOW64'), + 'Arm64Unused'] + + return vs_runtime_dll_dirs + + +def _RegistryGetValueUsingWinReg(key, value): + """Use the _winreg module to obtain the value of a registry key. + + Args: + key: The registry key. + value: The particular registry value to read. + Return: + contents of the registry key's value, or None on failure. Throws + ImportError if _winreg is unavailable. + """ + import _winreg + try: + root, subkey = key.split('\\', 1) + assert root == 'HKLM' # Only need HKLM for now. + with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey: + return _winreg.QueryValueEx(hkey, value)[0] + except WindowsError: + return None + + +def _RegistryGetValue(key, value): + try: + return _RegistryGetValueUsingWinReg(key, value) + except ImportError: + raise Exception('The python library _winreg not found.') + + +def GetVisualStudioVersion(): + """Return best available version of Visual Studio. 
+ """ + supported_versions = list(MSVS_VERSIONS.keys()) + + # VS installed in depot_tools for Googlers + if bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1'))): + return supported_versions[0] + + # VS installed in system for external developers + supported_versions_str = ', '.join('{} ({})'.format(v,k) + for k,v in MSVS_VERSIONS.items()) + available_versions = [] + for version in supported_versions: + # Checking vs%s_install environment variables. + # For example, vs2019_install could have the value + # "C:\Program Files (x86)\Microsoft Visual Studio\2019\Community". + # Only vs2017_install, vs2019_install and vs2022_install are supported. + path = os.environ.get('vs%s_install' % version) + if path and os.path.exists(path): + available_versions.append(version) + break + # Detecting VS under possible paths. + if version >= '2022': + program_files_path_variable = '%ProgramFiles%' + else: + program_files_path_variable = '%ProgramFiles(x86)%' + path = os.path.expandvars(program_files_path_variable + + '/Microsoft Visual Studio/%s' % version) + if path and any( + os.path.exists(os.path.join(path, edition)) + for edition in ('Enterprise', 'Professional', 'Community', 'Preview', + 'BuildTools')): + available_versions.append(version) + break + + if not available_versions: + raise Exception('No supported Visual Studio can be found.' + ' Supported versions are: %s.' % supported_versions_str) + return available_versions[0] + + +def DetectVisualStudioPath(): + """Return path to the installed Visual Studio. + """ + + # Note that this code is used from + # build/toolchain/win/setup_toolchain.py as well. + version_as_year = GetVisualStudioVersion() + + # The VC++ >=2017 install location needs to be located using COM instead of + # the registry. For details see: + # https://blogs.msdn.microsoft.com/heaths/2016/09/15/changes-to-visual-studio-15-setup/ + # For now we use a hardcoded default with an environment variable override. + if version_as_year >= '2022': + program_files_path_variable = '%ProgramFiles%' + else: + program_files_path_variable = '%ProgramFiles(x86)%' + for path in (os.environ.get('vs%s_install' % version_as_year), + os.path.expandvars(program_files_path_variable + + '/Microsoft Visual Studio/%s/Enterprise' % + version_as_year), + os.path.expandvars(program_files_path_variable + + '/Microsoft Visual Studio/%s/Professional' % + version_as_year), + os.path.expandvars(program_files_path_variable + + '/Microsoft Visual Studio/%s/Community' % + version_as_year), + os.path.expandvars(program_files_path_variable + + '/Microsoft Visual Studio/%s/Preview' % + version_as_year), + os.path.expandvars(program_files_path_variable + + '/Microsoft Visual Studio/%s/BuildTools' % + version_as_year)): + if path and os.path.exists(path): + return path + + raise Exception('Visual Studio Version %s not found.' % version_as_year) + + +def _CopyRuntimeImpl(target, source, verbose=True): + """Copy |source| to |target| if it doesn't already exist or if it needs to be + updated (comparing last modified time as an approximate float match as for + some reason the values tend to differ by ~1e-07 despite being copies of the + same file... https://crbug.com/603603). + """ + if (os.path.isdir(os.path.dirname(target)) and + (not os.path.isfile(target) or + abs(os.stat(target).st_mtime - os.stat(source).st_mtime) >= 0.01)): + if verbose: + print('Copying %s to %s...' % (source, target)) + if os.path.exists(target): + # Make the file writable so that we can delete it now, and keep it + # readable. 
+ os.chmod(target, stat.S_IWRITE | stat.S_IREAD) + os.unlink(target) + shutil.copy2(source, target) + # Make the file writable so that we can overwrite or delete it later, + # keep it readable. + os.chmod(target, stat.S_IWRITE | stat.S_IREAD) + +def _SortByHighestVersionNumberFirst(list_of_str_versions): + """This sorts |list_of_str_versions| according to version number rules + so that version "1.12" is higher than version "1.9". Does not work + with non-numeric versions like 1.4.a8 which will be higher than + 1.4.a12. It does handle the versions being embedded in file paths. + """ + def to_int_if_int(x): + try: + return int(x) + except ValueError: + return x + + def to_number_sequence(x): + part_sequence = re.split(r'[\\/\.]', x) + return [to_int_if_int(x) for x in part_sequence] + + list_of_str_versions.sort(key=to_number_sequence, reverse=True) + + +def _CopyUCRTRuntime(target_dir, source_dir, target_cpu, suffix): + """Copy both the msvcp and vccorlib runtime DLLs, only if the target doesn't + exist, but the target directory does exist.""" + if target_cpu == 'arm64': + # Windows ARM64 VCRuntime is located at {toolchain_root}/VC/Redist/MSVC/ + # {x.y.z}/[debug_nonredist/]arm64/Microsoft.VC14x.CRT/. + # Select VC toolset directory based on Visual Studio version + vc_redist_root = FindVCRedistRoot() + if suffix.startswith('.'): + vc_toolset_dir = 'Microsoft.{}.CRT' \ + .format(MSVC_TOOLSET_VERSION[GetVisualStudioVersion()]) + source_dir = os.path.join(vc_redist_root, + 'arm64', vc_toolset_dir) + else: + vc_toolset_dir = 'Microsoft.{}.DebugCRT' \ + .format(MSVC_TOOLSET_VERSION[GetVisualStudioVersion()]) + source_dir = os.path.join(vc_redist_root, 'debug_nonredist', + 'arm64', vc_toolset_dir) + file_parts = ('msvcp140', 'vccorlib140', 'vcruntime140') + if target_cpu == 'x64' and GetVisualStudioVersion() != '2017': + file_parts = file_parts + ('vcruntime140_1', ) + for file_part in file_parts: + dll = file_part + suffix + target = os.path.join(target_dir, dll) + source = os.path.join(source_dir, dll) + _CopyRuntimeImpl(target, source) + # We must copy ucrtbased.dll for all CPU types. The rest of the Universal CRT + # is installed as part of the OS in Windows 10 and beyond. + if not suffix.startswith('.'): + win_sdk_dir = os.path.normpath( + os.environ.get( + 'WINDOWSSDKDIR', + os.path.expandvars('%ProgramFiles(x86)%' + '\\Windows Kits\\10'))) + # ucrtbased.dll is located at {win_sdk_dir}/bin/{a.b.c.d}/{target_cpu}/ + # ucrt/. + sdk_bin_root = os.path.join(win_sdk_dir, 'bin') + sdk_bin_sub_dirs = glob.glob(os.path.join(sdk_bin_root, '10.*')) + # Select the most recent SDK if there are multiple versions installed. + _SortByHighestVersionNumberFirst(sdk_bin_sub_dirs) + for directory in sdk_bin_sub_dirs: + sdk_redist_root_version = os.path.join(sdk_bin_root, directory) + if not os.path.isdir(sdk_redist_root_version): + continue + source_dir = os.path.join(sdk_redist_root_version, target_cpu, 'ucrt') + if not os.path.isdir(source_dir): + continue + break + _CopyRuntimeImpl(os.path.join(target_dir, 'ucrtbase' + suffix), + os.path.join(source_dir, 'ucrtbase' + suffix)) + + +def FindVCComponentRoot(component): + """Find the most recent Tools or Redist or other directory in an MSVC install. + Typical results are {toolchain_root}/VC/{component}/MSVC/{x.y.z}. The {x.y.z} + version number part changes frequently so the highest version number found is + used. 
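A worked example of _SortByHighestVersionNumberFirst() above, showing why a plain string sort would be wrong: numeric path components are compared as integers, so 14.29 outranks 14.9 (the directory names are illustrative):

dirs = ['VC/Tools/MSVC/14.9.1', 'VC/Tools/MSVC/14.29.30133']
_SortByHighestVersionNumberFirst(dirs)
# Highest version first: ['VC/Tools/MSVC/14.29.30133', 'VC/Tools/MSVC/14.9.1']
# A lexicographic sort would have put '14.9.1' first, since '9' > '2'.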
+ """ + + SetEnvironmentAndGetRuntimeDllDirs() + assert ('GYP_MSVS_OVERRIDE_PATH' in os.environ) + vc_component_msvc_root = os.path.join(os.environ['GYP_MSVS_OVERRIDE_PATH'], + 'VC', component, 'MSVC') + vc_component_msvc_contents = glob.glob( + os.path.join(vc_component_msvc_root, '14.*')) + # Select the most recent toolchain if there are several. + _SortByHighestVersionNumberFirst(vc_component_msvc_contents) + for directory in vc_component_msvc_contents: + if os.path.isdir(directory): + return directory + raise Exception('Unable to find the VC %s directory.' % component) + + +def FindVCRedistRoot(): + """In >=VS2017, Redist binaries are located in + {toolchain_root}/VC/Redist/MSVC/{x.y.z}/{target_cpu}/. + + This returns the '{toolchain_root}/VC/Redist/MSVC/{x.y.z}/' path. + """ + return FindVCComponentRoot('Redist') + + +def _CopyRuntime(target_dir, source_dir, target_cpu, debug): + """Copy the VS runtime DLLs, only if the target doesn't exist, but the target + directory does exist. Handles VS 2015, 2017 and 2019.""" + suffix = 'd.dll' if debug else '.dll' + # VS 2015, 2017 and 2019 use the same CRT DLLs. + _CopyUCRTRuntime(target_dir, source_dir, target_cpu, suffix) + + +def CopyDlls(target_dir, configuration, target_cpu): + """Copy the VS runtime DLLs into the requested directory as needed. + + configuration is one of 'Debug' or 'Release'. + target_cpu is one of 'x86', 'x64' or 'arm64'. + + The debug configuration gets both the debug and release DLLs; the + release config only the latter. + """ + vs_runtime_dll_dirs = SetEnvironmentAndGetRuntimeDllDirs() + if not vs_runtime_dll_dirs: + return + + x64_runtime, x86_runtime, arm64_runtime = vs_runtime_dll_dirs + if target_cpu == 'x64': + runtime_dir = x64_runtime + elif target_cpu == 'x86': + runtime_dir = x86_runtime + elif target_cpu == 'arm64': + runtime_dir = arm64_runtime + else: + raise Exception('Unknown target_cpu: ' + target_cpu) + _CopyRuntime(target_dir, runtime_dir, target_cpu, debug=False) + if configuration == 'Debug': + _CopyRuntime(target_dir, runtime_dir, target_cpu, debug=True) + _CopyDebugger(target_dir, target_cpu) + if target_cpu == 'arm64': + target_dir = os.path.join(target_dir, 'win_clang_x64') + target_cpu = 'x64' + runtime_dir = x64_runtime + os.makedirs(target_dir, exist_ok=True) + _CopyRuntime(target_dir, runtime_dir, target_cpu, debug=False) + if configuration == 'Debug': + _CopyRuntime(target_dir, runtime_dir, target_cpu, debug=True) + _CopyDebugger(target_dir, target_cpu) + + +def _CopyDebugger(target_dir, target_cpu): + """Copy dbghelp.dll, dbgcore.dll, and msdia140.dll into the requested + directory. + + target_cpu is one of 'x86', 'x64' or 'arm64'. + + dbghelp.dll is used when Chrome needs to symbolize stacks. Copying this file + from the SDK directory avoids using the system copy of dbghelp.dll which then + ensures compatibility with recent debug information formats, such as + large-page PDBs. Note that for these DLLs to be deployed to swarming bots they + also need to be listed in group("runtime_libs"). + + dbgcore.dll is needed when using some functions from dbghelp.dll (like + MinidumpWriteDump). + + msdia140.dll is needed for tools like symupload.exe and dump_syms.exe. + """ + win_sdk_dir = SetEnvironmentAndGetSDKDir() + if not win_sdk_dir: + return + + # List of debug files that should be copied, the first element of the tuple is + # the name of the file and the second indicates if it's optional. 
+ debug_files = [('dbghelp.dll', False), ('dbgcore.dll', True)] + for debug_file, is_optional in debug_files: + full_path = os.path.join(win_sdk_dir, 'Debuggers', target_cpu, debug_file) + if not os.path.exists(full_path): + if is_optional: + continue + else: + raise Exception('%s not found in "%s"\r\nYou must install ' + 'Windows 10 SDK version %s including the ' + '"Debugging Tools for Windows" feature.' % + (debug_file, full_path, SDK_VERSION)) + target_path = os.path.join(target_dir, debug_file) + _CopyRuntimeImpl(target_path, full_path) + + # The x64 version of msdia140.dll is always used because symupload and + # dump_syms are always built as x64 binaries. + dia_path = os.path.join(NormalizePath(os.environ['GYP_MSVS_OVERRIDE_PATH']), + 'DIA SDK', 'bin', 'amd64', 'msdia140.dll') + _CopyRuntimeImpl(os.path.join(target_dir, 'msdia140.dll'), dia_path) + + +def _GetDesiredVsToolchainHashes(): + """Load a list of SHA1s corresponding to the toolchains that we want installed + to build with.""" + # Third parties that do not have access to the canonical toolchain can map + # canonical toolchain version to their own toolchain versions. + toolchain_hash_mapping_key = 'GYP_MSVS_HASH_%s' % TOOLCHAIN_HASH + return [os.environ.get(toolchain_hash_mapping_key, TOOLCHAIN_HASH)] + + +def ShouldUpdateToolchain(): + """Check if the toolchain should be upgraded.""" + if not os.path.exists(json_data_file): + return True + with open(json_data_file, 'r') as tempf: + toolchain_data = json.load(tempf) + version = toolchain_data['version'] + env_version = GetVisualStudioVersion() + # If there's a mismatch between the version set in the environment and the one + # in the json file then the toolchain should be updated. + return version != env_version + + +def Update(force=False, no_download=False): + """Requests an update of the toolchain to the specific hashes we have at + this revision. The update outputs a .json of the various configuration + information required to pass to gyp which we use in |GetToolchainDir()|. + If no_download is true then the toolchain will be configured if present but + will not be downloaded. + """ + if force != False and force != '--force': + print('Unknown parameter "%s"' % force, file=sys.stderr) + return 1 + if force == '--force' or os.path.exists(json_data_file): + force = True + + depot_tools_win_toolchain = \ + bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1'))) + if (_HostIsWindows() or force) and depot_tools_win_toolchain: + import find_depot_tools + depot_tools_path = find_depot_tools.add_depot_tools_to_path() + + # On Linux, the file system is usually case-sensitive while the Windows + # SDK only works on case-insensitive file systems. If it doesn't already + # exist, set up a ciopfs fuse mount to put the SDK in a case-insensitive + # part of the file system. + toolchain_dir = os.path.join(depot_tools_path, 'win_toolchain', 'vs_files') + # For testing this block, unmount existing mounts with + # fusermount -u third_party/depot_tools/win_toolchain/vs_files + if sys.platform.startswith('linux') and not os.path.ismount(toolchain_dir): + ciopfs = shutil.which('ciopfs') + if not ciopfs: + # ciopfs not found in PATH; try the one downloaded from the DEPS hook. 
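The override hook in _GetDesiredVsToolchainHashes() above is worth spelling out: third parties set an environment variable named after the canonical hash to redirect to their own toolchain package. A sketch with a made-up override value:

import os

TOOLCHAIN_HASH = '27370823e7'
# Hypothetical third-party override; when unset, the canonical hash is used.
os.environ['GYP_MSVS_HASH_' + TOOLCHAIN_HASH] = 'private0123'
key = 'GYP_MSVS_HASH_%s' % TOOLCHAIN_HASH
print([os.environ.get(key, TOOLCHAIN_HASH)])  # ['private0123']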
+ ciopfs = os.path.join(script_dir, 'ciopfs') + if not os.path.isdir(toolchain_dir): + os.mkdir(toolchain_dir) + if not os.path.isdir(toolchain_dir + '.ciopfs'): + os.mkdir(toolchain_dir + '.ciopfs') + # Without use_ino, clang's #pragma once and Wnonportable-include-path + # both don't work right, see https://llvm.org/PR34931 + # use_ino doesn't slow down builds, so it seems there's no drawback to + # just using it always. + subprocess.check_call([ + ciopfs, '-o', 'use_ino', toolchain_dir + '.ciopfs', toolchain_dir]) + + get_toolchain_args = [ + sys.executable, + os.path.join(depot_tools_path, + 'win_toolchain', + 'get_toolchain_if_necessary.py'), + '--output-json', json_data_file, + ] + _GetDesiredVsToolchainHashes() + if force: + get_toolchain_args.append('--force') + if no_download: + get_toolchain_args.append('--no-download') + subprocess.check_call(get_toolchain_args) + + return 0 + + +def NormalizePath(path): + while path.endswith('\\'): + path = path[:-1] + return path + + +def SetEnvironmentAndGetSDKDir(): + """Gets location information about the current sdk (must have been + previously updated by 'update'). This is used for the GN build.""" + SetEnvironmentAndGetRuntimeDllDirs() + + # If WINDOWSSDKDIR is not set, search the default SDK path and set it. + if not 'WINDOWSSDKDIR' in os.environ: + default_sdk_path = os.path.expandvars('%ProgramFiles(x86)%' + '\\Windows Kits\\10') + if os.path.isdir(default_sdk_path): + os.environ['WINDOWSSDKDIR'] = default_sdk_path + + return NormalizePath(os.environ['WINDOWSSDKDIR']) + + +def GetToolchainDir(): + """Gets location information about the current toolchain (must have been + previously updated by 'update'). This is used for the GN build.""" + runtime_dll_dirs = SetEnvironmentAndGetRuntimeDllDirs() + win_sdk_dir = SetEnvironmentAndGetSDKDir() + + print('''vs_path = %s +sdk_version = %s +sdk_path = %s +vs_version = %s +wdk_dir = %s +runtime_dirs = %s +''' % (ToGNString(NormalizePath( + os.environ['GYP_MSVS_OVERRIDE_PATH'])), ToGNString(SDK_VERSION), + ToGNString(win_sdk_dir), ToGNString(GetVisualStudioVersion()), + ToGNString(NormalizePath(os.environ.get('WDK_DIR', ''))), + ToGNString(os.path.pathsep.join(runtime_dll_dirs or ['None'])))) + + +def main(): + commands = { + 'update': Update, + 'get_toolchain_dir': GetToolchainDir, + 'copy_dlls': CopyDlls, + } + if len(sys.argv) < 2 or sys.argv[1] not in commands: + print('Expected one of: %s' % ', '.join(commands), file=sys.stderr) + return 1 + return commands[sys.argv[1]](*sys.argv[2:]) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/whitespace_file.txt b/whitespace_file.txt new file mode 100644 index 000000000000..0b23301145f9 --- /dev/null +++ b/whitespace_file.txt @@ -0,0 +1,211 @@ +Copyright 2014 The Chromium Authors +Use of this useless file is governed by a BSD-style license that can be +found in the LICENSE file. + + +This file is used for making non-code changes to trigger buildbot cycles. Make +any modification below this line. + +====================================================================== + +Let's make a story. Add zero+ sentences for every commit: + +CHÄPTER 1: +It was a dark and blinky night; the rain fell in torrents -- except at +occasional intervals, when it was checked by a violent gust of wind which +swept up the streets (for it is in London that our scene lies), rattling along +the housetops, and fiercely agitating the scanty flame of the lamps that +struggled against the elements. A hooded figure emerged. + +It was a Domo-Kun. 
+ +"What took you so long?", inquired his wife. + +Silence. Oblivious to his silence, she continued, "Did Mr. Usagi enjoy the +waffles you brought him?" "You know him, he's not one to forego a waffle, +no matter how burnt," he snickered. + +The pause was filled with the sound of compile errors. + +CHAPTER 2: +The jelly was as dark as night, and just as runny. +The Domo-Kun shuddered, remembering the way Mr. Usagi had speared his waffles +with his fork, watching the runny jelly spread and pool across his plate, +like the blood of a dying fawn. "It reminds me of that time --" he started, as +his wife cut in quickly: "-- please. I can't bear to hear it.". A flury of +images coming from the past flowed through his mind. + +"You recall what happened on Mulholland drive?" The ceiling fan rotated slowly +overhead, barely disturbing the thick cigarette smoke. No doubt was left about +when the fan was last cleaned. + +There was a poignant pause. + +CHAPTER 3: +Mr. Usagi felt that something wasn't right. Shortly after the Domo-Kun left he +began feeling sick. He thought out loud to himself, "No, he wouldn't have done +that to me." He considered that perhaps he shouldn't have pushed so hard. +Perhaps he shouldn't have been so cold and sarcastic, after the unimaginable +horror that had occurred just the week before. + +Next time, there won't be any sushi. Why sushi with waffles anyway? It's like +adorning breakfast cereal with halibut -- shameful. + +CHAPTER 4: +The taste of stale sushi in his mouth the next morning was unbearable. He +wondered where the sushi came from as he attempted to wash the taste away with +a bottle of 3000¥ sake. He tries to recall the cook's face. Green? Probably. + +CHAPTER 5: +Many tears later, Mr. Usagi would laugh at the memory of the earnest, +well-intentioned Domo-Kun. Another day in the life. That is when he realized that +life goes on. + +$CHAPTER6 + +TRUISMS (1978-1983) +JENNY HOLZER +A LITTLE KNOWLEDGE CAN GO A LONG WAY +A LOT OF PROFESSIONALS ARE CRACKPOTS +A MAN CAN'T KNOW WHAT IT IS TO BE A MOTHER +A NAME MEANS A LOT JUST BY ITSELF +A POSITIVE ATTITUDE MEANS ALL THE DIFFERENCE IN THE WORLD +A RELAXED MAN IS NOT NECESSARILY A BETTER MAN +NO ONE SHOULD EVER USE SVN +AN INFLEXIBLE POSITION SOMETIMES IS A SIGN OF PARALYSIS +IT IS MANS FATE TO OUTSMART HIMSELF +BEING SURE OF YOURSELF MEANS YOU'RE A FOOL +AM NOT +ARE TOO +IF AT FIRST YOU DON'T SUCCEED: TRY, EXCEPT, FINALLY +AND THEN, TIME LEAPT BACKWARDS +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAaaaaaaaaaaaaaaaaaaaaaaaaaaaahhhh LOT +I'm really tempted to change something above the line. +Reeccciiiipppppeeeeeesssssss!!!!!!!!! +PEOPLE SAY "FAILURE IS NOT AN OPTION", BUT FAILURE IS ALWAYS AN OPTION. +WHAT GOES UP MUST HAVE A NON-ZERO VELOCITY + +I can feel the heat closing in, feel them out there making their moves... +What could possibly go wrong? We've already ate our cake. + +Stand Still. Pause Clocks. We can make the World Stop. +WUBWUBWUBWUBWUB + +I want a 1917 build and you will give me what I want. + +This sentence is false. + +Beauty is in the eyes of a Beholder. + +I'm the best at space. + +The first time Yossarian saw the chaplain, he fell madly in love with him. +* +* +* +Give not thyself up, then, to fire, lest it invert thee, deaden thee; as for +the time it did me. There is a wisdom that is woe; but there is a woe that is +madness. And there is a Catskill eagle in some souls that can alike dive down +into the blackest gorges, and soar out of them again and become invisible in +the sunny spaces. 
And even if he for ever flies within the gorge, that gorge +is in the mountains; so that even in his lowest swoop the mountain eagle is +still higher than other birds upon the plain, even though they soar. +* +* +* + +I'm here to commit lines and drop rhymes +* +This is a line to test and try uploading a cl. +* +Yay, another first commit! What a beautiful day! + +And lo, in the year 2014, there was verily an attempt to upgrade to GCC 4.8 on +the Android bots, and it was good. Except on one bot, where it was bad. And +lo, the change was reverted, and GCC went back to 4.6, where code is slower +and less optimized. And verily did it break the build, because artifacts had +been created with 4.8, and alignment was no longer the same, and a great +sadness descended upon the Android GN buildbot, and it did refuseth to build +any more. But the sheriffs thought to themselves: Placebo! Let us clobber the +bot, and perhaps it will rebuild with GCC 4.6, which hath worked for many many +seasons. And so they modified the whitespace file with these immortal lines, +and visited it upon the bots, that great destruction might be wrought upon +their outdated binaries. In clobberus, veritas. + +As the git approaches, light begins to shine through the SCM thrice again... +However, the git, is, after all, quite stupid. + +Suddenly Domo-Kun found itself in a room filled with dazzling mirrors. As +Domo-Kun looked around, it realized that some of the mirrors were actually but +pale reflections of true reality. + +A herd of wild gits appears! Time for CQ :D +And one more for sizes.py... + +What's an overmarketed dietary supplement expressing sadness, relief, +tiredness, or a similar feeling.? Ah-Sigh-ee. + +It was love at first sight. The moment Yossarian first laid eyes on the chaplain, he fell madly in love with him. + +Cool whitespace change for git-cl land + +Oh god the bots are red! I'm blind! Mmmm, cronuts. + +If you stand on your head, you will get footprints in your hair. + +sigh +sigher +pick up cls + +In the BUILD we trust. +^_^ + +In the masters we don't. +In the tryservers, we don't either. +In the CQ sometimes. +Auto-generated by git-eject-upstream (http://goo.gl/cIHsYR) +My sandwiches are like my children: I love them all. +No, really, I couldn't eat another bit. +When I hunger I think of you, and a pastrami sandwich. +Do make a terrible mistake every once in a while. +I just made two. +Mistakes are the best sometimes. +\o/ +This is groovy. + +SECRET ENDING: IT WAS _____ ALL ALONG! +testing trailing line. + +So many books, so little time. + +Hi there, it's my first commit. +!false isn't funny because it's true. + +Lalala + +Lorem ipsum dolor sit amet, consectetur adipiscing elit. +Vestibulum rhoncus neque sodales nibh lobortis, non fringilla odio aliquet. +Praesent ultrices quam eu pretium ultrices. +Quisque et consequat ex. Curabitur sed nunc neque. +foo + +And if you go chasing rabbits +And you know you're going to fall +Tell 'em a hookah-smoking caterpillar + +Isn't it supposed to be a whitespace file? +Let's add some " ". + +I'll join to add my first commit here. P. S. It has stopped being a story long long ago. + +PANCAKE RECIPE: +STEP 1: Put 100g plain flour, 2 large eggs, 300ml milk, 1 tbsp sunflower or vegetable oil +and a pinch of salt into a bowl or large jug, then whisk to a smooth batter. +STEP 2: Set aside for 30 mins to rest if you have time, or start cooking straight away. 
+STEP 3: Set a medium frying pan or crêpe pan over a medium heat and carefully wipe it with some +oiled kitchen paper. +STEP 4: When hot, cook your pancakes for 1 min on each side until golden, keeping them warm +in a low oven as you go. +STEP 5: Serve with lemon wedges and caster sugar, or your favourite filling. +Once cold, you can layer the pancakes between baking parchment, +then wrap in cling film and freeze for up to 2 months. diff --git a/win/BUILD.gn b/win/BUILD.gn new file mode 100644 index 000000000000..864581851519 --- /dev/null +++ b/win/BUILD.gn @@ -0,0 +1,81 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import("//build/config/clang/clang.gni") +import("//build/config/sanitizers/sanitizers.gni") +import("//build/config/win/manifest.gni") + +# Depending on this target will cause the manifests for Chrome's default +# Windows and common control compatibility and elevation for executables. +windows_manifest("default_exe_manifest") { + sources = [ + as_invoker_manifest, + common_controls_manifest, + default_compatibility_manifest, + ] +} + +if (is_win) { + assert(host_os != "mac" || target_cpu != "x86", + "Windows cross-builds from Mac must be 64-bit.") + + action("copy_cdb_to_output") { + script = "//build/win/copy_cdb_to_output.py" + inputs = [ + script, + "//build/vs_toolchain.py", + ] + outputs = [ + "$root_out_dir/cdb/cdb.exe", + "$root_out_dir/cdb/dbgeng.dll", + "$root_out_dir/cdb/dbghelp.dll", + "$root_out_dir/cdb/dbgmodel.dll", + "$root_out_dir/cdb/winext/ext.dll", + "$root_out_dir/cdb/winext/uext.dll", + "$root_out_dir/cdb/winxp/exts.dll", + "$root_out_dir/cdb/winxp/ntsdexts.dll", + ] + args = [ + rebase_path("$root_out_dir/cdb", root_out_dir), + current_cpu, + ] + } + + group("runtime_libs") { + # These are needed for any tests that need to decode stacks. + data = [ + "$root_out_dir/dbghelp.dll", + "$root_out_dir/dbgcore.dll", + ] + if (is_component_build) { + # Copy the VS runtime DLLs into the isolate so that they don't have to be + # preinstalled on the target machine. The debug runtimes have a "d" at + # the end. + if (is_debug) { + vcrt_suffix = "d" + } else { + vcrt_suffix = "" + } + + # These runtime files are copied to the output directory by the + # vs_toolchain script that runs as part of toolchain configuration. 
+ data += [ + "$root_out_dir/msvcp140${vcrt_suffix}.dll", + "$root_out_dir/vccorlib140${vcrt_suffix}.dll", + "$root_out_dir/vcruntime140${vcrt_suffix}.dll", + ] + if (current_cpu == "x64") { + data += [ "$root_out_dir/vcruntime140_1${vcrt_suffix}.dll" ] + } + if (is_debug) { + data += [ "$root_out_dir/ucrtbased.dll" ] + } + if (is_asan) { + assert(target_cpu == "x64", + "ASan is only supported in 64-bit builds on Windows.") + data += [ "$clang_base_path/lib/clang/$clang_version/lib/windows/clang_rt.asan_dynamic-x86_64.dll" ] + } + } + } +} diff --git a/win/as_invoker.manifest b/win/as_invoker.manifest new file mode 100644 index 000000000000..df046fdf6841 --- /dev/null +++ b/win/as_invoker.manifest @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/win/common_controls.manifest b/win/common_controls.manifest new file mode 100644 index 000000000000..1710196fce8e --- /dev/null +++ b/win/common_controls.manifest @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/win/compatibility.manifest b/win/compatibility.manifest new file mode 100644 index 000000000000..755c272c671d --- /dev/null +++ b/win/compatibility.manifest @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/win/copy_cdb_to_output.py b/win/copy_cdb_to_output.py new file mode 100755 index 000000000000..5d124403340a --- /dev/null +++ b/win/copy_cdb_to_output.py @@ -0,0 +1,115 @@ +#!/usr/bin/env python3 +# Copyright 2016 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +import glob +import hashlib +import os +import shutil +import sys + +script_dir = os.path.dirname(os.path.realpath(__file__)) +src_build_dir = os.path.abspath(os.path.join(script_dir, os.pardir)) +sys.path.insert(0, src_build_dir) + +import vs_toolchain + + +def _HexDigest(file_name): + hasher = hashlib.sha256() + afile = open(file_name, 'rb') + blocksize = 65536 + buf = afile.read(blocksize) + while len(buf) > 0: + hasher.update(buf) + buf = afile.read(blocksize) + afile.close() + return hasher.hexdigest() + + +def _CopyImpl(file_name, target_dir, source_dir, verbose=False): + """Copy |source| to |target| if it doesn't already exist or if it + needs to be updated. + """ + target = os.path.join(target_dir, file_name) + source = os.path.join(source_dir, file_name) + if (os.path.isdir(os.path.dirname(target)) and + ((not os.path.isfile(target)) or + _HexDigest(source) != _HexDigest(target))): + if verbose: + print('Copying %s to %s...' % (source, target)) + if os.path.exists(target): + os.unlink(target) + shutil.copy(source, target) + + +def _ConditionalMkdir(output_dir): + if not os.path.isdir(output_dir): + os.makedirs(output_dir) + + +def _CopyCDBToOutput(output_dir, target_arch): + """Copies the Windows debugging executable cdb.exe to the output + directory, which is created if it does not exist. The output + directory, and target architecture that should be copied, are + passed. Supported values for the target architecture are the GYP + values "ia32", "x64", "arm64" and the GN values "x86", "x64", "arm64". + """ + _ConditionalMkdir(output_dir) + vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs() + # If WINDOWSSDKDIR is not set use the default SDK path. This will be the case + # when DEPOT_TOOLS_WIN_TOOLCHAIN=0 and vcvarsall.bat has not been run. 
+ win_sdk_dir = os.path.normpath( + os.environ.get('WINDOWSSDKDIR', + os.path.expandvars('%ProgramFiles(x86)%' + '\\Windows Kits\\10'))) + if target_arch == 'ia32' or target_arch == 'x86': + src_arch = 'x86' + elif target_arch in ['x64', 'arm64']: + src_arch = target_arch + else: + print('copy_cdb_to_output.py: unknown target_arch %s' % target_arch) + sys.exit(1) + # We need to copy multiple files, so cache the computed source directory. + src_dir = os.path.join(win_sdk_dir, 'Debuggers', src_arch) + # We need to copy some helper DLLs to get access to the !uniqstack + # command to dump all threads' stacks. + src_winext_dir = os.path.join(src_dir, 'winext') + dst_winext_dir = os.path.join(output_dir, 'winext') + src_winxp_dir = os.path.join(src_dir, 'winxp') + dst_winxp_dir = os.path.join(output_dir, 'winxp') + # Starting with the 10.0.17763 SDK the ucrt files are in a version-named + # directory - this handles both cases. + redist_dir = os.path.join(win_sdk_dir, 'Redist') + version_dirs = glob.glob(os.path.join(redist_dir, '10.*')) + if len(version_dirs) > 0: + version_dirs.sort(reverse=True) + redist_dir = version_dirs[0] + src_crt_dir = os.path.join(redist_dir, 'ucrt', 'DLLs', src_arch) + _ConditionalMkdir(dst_winext_dir) + _ConditionalMkdir(dst_winxp_dir) + # Note that the outputs from the "copy_cdb_to_output" target need to + # be kept in sync with this list. + _CopyImpl('cdb.exe', output_dir, src_dir) + _CopyImpl('dbgeng.dll', output_dir, src_dir) + _CopyImpl('dbghelp.dll', output_dir, src_dir) + _CopyImpl('dbgmodel.dll', output_dir, src_dir) + _CopyImpl('ext.dll', dst_winext_dir, src_winext_dir) + _CopyImpl('uext.dll', dst_winext_dir, src_winext_dir) + _CopyImpl('exts.dll', dst_winxp_dir, src_winxp_dir) + _CopyImpl('ntsdexts.dll', dst_winxp_dir, src_winxp_dir) + return 0 + + +def main(): + if len(sys.argv) < 2: + print('Usage: copy_cdb_to_output.py ' + \ + '', file=sys.stderr) + return 1 + return _CopyCDBToOutput(sys.argv[1], sys.argv[2]) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/win/gn_meta_sln.py b/win/gn_meta_sln.py new file mode 100644 index 000000000000..2165a1301f98 --- /dev/null +++ b/win/gn_meta_sln.py @@ -0,0 +1,213 @@ +# Copyright 2017 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +# +# gn_meta_sln.py +# Helper utility to combine GN-generated Visual Studio projects into +# a single meta-solution. + + +import os +import glob +import re +import sys +from shutil import copyfile + +# Helpers +def EnsureExists(path): + try: + os.makedirs(path) + except OSError: + pass + +def WriteLinesToFile(lines, file_name): + EnsureExists(os.path.dirname(file_name)) + with open(file_name, "w") as f: + f.writelines(lines) + +def ExtractIdg(proj_file_name): + result = [] + with open(proj_file_name) as proj_file: + lines = iter(proj_file) + for p_line in lines: + if " [ (config, pathToProject, GUID, arch), ... ] +all_projects = {} +project_pattern = (r'Project\("\{' + cpp_type_guid + + r'\}"\) = "([^"]*)", "([^"]*)", "\{([^\}]*)\}"') + +# We need something to work with. Typically, this will fail if no GN folders +# have IDE files +if len(configs) == 0: + print("ERROR: At least one GN directory must have been built with --ide=vs") + sys.exit() + +# Filter out configs which don't match the name and vs version of the first. 
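project_pattern above is the workhorse of this script: it pulls the name, path, and GUID out of each C++ Project line in a GN-generated .sln. A sketch against an illustrative line (the project GUID is made up; 8BC9CEB8-... is the standard Visual C++ project-type GUID):

import re

cpp_type_guid = '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
project_pattern = (r'Project\("\{' + cpp_type_guid +
                   r'\}"\) = "([^"]*)", "([^"]*)", "\{([^\}]*)\}"')
line = ('Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = '
        '"base", "obj\\base\\base.vcxproj", '
        '"{01234567-89AB-CDEF-0123-456789ABCDEF}"')
m = re.match(project_pattern, line)
# m.group(1) == 'base'                                  -> project name
# m.group(2) == 'obj\\base\\base.vcxproj'               -> path to project
# m.group(3) == '01234567-89AB-CDEF-0123-456789ABCDEF'  -> project GUID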
+name = configs[0][1] +vs_version = configs[0][2] + +for config in configs: + if config[1] != name or config[2] != vs_version: + continue + + sln_lines = iter(open(os.path.join("out", config[0], config[1]))) + for sln_line in sln_lines: + match_obj = re.match(project_pattern, sln_line) + if match_obj: + proj_name = match_obj.group(1) + if proj_name not in all_projects: + all_projects[proj_name] = [] + all_projects[proj_name].append((config[0], match_obj.group(2), + match_obj.group(3))) + +# We need something to work with. Typically, this will fail if no GN folders +# have IDE files +if len(all_projects) == 0: + print("ERROR: At least one GN directory must have been built with --ide=vs") + sys.exit() + +# Create a new solution. We arbitrarily use the first config as the GUID source +# (but we need to match that behavior later, when we copy/generate the project +# files). +new_sln_lines = [] +new_sln_lines.append( + 'Microsoft Visual Studio Solution File, Format Version 12.00\n') +new_sln_lines.append('# Visual Studio ' + vs_version + '\n') +for proj_name, proj_configs in all_projects.items(): + new_sln_lines.append('Project("{' + cpp_type_guid + '}") = "' + proj_name + + '", "' + proj_configs[0][1] + '", "{' + + proj_configs[0][2] + '}"\n') + new_sln_lines.append('EndProject\n') + +new_sln_lines.append('Global\n') +new_sln_lines.append( + '\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\n') +for config in configs: + match = config[0] + '|' + hard_coded_arch + new_sln_lines.append('\t\t' + match + ' = ' + match + '\n') +new_sln_lines.append('\tEndGlobalSection\n') +new_sln_lines.append( + '\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\n') +for proj_name, proj_configs in all_projects.items(): + proj_guid = proj_configs[0][2] + for config in configs: + match = config[0] + '|' + hard_coded_arch + new_sln_lines.append('\t\t{' + proj_guid + '}.' + match + + '.ActiveCfg = ' + match + '\n') + new_sln_lines.append('\t\t{' + proj_guid + '}.' 
+                         '.Build.0 = ' + match + '\n')
+new_sln_lines.append('\tEndGlobalSection\n')
+new_sln_lines.append('\tGlobalSection(SolutionProperties) = preSolution\n')
+new_sln_lines.append('\t\tHideSolutionNode = FALSE\n')
+new_sln_lines.append('\tEndGlobalSection\n')
+new_sln_lines.append('\tGlobalSection(NestedProjects) = preSolution\n')
+new_sln_lines.append('\tEndGlobalSection\n')
+new_sln_lines.append('EndGlobal\n')
+
+# Write solution file
+WriteLinesToFile(new_sln_lines, 'out/sln/' + name)
+
+idg_hdr = "<ItemDefinitionGroup Condition=\"'$(Configuration)|$(Platform)'=='"
+
+configuration_template = """    <ProjectConfiguration Include="{config}|{arch}">
+      <Configuration>{config}</Configuration>
+      <Platform>{arch}</Platform>
+    </ProjectConfiguration>
+"""
+
+def FormatProjectConfig(config):
+  return configuration_template.format(
+      config = config[0], arch = hard_coded_arch)
+
+# Now, bring over the project files
+for proj_name, proj_configs in all_projects.items():
+  # Paths to project and filter file in src and dst locations
+  src_proj_path = os.path.join("out", proj_configs[0][0], proj_configs[0][1])
+  dst_proj_path = os.path.join("out", "sln", proj_configs[0][1])
+  src_filter_path = src_proj_path + ".filters"
+  dst_filter_path = dst_proj_path + ".filters"
+
+  # Copy the filter file unmodified
+  EnsureExists(os.path.dirname(dst_proj_path))
+  copyfile(src_filter_path, dst_filter_path)
+
+  preferred_tool_arch = None
+  config_arch = {}
+
+  # Bring over the project file, modified with extra configs
+  with open(src_proj_path) as src_proj_file:
+    proj_lines = iter(src_proj_file)
+    new_proj_lines = []
+    for line in proj_lines:
+      if idg_hdr in line:
+        for config in proj_configs:
+          config_idg_lines = ExtractIdg(os.path.join("out", config[0],
+                                                     config[1]))
+          new_proj_lines.append(idg_hdr + config[0] + "|" +
+                                hard_coded_arch + "'\">\n")
+          for idg_line in config_idg_lines[1:]:
+            new_proj_lines.append(idg_line)
+      elif "ProjectConfigurations" in line:
+        new_proj_lines.append(line)
+        next(proj_lines)
+        next(proj_lines)
+        next(proj_lines)
+        next(proj_lines)
+        for config in configs:
+          new_proj_lines.append(FormatProjectConfig(config))
+
+      elif "<PreferredToolArchitecture" in line:
+        new_proj_lines.append("    <PreferredToolArchitecture>" +
+                              hard_coded_arch +
+                              "</PreferredToolArchitecture>\n")
+      else:
+        new_proj_lines.append(line)
+    with open(dst_proj_path, "w") as new_proj:
+      new_proj.writelines(new_proj_lines)
+
+print('Wrote meta solution to out/sln/' + name)
diff --git a/win/message_compiler.gni b/win/message_compiler.gni
new file mode 100644
index 000000000000..33ced1cb7fb3
--- /dev/null
+++ b/win/message_compiler.gni
@@ -0,0 +1,87 @@
+# Copyright 2015 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_win, "This only runs on Windows.")
+
+# Runs mc.exe over a list of sources. The outputs (a header and rc file) are
+# placed in the target gen dir, and compiled.
+#
+# sources
+#   List of message files to process.
+#
+# user_mode_logging (optional bool)
+#   Generates user-mode logging code. Defaults to false (no logging code).
+#
+# compile_generated_code (optional, defaults to true)
+#   If unset or true, the generated code will be compiled and linked into
+#   targets that depend on it. If set to false, the .h and .rc files will only
+#   be generated.
+#
+# deps, public_deps, visibility
+#   Normal meaning.
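+#
+# A minimal usage sketch (target and source names here are hypothetical):
+#
+#   message_compiler("chrome_messages") {
+#     sources = [ "chrome_messages.mc" ]
+#     user_mode_logging = true
+#   }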
+template("message_compiler") { + if (defined(invoker.compile_generated_code) && + !invoker.compile_generated_code) { + compile_generated_code = false + action_name = target_name + } else { + compile_generated_code = true + action_name = "${target_name}_mc" + source_set_name = target_name + } + + action_foreach(action_name) { + if (compile_generated_code) { + visibility = [ ":$source_set_name" ] + } else { + forward_variables_from(invoker, [ "visibility" ]) + } + + script = "//build/win/message_compiler.py" + + outputs = [ + "$target_gen_dir/{{source_name_part}}.h", + "$target_gen_dir/{{source_name_part}}.rc", + ] + + args = [ + # The first argument is the environment file saved to the build + # directory. This is required because the Windows toolchain setup saves + # the VC paths and such so that running "mc.exe" will work with the + # configured toolchain. This file is in the root build dir. + "environment.$current_cpu", + + # Where to put the header. + "-h", + rebase_path(target_gen_dir, root_build_dir), + + # Where to put the .rc file. + "-r", + rebase_path(target_gen_dir, root_build_dir), + + # Input is Unicode. + "-u", + ] + if (defined(invoker.user_mode_logging) && invoker.user_mode_logging) { + args += [ "-um" ] + } + args += [ "{{source}}" ] + + forward_variables_from(invoker, + [ + "deps", + "public_deps", + "sources", + ]) + } + + if (compile_generated_code) { + # Compile the generated rc file. + source_set(source_set_name) { + forward_variables_from(invoker, [ "visibility" ]) + sources = get_target_outputs(":$action_name") + deps = [ ":$action_name" ] + } + } +} diff --git a/win/message_compiler.py b/win/message_compiler.py new file mode 100644 index 000000000000..9759c1fd1a4f --- /dev/null +++ b/win/message_compiler.py @@ -0,0 +1,147 @@ +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Runs the Microsoft Message Compiler (mc.exe). +# +# Usage: message_compiler.py [*] + + +import difflib +import distutils.dir_util +import filecmp +import os +import re +import shutil +import subprocess +import sys +import tempfile + +def main(): + env_file, rest = sys.argv[1], sys.argv[2:] + + # Parse some argument flags. + header_dir = None + resource_dir = None + input_file = None + for i, arg in enumerate(rest): + if arg == '-h' and len(rest) > i + 1: + assert header_dir == None + header_dir = rest[i + 1] + elif arg == '-r' and len(rest) > i + 1: + assert resource_dir == None + resource_dir = rest[i + 1] + elif arg.endswith('.mc') or arg.endswith('.man'): + assert input_file == None + input_file = arg + + # Copy checked-in outputs to final location. + THIS_DIR = os.path.abspath(os.path.dirname(__file__)) + assert header_dir == resource_dir + source = os.path.join(THIS_DIR, "..", "..", + "third_party", "win_build_output", + re.sub(r'^(?:[^/]+/)?gen/', 'mc/', header_dir)) + distutils.dir_util.copy_tree(source, header_dir, preserve_times=False) + + # On non-Windows, that's all we can do. + if sys.platform != 'win32': + return + + # On Windows, run mc.exe on the input and check that its outputs are + # identical to the checked-in outputs. + + # Read the environment block from the file. This is stored in the format used + # by CreateProcess. Drop last 2 NULs, one for list terminator, one for + # trailing vs. separator. 
+  env_pairs = open(env_file).read()[:-2].split('\0')
+  env_dict = dict([item.split('=', 1) for item in env_pairs])
+
+  extension = os.path.splitext(input_file)[1]
+  if extension in ['.man', '.mc']:
+    # For .man files, mc's output changed significantly from Version 10.0.15063
+    # to Version 10.0.16299. We should always have the output of the current
+    # default SDK checked in and compare to that. Early out if a different SDK
+    # is active. This also happens with .mc files.
+    # TODO(thakis): Check in new baselines and compare to 16299 instead once
+    # we use the 2017 Fall Creator's Update by default.
+    mc_help = subprocess.check_output(['mc.exe', '/?'], env=env_dict,
+                                      stderr=subprocess.STDOUT, shell=True)
+    version = re.search(br'Message Compiler\s+Version (\S+)', mc_help).group(1)
+    if version != b'10.0.15063':
+      return
+
+  # mc writes to stderr, so this explicitly redirects to stdout and eats it.
+  try:
+    tmp_dir = tempfile.mkdtemp()
+    delete_tmp_dir = True
+    if header_dir:
+      rest[rest.index('-h') + 1] = tmp_dir
+      header_dir = tmp_dir
+    if resource_dir:
+      rest[rest.index('-r') + 1] = tmp_dir
+      resource_dir = tmp_dir
+
+    # This needs shell=True to search the path in env_dict for the mc
+    # executable.
+    subprocess.check_output(['mc.exe'] + rest,
+                            env=env_dict,
+                            stderr=subprocess.STDOUT,
+                            shell=True)
+    # We require all source code (in particular, the header generated here) to
+    # be UTF-8. jinja can output the intermediate .mc file in UTF-8 or UTF-16LE.
+    # However, mc.exe only supports Unicode via the -u flag, and it assumes when
+    # that is specified that the input is UTF-16LE (and errors out on UTF-8
+    # files, assuming they're ANSI). Even with -u specified and UTF16-LE input,
+    # it generates an ANSI header, and includes broken versions of the message
+    # text in the comment before the value. To work around this, for any invalid
+    # // comment lines, we simply drop the line in the header after building it.
+    # Also, mc.exe apparently doesn't always write #define lines in
+    # deterministic order, so manually sort each block of #defines.
+    if header_dir:
+      header_file = os.path.join(
+          header_dir, os.path.splitext(os.path.basename(input_file))[0] + '.h')
+      header_contents = []
+      with open(header_file, 'rb') as f:
+        define_block = []  # The current contiguous block of #defines.
+        for line in f.readlines():
+          if line.startswith(b'//') and b'?' in line:
+            continue
+          if line.startswith(b'#define '):
+            define_block.append(line)
+            continue
+          # On the first non-#define line, emit the sorted preceding #define
+          # block.
+          header_contents += sorted(define_block, key=lambda s: s.split()[-1])
+          define_block = []
+          header_contents.append(line)
+        # If the .h file ends with a #define block, flush the final block.
+        header_contents += sorted(define_block, key=lambda s: s.split()[-1])
+      with open(header_file, 'wb') as f:
+        f.write(b''.join(header_contents))
+
+    # mc.exe invocation and post-processing are complete, now compare the output
+    # in tmp_dir to the checked-in outputs.
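+    # (filecmp.dircmp reports a name in diff_files when the file exists in
+    # both trees but does not compare equal; the left_list/right_list check
+    # below additionally catches files present in only one of the two trees.)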
+    diff = filecmp.dircmp(tmp_dir, source)
+    if diff.diff_files or set(diff.left_list) != set(diff.right_list):
+      print('mc.exe output different from files in %s, see %s' % (source,
+                                                                  tmp_dir))
+      diff.report()
+      for f in diff.diff_files:
+        if f.endswith('.bin'): continue
+        fromfile = os.path.join(source, f)
+        tofile = os.path.join(tmp_dir, f)
+        print(''.join(
+            difflib.unified_diff(
+                open(fromfile, 'U').readlines(),
+                open(tofile, 'U').readlines(), fromfile, tofile)))
+      delete_tmp_dir = False
+      sys.exit(1)
+  except subprocess.CalledProcessError as e:
+    print(e.output)
+    sys.exit(e.returncode)
+  finally:
+    if os.path.exists(tmp_dir) and delete_tmp_dir:
+      shutil.rmtree(tmp_dir)
+
+if __name__ == '__main__':
+  main()
diff --git a/win/reorder-imports.py b/win/reorder-imports.py
new file mode 100755
index 000000000000..7dd8e1d8babb
--- /dev/null
+++ b/win/reorder-imports.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python3
+# Copyright 2014 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import glob
+import optparse
+import os
+import shutil
+import subprocess
+import sys
+
+sys.path.insert(
+    0,
+    os.path.join(os.path.dirname(__file__), '..', '..', 'third_party',
+                 'pefile_py3'))
+import pefile
+
+def reorder_imports(input_dir, output_dir, architecture):
+  """Swap chrome_elf.dll to be the first import of chrome.exe.
+  Also copy over any related files that might be needed
+  (pdbs, manifests etc.).
+  """
+  # TODO(thakis): See if there is a reliable way to write the
+  # correct executable in the first place, so that this script
+  # only needs to verify that and not write a whole new exe.
+
+  input_image = os.path.join(input_dir, 'chrome.exe')
+  output_image = os.path.join(output_dir, 'chrome.exe')
+
+  # pefile mmap()s the whole executable, and then parses parts of
+  # it into python data structures for ease of processing.
+  # To write the file again, only the mmap'd data is written back,
+  # so modifying the parsed python objects generally has no effect.
+  # However, parsed raw data ends up in pe.Structure instances,
+  # and these all get serialized back when the file gets written.
+  # So things that are in a Structure must have their data set
+  # through the Structure, while other data must be set through
+  # the set_bytes_*() methods.
+  pe = pefile.PE(input_image, fast_load=True)
+  if architecture == 'x64' or architecture == 'arm64':
+    assert pe.PE_TYPE == pefile.OPTIONAL_HEADER_MAGIC_PE_PLUS
+  else:
+    assert pe.PE_TYPE == pefile.OPTIONAL_HEADER_MAGIC_PE
+
+  pe.parse_data_directories(directories=[
+      pefile.DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_IMPORT']])
+
+  found_elf = False
+  for i, peimport in enumerate(pe.DIRECTORY_ENTRY_IMPORT):
+    if peimport.dll.lower() == b'chrome_elf.dll':
+      assert not found_elf, 'only one chrome_elf.dll import expected'
+      found_elf = True
+      if i > 0:
+        swap = pe.DIRECTORY_ENTRY_IMPORT[0]
+
+        # Morally we want to swap peimport.struct and swap.struct here,
+        # but the pe module doesn't expose a public method on Structure
+        # to get all data of a Structure without explicitly listing all
+        # field names.
+        # NB: OriginalFirstThunk and Characteristics are a union both at
+        # offset 0, handling just one of them is enough.
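+        # Illustrative sketch (hypothetical imports): with an import table of
+        # [user32.dll, chrome_elf.dll], the five field swaps below yield
+        # [chrome_elf.dll, user32.dll] while leaving all other data intact.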
+        peimport.struct.OriginalFirstThunk, swap.struct.OriginalFirstThunk = \
+            swap.struct.OriginalFirstThunk, peimport.struct.OriginalFirstThunk
+        peimport.struct.TimeDateStamp, swap.struct.TimeDateStamp = \
+            swap.struct.TimeDateStamp, peimport.struct.TimeDateStamp
+        peimport.struct.ForwarderChain, swap.struct.ForwarderChain = \
+            swap.struct.ForwarderChain, peimport.struct.ForwarderChain
+        peimport.struct.Name, swap.struct.Name = \
+            swap.struct.Name, peimport.struct.Name
+        peimport.struct.FirstThunk, swap.struct.FirstThunk = \
+            swap.struct.FirstThunk, peimport.struct.FirstThunk
+  assert found_elf, 'chrome_elf.dll import not found'
+
+  pe.write(filename=output_image)
+
+  for fname in glob.iglob(os.path.join(input_dir, 'chrome.exe.*')):
+    shutil.copy(fname, os.path.join(output_dir, os.path.basename(fname)))
+  return 0
+
+
+def main(argv):
+  usage = 'reorder_imports.py -i <input_dir> -o <output_dir> -a <target_arch>'
+  parser = optparse.OptionParser(usage=usage)
+  parser.add_option('-i', '--input', help='reorder chrome.exe in DIR',
+                    metavar='DIR')
+  parser.add_option('-o', '--output', help='write new chrome.exe to DIR',
+                    metavar='DIR')
+  parser.add_option('-a', '--arch', help='architecture of build (optional)',
+                    default='ia32')
+  opts, args = parser.parse_args()
+
+  if not opts.input or not opts.output:
+    parser.error('Please provide an input and output directory')
+  return reorder_imports(opts.input, opts.output, opts.arch)
+
+if __name__ == "__main__":
+  sys.exit(main(sys.argv[1:]))
diff --git a/win/require_administrator.manifest b/win/require_administrator.manifest
new file mode 100644
index 000000000000..4142e7334247
--- /dev/null
+++ b/win/require_administrator.manifest
@@ -0,0 +1,9 @@
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+  <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+    <security>
+      <requestedPrivileges>
+        <requestedExecutionLevel level="requireAdministrator"/>
+      </requestedPrivileges>
+    </security>
+  </trustInfo>
+</assembly>
diff --git a/win/segment_heap.manifest b/win/segment_heap.manifest
new file mode 100644
index 000000000000..fc930a4d563c
--- /dev/null
+++ b/win/segment_heap.manifest
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+  <asmv3:application xmlns:asmv3="urn:schemas-microsoft-com:asm.v3">
+    <asmv3:windowsSettings xmlns:ws2="http://schemas.microsoft.com/SMI/2020/WindowsSettings">
+      <ws2:heapType>SegmentHeap</ws2:heapType>
+    </asmv3:windowsSettings>
+  </asmv3:application>
+</assembly>
diff --git a/win/set_appcontainer_acls.py b/win/set_appcontainer_acls.py
new file mode 100755
index 000000000000..06a1819548fa
--- /dev/null
+++ b/win/set_appcontainer_acls.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python3
+# Copyright 2021 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Sets the app container ACLs on a directory."""
+
+import os
+import argparse
+import sys
+
+SRC_DIR = os.path.dirname(
+    os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+sys.path.append(os.path.join(SRC_DIR, 'testing', 'scripts'))
+
+import common
+
+
+def main():
+  parser = argparse.ArgumentParser(
+      description='Sets App Container ACL on a directory.')
+  parser.add_argument('--stamp',
+                      required=False,
+                      help='Touch this stamp file on success.')
+  parser.add_argument('--dir', required=True, help='Set ACL on this directory.')
+  # parser.add_argument('--fail', required=True, help='Argument to fail.')
+  args = parser.parse_args()
+
+  common.set_lpac_acls(os.path.abspath(args.dir))
+  if args.stamp:
+    open(args.stamp, 'w').close()  # Update mtime on stamp file.
+
+
+if __name__ == '__main__':
+  main()
diff --git a/win/use_ansi_codes.py b/win/use_ansi_codes.py
new file mode 100755
index 000000000000..dfc3c3322596
--- /dev/null
+++ b/win/use_ansi_codes.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python3
+# Copyright 2015 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Prints if the the terminal is likely to understand ANSI codes.""" + + +import os + +# Add more terminals here as needed. +print('ANSICON' in os.environ) diff --git a/write_buildflag_header.py b/write_buildflag_header.py new file mode 100755 index 000000000000..89a073761e25 --- /dev/null +++ b/write_buildflag_header.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python3 +# Copyright 2015 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# This writes headers for build flags. See buildflag_header.gni for usage of +# this system as a whole. +# +# The parameters are passed in a response file so we don't have to worry +# about command line lengths. The name of the response file is passed on the +# command line. +# +# The format of the response file is: +# [--flags ] + +import optparse +import os +import re +import shlex + + +class Options: + def __init__(self, output, rulename, header_guard, flags): + self.output = output + self.rulename = rulename + self.header_guard = header_guard + self.flags = flags + + +def GetOptions(): + parser = optparse.OptionParser() + parser.add_option('--output', help="Output header name inside --gen-dir.") + parser.add_option('--rulename', + help="Helpful name of build rule for including in the " + + "comment at the top of the file.") + parser.add_option('--gen-dir', + help="Path to root of generated file directory tree.") + parser.add_option('--definitions', + help="Name of the response file containing the flags.") + cmdline_options, cmdline_flags = parser.parse_args() + + # Compute a valid C++ header guard by replacing non valid chars with '_', + # upper-casing everything and prepending '_' if first symbol is digit. + header_guard = cmdline_options.output.upper() + if header_guard[0].isdigit(): + header_guard = '_' + header_guard + header_guard = re.sub('[^\w]', '_', header_guard) + header_guard += '_' + + # The actual output file is inside the gen dir. + output = os.path.join(cmdline_options.gen_dir, cmdline_options.output) + + # Definition file in GYP is newline separated, in GN they are shell formatted. + # shlex can parse both of these. + with open(cmdline_options.definitions, 'r') as def_file: + defs = shlex.split(def_file.read()) + flags_index = defs.index('--flags') + + # Everything after --flags are flags. true/false are remapped to 1/0, + # everything else is passed through. + flags = [] + for flag in defs[flags_index + 1 :]: + equals_index = flag.index('=') + key = flag[:equals_index] + value = flag[equals_index + 1:] + + # Canonicalize and validate the value. 
+    if value == 'true':
+      value = '1'
+    elif value == 'false':
+      value = '0'
+    flags.append((key, str(value)))
+
+  return Options(output=output,
+                 rulename=cmdline_options.rulename,
+                 header_guard=header_guard,
+                 flags=flags)
+
+
+def WriteHeader(options):
+  with open(options.output, 'w') as output_file:
+    output_file.write("// Generated by build/write_buildflag_header.py\n")
+    if options.rulename:
+      output_file.write('// From "' + options.rulename + '"\n')
+
+    output_file.write('\n#ifndef %s\n' % options.header_guard)
+    output_file.write('#define %s\n\n' % options.header_guard)
+    output_file.write('#include "build/buildflag.h"  // IWYU pragma: export\n\n')
+
+    for pair in options.flags:
+      output_file.write('#define BUILDFLAG_INTERNAL_%s() (%s)\n' % pair)
+
+    output_file.write('\n#endif  // %s\n' % options.header_guard)
+
+
+options = GetOptions()
+WriteHeader(options)
diff --git a/xcode_binaries.yaml b/xcode_binaries.yaml
new file mode 100644
index 000000000000..b9069f4b41f9
--- /dev/null
+++ b/xcode_binaries.yaml
@@ -0,0 +1,59 @@
+# This yaml file is used to package binaries from Xcode.app.
+# To use this:
+#   1) Move Xcode.app to the same directory as this file,
+#      and rename Xcode.app to xcode_binaries. Or make a symlink:
+#      $ rm -rf build/xcode_binaries && \
+#        ln -s /Applications/Xcode.app build/xcode_binaries
+#   2) Call `cipd create --pkg-def build/xcode_binaries.yaml`
+# To deploy the newly created cipd package across the fleet, modify
+# mac_toolchain.py to point to the new cipd hash.
+#
+# Note that runhooks extracts the cipd file to build/mac_files/xcode_binaries
+# -- your build/xcode_binaries you're creating in step 1 above isn't used as
+# part of the Chromium build, build/mac_files/xcode_binaries is. So you need to
+# `runhooks` after updating the hash in mac_toolchain.py like everyone else to
+# get the new bits for your local build.
+#
+# The ACLs for this package are determined by the directory structure. The
+# nomenclature mirrors that of the hermetic toolchain to avoid ACL duplication.
+package: infra_internal/ios/xcode/xcode_binaries/mac-amd64
+description: A hermetic deployment of all Xcode binaries used to build Chromium.
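+# A hedged example of the packaging step from 2) above (the tag value here is
+# hypothetical):
+#   $ cipd create --pkg-def build/xcode_binaries.yaml -tag xcode_version:14C18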
+root: "xcode_binaries" +data: + - dir: Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/ + - dir: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/share/bison + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/bison + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/gm4 + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/gperf + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/install_name_tool + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/ld + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/libtool + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/lipo + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/llvm-nm + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/llvm-objdump + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/llvm-otool + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/llvm-size + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/mig + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/nm + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/otool + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/otool-classic + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/size + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/size-classic + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/strip + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/libLTO.dylib + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/libcodedirectory.dylib + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/libswiftDemangle.dylib + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/libtapi.dylib + - file: Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/libexec/migcom + - file: Contents/Developer/usr/bin/atos + - file: Contents/Developer/usr/bin/notarytool + - file: Contents/Resources/en.lproj/License.rtf + - file: Contents/Resources/LicenseInfo.plist + - dir: Contents/SharedFrameworks/CoreSymbolicationDT.framework + - dir: Contents/SharedFrameworks/DebugSymbolsDT.framework + - dir: Contents/SharedFrameworks/MallocStackLoggingDT.framework + - dir: Contents/SharedFrameworks/SymbolicationDT.framework + - file: Contents/version.plist +# llvm-size and size are run on swarming, those are symlinked to out of +# isolated tree and produce invdalid symlink if we don't use copy mode here. +install_mode: copy diff --git a/zip_helpers.py b/zip_helpers.py new file mode 100644 index 000000000000..b8ab9dd0baba --- /dev/null +++ b/zip_helpers.py @@ -0,0 +1,238 @@ +# Copyright 2023 The Chromium Authors +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Helper functions for dealing with .zip files.""" + +import os +import pathlib +import posixpath +import stat +import time +import zipfile + +_FIXED_ZIP_HEADER_LEN = 30 + + +def _set_alignment(zip_obj, zip_info, alignment): + """Sets a ZipInfo's extra field such that the file will be aligned. + + Args: + zip_obj: The ZipFile object that is being written. + zip_info: The ZipInfo object about to be written. + alignment: The amount of alignment (e.g. 4, or 4*1024). 
+ """ + header_size = _FIXED_ZIP_HEADER_LEN + len(zip_info.filename) + pos = zip_obj.fp.tell() + header_size + padding_needed = (alignment - (pos % alignment)) % alignment + + # Python writes |extra| to both the local file header and the central + # directory's file header. Android's zipalign tool writes only to the + # local file header, so there is more overhead in using Python to align. + zip_info.extra = b'\0' * padding_needed + + +def _hermetic_date_time(timestamp=None): + if not timestamp: + return (2001, 1, 1, 0, 0, 0) + utc_time = time.gmtime(timestamp) + return (utc_time.tm_year, utc_time.tm_mon, utc_time.tm_mday, utc_time.tm_hour, + utc_time.tm_min, utc_time.tm_sec) + + +def add_to_zip_hermetic(zip_file, + zip_path, + *, + src_path=None, + data=None, + compress=None, + alignment=None, + timestamp=None): + """Adds a file to the given ZipFile with a hard-coded modified time. + + Args: + zip_file: ZipFile instance to add the file to. + zip_path: Destination path within the zip file (or ZipInfo instance). + src_path: Path of the source file. Mutually exclusive with |data|. + data: File data as a string. + compress: Whether to enable compression. Default is taken from ZipFile + constructor. + alignment: If set, align the data of the entry to this many bytes. + timestamp: The last modification date and time for the archive member. + """ + assert (src_path is None) != (data is None), ( + '|src_path| and |data| are mutually exclusive.') + if isinstance(zip_path, zipfile.ZipInfo): + zipinfo = zip_path + zip_path = zipinfo.filename + else: + zipinfo = zipfile.ZipInfo(filename=zip_path) + zipinfo.external_attr = 0o644 << 16 + + zipinfo.date_time = _hermetic_date_time(timestamp) + + if alignment: + _set_alignment(zip_file, zipinfo, alignment) + + # Filenames can contain backslashes, but it is more likely that we've + # forgotten to use forward slashes as a directory separator. + assert '\\' not in zip_path, 'zip_path should not contain \\: ' + zip_path + assert not posixpath.isabs(zip_path), 'Absolute zip path: ' + zip_path + assert not zip_path.startswith('..'), 'Should not start with ..: ' + zip_path + assert posixpath.normpath(zip_path) == zip_path, ( + f'Non-canonical zip_path: {zip_path} vs: {posixpath.normpath(zip_path)}') + assert zip_path not in zip_file.namelist(), ( + 'Tried to add a duplicate zip entry: ' + zip_path) + + if src_path and os.path.islink(src_path): + zipinfo.external_attr |= stat.S_IFLNK << 16 # mark as a symlink + zip_file.writestr(zipinfo, os.readlink(src_path)) + return + + # Maintain the executable bit. + if src_path: + st = os.stat(src_path) + for mode in (stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH): + if st.st_mode & mode: + zipinfo.external_attr |= mode << 16 + + if src_path: + with open(src_path, 'rb') as f: + data = f.read() + + # zipfile will deflate even when it makes the file bigger. To avoid + # growing files, disable compression at an arbitrary cut off point. + if len(data) < 16: + compress = False + + # None converts to ZIP_STORED, when passed explicitly rather than the + # default passed to the ZipFile constructor. + compress_type = zip_file.compression + if compress is not None: + compress_type = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED + zip_file.writestr(zipinfo, data, compress_type) + + +def add_files_to_zip(inputs, + output, + *, + base_dir=None, + compress=None, + zip_prefix_path=None, + timestamp=None): + """Creates a zip file from a list of files. 
+
+  Args:
+    inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples.
+    output: Path, fileobj, or ZipFile instance to add files to.
+    base_dir: Prefix to strip from inputs.
+    compress: Whether to compress.
+    zip_prefix_path: Path prepended to file path in zip file.
+    timestamp: Unix timestamp to use for files in the archive.
+  """
+  if base_dir is None:
+    base_dir = '.'
+  input_tuples = []
+  for tup in inputs:
+    if isinstance(tup, str):
+      src_path = tup
+      zip_path = os.path.relpath(src_path, base_dir)
+      # Zip files always use / as path separator.
+      if os.path.sep != posixpath.sep:
+        zip_path = str(pathlib.Path(zip_path).as_posix())
+      tup = (zip_path, src_path)
+    input_tuples.append(tup)
+
+  # Sort by zip path to ensure stable zip ordering.
+  input_tuples.sort(key=lambda tup: tup[0])
+
+  out_zip = output
+  if not isinstance(output, zipfile.ZipFile):
+    out_zip = zipfile.ZipFile(output, 'w')
+
+  try:
+    for zip_path, fs_path in input_tuples:
+      if zip_prefix_path:
+        zip_path = posixpath.join(zip_prefix_path, zip_path)
+      add_to_zip_hermetic(out_zip,
+                          zip_path,
+                          src_path=fs_path,
+                          compress=compress,
+                          timestamp=timestamp)
+  finally:
+    if output is not out_zip:
+      out_zip.close()
+
+
+def zip_directory(output, base_dir, **kwargs):
+  """Zips all files in the given directory."""
+  inputs = []
+  for root, _, files in os.walk(base_dir):
+    for f in files:
+      inputs.append(os.path.join(root, f))
+
+  add_files_to_zip(inputs, output, base_dir=base_dir, **kwargs)
+
+
+def merge_zips(output, input_zips, path_transform=None, compress=None):
+  """Combines all files from |input_zips| into |output|.
+
+  Args:
+    output: Path, fileobj, or ZipFile instance to add files to.
+    input_zips: Iterable of paths to zip files to merge.
+    path_transform: Called for each entry path. Returns a new path, or None to
+        skip the file.
+    compress: Overrides compression setting from origin zip entries.
+  """
+  assert not isinstance(input_zips, str)  # Easy mistake to make.
+  if isinstance(output, zipfile.ZipFile):
+    out_zip = output
+    out_filename = output.filename
+  else:
+    assert isinstance(output, str), 'Was: ' + repr(output)
+    out_zip = zipfile.ZipFile(output, 'w')
+    out_filename = output
+
+  # Include paths in the existing zip here to avoid adding duplicate files.
+  crc_by_name = {i.filename: (out_filename, i.CRC) for i in out_zip.infolist()}
+
+  try:
+    for in_file in input_zips:
+      with zipfile.ZipFile(in_file, 'r') as in_zip:
+        for info in in_zip.infolist():
+          # Ignore directories.
+          if info.filename[-1] == '/':
+            continue
+          if path_transform:
+            dst_name = path_transform(info.filename)
+            if dst_name is None:
+              continue
+          else:
+            dst_name = info.filename
+
+          data = in_zip.read(info)
+
+          # If there's a duplicate file, ensure contents is the same and skip
+          # adding it multiple times.
+          if dst_name in crc_by_name:
+            orig_filename, orig_crc = crc_by_name[dst_name]
+            new_crc = zipfile.crc32(data)
+            if new_crc == orig_crc:
+              continue
+            msg = f"""File appeared in multiple inputs with differing contents.
+File: {dst_name}
+Input1: {orig_filename}
+Input2: {in_file}"""
+            raise Exception(msg)
+
+          if compress is not None:
+            compress_entry = compress
+          else:
+            compress_entry = info.compress_type != zipfile.ZIP_STORED
+          add_to_zip_hermetic(out_zip,
+                              dst_name,
+                              data=data,
+                              compress=compress_entry)
+          crc_by_name[dst_name] = (in_file, out_zip.getinfo(dst_name).CRC)
+  finally:
+    if output is not out_zip:
+      out_zip.close()
diff --git a/zip_helpers_unittest.py b/zip_helpers_unittest.py
new file mode 100755
index 000000000000..19000273b5ca
--- /dev/null
+++ b/zip_helpers_unittest.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python3
+# Copyright 2023 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import pathlib
+import shutil
+import sys
+import tempfile
+import unittest
+import zipfile
+
+import zip_helpers
+
+
+def _make_test_zips(tmp_dir, create_conflict=False):
+  zip1 = os.path.join(tmp_dir, 'A.zip')
+  zip2 = os.path.join(tmp_dir, 'B.zip')
+  with zipfile.ZipFile(zip1, 'w') as z:
+    z.writestr('file1', 'AAAAA')
+    z.writestr('file2', 'BBBBB')
+  with zipfile.ZipFile(zip2, 'w') as z:
+    z.writestr('file2', 'ABABA' if create_conflict else 'BBBBB')
+    z.writestr('file3', 'CCCCC')
+  return zip1, zip2
+
+
+class ZipHelpersTest(unittest.TestCase):
+  def test_merge_zips__identical_file(self):
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      zip1, zip2 = _make_test_zips(tmp_dir)
+
+      merged_zip = os.path.join(tmp_dir, 'merged.zip')
+      zip_helpers.merge_zips(merged_zip, [zip1, zip2])
+
+      with zipfile.ZipFile(merged_zip) as z:
+        self.assertEqual(z.namelist(), ['file1', 'file2', 'file3'])
+
+  def test_merge_zips__conflict(self):
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      zip1, zip2 = _make_test_zips(tmp_dir, create_conflict=True)
+
+      merged_zip = os.path.join(tmp_dir, 'merged.zip')
+      with self.assertRaises(Exception):
+        zip_helpers.merge_zips(merged_zip, [zip1, zip2])
+
+  def test_merge_zips__conflict_with_append(self):
+    with tempfile.TemporaryDirectory() as tmp_dir:
+      zip1, zip2 = _make_test_zips(tmp_dir, create_conflict=True)
+
+      with self.assertRaises(Exception):
+        with zipfile.ZipFile(zip1, 'a') as dst_zip:
+          zip_helpers.merge_zips(dst_zip, [zip2])
+
+
+if __name__ == '__main__':
+  unittest.main()

From 6365d6cd473b4d655cad2b0d2597c6e133d55e77 Mon Sep 17 00:00:00 2001
From: Dana Dahlstrom
Date: Fri, 12 Jan 2024 16:00:00 -0800
Subject: [PATCH 2/3] Restore some Cobalt customizations

---
 build/BUILD.gn                          | 1 +
 build/config/android/internal_rules.gni | 2 +-
 build/config/apple/sdk_info.py          | 3 ++-
 build/config/compiler/compiler.gni      | 4 ++++
 build/config/dcheck_always_on.gni       | 2 ++
 build/config/ozone.gni                  | 1 +
 build/config/pch.gni                    | 1 +
 build/config/ui.gni                     | 1 +
 build/toolchain/BUILD.gn                | 2 +-
 9 files changed, 14 insertions(+), 3 deletions(-)

diff --git a/build/BUILD.gn b/build/BUILD.gn
index 58f5f20fb4b4..663413278785 100644
--- a/build/BUILD.gn
+++ b/build/BUILD.gn
@@ -40,6 +40,7 @@ buildflag_header("chromecast_buildflags") {
   header = "chromecast_buildflags.h"
 
   flags = [
+    "IS_CHROMECAST=$is_chromecast",
     "IS_CASTOS=$is_castos",
     "IS_CAST_ANDROID=$is_cast_android",
     "ENABLE_CAST_RECEIVER=$enable_cast_receiver",
diff --git a/build/config/android/internal_rules.gni b/build/config/android/internal_rules.gni
index 41abf539fe96..427fa0dca401 100644
--- a/build/config/android/internal_rules.gni
+++ b/build/config/android/internal_rules.gni
@@ -15,7 +15,7 @@ import("//build/toolchain/goma.gni")
 import("//build/toolchain/kythe.gni")
import("//build/util/generate_wrapper.gni") import("//build_overrides/build.gni") -if (current_toolchain == default_toolchain) { +if (is_starboardized_toolchain || current_toolchain == default_toolchain) { import("//build/toolchain/concurrent_links.gni") } assert(is_android) diff --git a/build/config/apple/sdk_info.py b/build/config/apple/sdk_info.py index 81b06d438df1..7928dbf81ea7 100755 --- a/build/config/apple/sdk_info.py +++ b/build/config/apple/sdk_info.py @@ -134,7 +134,8 @@ def main(): default='.', help='Value of gn $root_build_dir') parser.add_argument('platform', - choices=['iphoneos', 'iphonesimulator', 'macosx']) + choices=['iphoneos', 'iphonesimulator', 'macosx', + 'appletvos']) # Cobalt: for internal build args = parser.parse_args() if args.developer_dir: os.environ['DEVELOPER_DIR'] = args.developer_dir diff --git a/build/config/compiler/compiler.gni b/build/config/compiler/compiler.gni index 4738ee80d307..aa5f37fd0b06 100644 --- a/build/config/compiler/compiler.gni +++ b/build/config/compiler/compiler.gni @@ -37,6 +37,10 @@ declare_args() { # The default linker everywhere else. use_lld = is_clang && current_os != "zos" + if (use_cobalt_customizations && is_apple) { + use_lld = false + } + # If true, optimize for size. # Default to favoring speed over size for platforms not listed below. optimize_for_size = diff --git a/build/config/dcheck_always_on.gni b/build/config/dcheck_always_on.gni index cca3a547cd55..26cb76c7b7cf 100644 --- a/build/config/dcheck_always_on.gni +++ b/build/config/dcheck_always_on.gni @@ -2,10 +2,12 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +if (!use_cobalt_customizations) { # TODO(crbug.com/1233050): Until the bug is resolved we need to include # gclient_args for the definition of build_with_chromium and build_overrides # for client overrides of that flag. The latter should go away. import("//build/config/gclient_args.gni") +} import("//build_overrides/build.gni") declare_args() { # Enables DCHECKs to be built-in, but to default to being non-fatal/log-only. diff --git a/build/config/ozone.gni b/build/config/ozone.gni index 8bb512ad634b..db0a5b83dab3 100644 --- a/build/config/ozone.gni +++ b/build/config/ozone.gni @@ -10,6 +10,7 @@ declare_args() { # Indicates if Ozone is enabled. Ozone is a low-level library layer for Linux # that does not require X11. use_ozone = is_chromeos || is_fuchsia || is_linux + && !is_starboard } declare_args() { diff --git a/build/config/pch.gni b/build/config/pch.gni index bc4e9e6d150b..9d877918a054 100644 --- a/build/config/pch.gni +++ b/build/config/pch.gni @@ -12,4 +12,5 @@ declare_args() { # On Linux it slows down the build, so don't enable it by default. enable_precompiled_headers = !is_official_build && !(use_goma || use_remoteexec) && !is_linux + && !is_starboard } diff --git a/build/config/ui.gni b/build/config/ui.gni index 3989967ea82b..388b29a23f01 100644 --- a/build/config/ui.gni +++ b/build/config/ui.gni @@ -39,6 +39,7 @@ declare_args() { use_glib = is_linux && !is_castos && + !is_starboard && # Avoid the need for glib when Android is building things via secondary # toolchains. 
target_os != "android" diff --git a/build/toolchain/BUILD.gn b/build/toolchain/BUILD.gn index a3bd8c58cc70..73e3a71cc21b 100644 --- a/build/toolchain/BUILD.gn +++ b/build/toolchain/BUILD.gn @@ -11,7 +11,7 @@ declare_args() { action_pool_depth = -1 } -if (current_toolchain == default_toolchain) { +if (is_starboardized_toolchain || current_toolchain == default_toolchain) { if (action_pool_depth == -1 || (use_goma || use_remoteexec)) { action_pool_depth = exec_script("get_cpu_count.py", [], "value") } From 2b95e304c1d9b2e41986688aba686ef70fd6aacc Mon Sep 17 00:00:00 2001 From: Dana Dahlstrom Date: Fri, 12 Jan 2024 16:00:00 -0800 Subject: [PATCH 3/3] Move added cflags entries --- build/config/compiler/BUILD.gn | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/build/config/compiler/BUILD.gn b/build/config/compiler/BUILD.gn index cefc3772ca01..6c6fd688ba89 100644 --- a/build/config/compiler/BUILD.gn +++ b/build/config/compiler/BUILD.gn @@ -1631,14 +1631,13 @@ config("default_warnings") { # which we no longer use. Check if it makes sense to remove # this as well. http://crbug.com/316352 "-Wno-unneeded-internal-declaration", - - "-Wno-extra-semi", - "-Wno-pessimizing-move", - "-Wno-shadow", ] if (use_cobalt_customizations) { cflags += [ + "-Wno-extra-semi", + "-Wno-pessimizing-move", + "-Wno-shadow", "-Wno-range-loop-bind-reference", "-Wno-range-loop-construct", ]