From 2a19c63448c84c1805fb1a585c3651318bb86ca7 Mon Sep 17 00:00:00 2001 From: Allan Sandfeld Jensen Date: Tue, 28 Aug 2018 15:28:34 +0200 Subject: BASELINE: Update Chromium to 69.0.3497.70 Change-Id: I2b7b56e4e7a8b26656930def0d4575dc32b900a0 Reviewed-by: Allan Sandfeld Jensen --- chromium/tools/accessibility/inspect/README.md | 32 +- .../tools/accessibility/inspect/ax_dump_events.cc | 3 + .../tools/accessibility/inspect/ax_dump_tree.cc | 15 +- .../tools/accessibility/inspect/ax_tree_server.cc | 2 +- .../tools/accessibility/inspect/chrome-events.ps1 | 12 + .../tools/accessibility/inspect/chrome-tree.ps1 | 13 + .../accessibility/inspect/chromium-events.ps1 | 12 + .../tools/accessibility/inspect/chromium-tree.ps1 | 13 + .../accessibility/inspect/example-tree-filters.txt | 8 + chromium/tools/accessibility/inspect/ff-events.ps1 | 5 + chromium/tools/accessibility/inspect/ff-tree.ps1 | 5 + .../rebase_dump_accessibility_tree_test.py | 22 +- chromium/tools/android/BUILD.gn | 7 + chromium/tools/android/forwarder2/BUILD.gn | 2 - chromium/tools/android/io_benchmark/BUILD.gn | 14 + chromium/tools/android/md5sum/BUILD.gn | 8 - chromium/tools/android/memdump/BUILD.gn | 1 - chromium/tools/battor_agent/BUILD.gn | 1 - .../battor_connection_impl_unittest.cc | 29 +- chromium/tools/binary_size/BUILD.gn | 1 + chromium/tools/binary_size/README.md | 32 +- chromium/tools/binary_size/diagnose_bloat.py | 95 +- chromium/tools/binary_size/html_report_faq.md | 80 + .../tools/binary_size/libsupersize/apkanalyzer.py | 11 +- chromium/tools/binary_size/libsupersize/archive.py | 184 +- .../tools/binary_size/libsupersize/bcanalyzer.py | 379 + .../binary_size/libsupersize/bcanalyzer_test.py | 146 + .../tools/binary_size/libsupersize/concurrent.py | 28 +- .../binary_size/libsupersize/concurrent_test.py | 31 +- chromium/tools/binary_size/libsupersize/console.py | 15 +- .../tools/binary_size/libsupersize/file_format.py | 216 +- .../tools/binary_size/libsupersize/html_report.py | 414 +- 
.../binary_size/libsupersize/integration_test.py | 92 +- .../binary_size/libsupersize/linker_map_parser.py | 65 +- chromium/tools/binary_size/libsupersize/main.py | 6 +- chromium/tools/binary_size/libsupersize/models.py | 53 +- chromium/tools/binary_size/libsupersize/nm.py | 574 +- .../tools/binary_size/libsupersize/obj_analyzer.py | 371 + .../tools/binary_size/libsupersize/path_util.py | 11 +- .../tools/binary_size/libsupersize/start_server.py | 51 + .../binary_size/libsupersize/static/favicon.ico | Bin 0 -> 25995 bytes .../binary_size/libsupersize/static/index.html | 603 ++ .../binary_size/libsupersize/static/infocard-ui.js | 328 + .../binary_size/libsupersize/static/infocard.css | 113 + .../binary_size/libsupersize/static/options.css | 291 + .../binary_size/libsupersize/static/shared.js | 141 + .../libsupersize/static/start-worker.js | 95 + .../tools/binary_size/libsupersize/static/state.js | 402 + .../binary_size/libsupersize/static/tree-ui.js | 411 + .../binary_size/libsupersize/static/tree-worker.js | 759 ++ .../binary_size/libsupersize/string_extract.py | 249 + .../libsupersize/template/D3SymbolTreeMap.js | 930 --- .../binary_size/libsupersize/template/index.html | 517 -- .../libsupersize/template/test-data-generator.html | 157 - .../testdata/mock_output_directory/args.gn | 2 - .../mock_source_directory/out/Release/args.gn | 2 + .../gvr-android-sdk/libgvr_shim_static_arm.a | 3 - chromium/tools/binary_size/supersize.pydeps | 3 + .../binary_size/trybot_commit_size_checker.py | 70 + chromium/tools/cfi/blacklist.txt | 1 - chromium/tools/checkperms/checkperms.py | 3 +- chromium/tools/chrome_proxy/webdriver/bypass.py | 6 +- chromium/tools/chrome_proxy/webdriver/lite_page.py | 112 +- chromium/tools/chrome_proxy/webdriver/lofi.py | 43 + .../webdriver/variations_combinations.py | 4 +- .../base_bind_rewriters/BaseBindRewriters.cpp | 72 +- .../tools/clang/blink_gc_plugin/BlinkGCPlugin.cpp | 2 - .../blink_gc_plugin/BlinkGCPluginConsumer.cpp | 28 - 
.../clang/blink_gc_plugin/BlinkGCPluginConsumer.h | 7 - .../clang/blink_gc_plugin/BlinkGCPluginOptions.h | 3 - .../tools/clang/blink_gc_plugin/CMakeLists.txt | 1 - .../blink_gc_plugin/CheckTraceWrappersVisitor.cpp | 103 - .../blink_gc_plugin/CheckTraceWrappersVisitor.h | 39 - .../tools/clang/blink_gc_plugin/CollectVisitor.cpp | 6 - .../tools/clang/blink_gc_plugin/CollectVisitor.h | 2 - chromium/tools/clang/blink_gc_plugin/Config.cpp | 31 - chromium/tools/clang/blink_gc_plugin/Config.h | 16 - .../clang/blink_gc_plugin/DiagnosticsReporter.cpp | 9 - .../clang/blink_gc_plugin/DiagnosticsReporter.h | 4 - .../tools/clang/blink_gc_plugin/RecordInfo.cpp | 123 +- chromium/tools/clang/blink_gc_plugin/RecordInfo.h | 28 +- chromium/tools/clang/plugins/OWNERS | 2 +- chromium/tools/clang/pylib/clang/compile_db.py | 11 +- chromium/tools/clang/scripts/generate_compdb.py | 17 +- chromium/tools/clang/scripts/package.py | 3 +- chromium/tools/clang/scripts/run_tool.py | 14 +- chromium/tools/clang/scripts/update.py | 59 +- .../translation_unit/TranslationUnitGenerator.cpp | 12 +- .../test_files/compile_commands.json.template | 7 +- .../translation_unit/test_files/includes_self.cc | 6 + .../test_files/includes_self.cc.filepaths.expected | 1 + .../test_files/test.cc.filepaths.expected | 4 +- .../translation_unit/test_translation_unit.py | 2 + chromium/tools/code_coverage/test_suite.txt | 9 +- chromium/tools/cygprofile/BUILD.gn | 33 - chromium/tools/cygprofile/delayed_dumper.cc | 54 - .../tools/cygprofile/lightweight_cygprofile.cc | 224 - chromium/tools/cygprofile/lightweight_cygprofile.h | 37 - .../cygprofile/lightweight_cygprofile_perftest.cc | 123 - .../cygprofile/orderfile_generator_backend.py | 42 +- .../tools/cygprofile/profile_android_startup.py | 108 +- .../tools/determinism/compare_build_artifacts.py | 18 +- .../determinism/deterministic_build_whitelist.pyl | 1 - chromium/tools/dump_process_memory/BUILD.gn | 18 + chromium/tools/dump_process_memory/OWNERS | 3 + 
.../tools/dump_process_memory/analyze_dumps.py | 253 + chromium/tools/dump_process_memory/dump_process.cc | 259 + chromium/tools/emacs/trybot-mac.txt | 6 +- .../comparative_tester/comparative_tester.py | 430 +- .../comparative_tester/generate_perf_report.py | 289 + .../fuchsia/comparative_tester/target_spec.py | 25 +- .../fuchsia/comparative_tester/test_results.py | 186 + chromium/tools/fuchsia/local-sdk.py | 2 +- chromium/tools/gdb/gdbinit | 4 +- chromium/tools/gn/BUILD.gn | 374 - chromium/tools/gn/DEPS | 3 - chromium/tools/gn/OWNERS | 3 - chromium/tools/gn/README.md | 19 +- chromium/tools/gn/action_target_generator.cc | 221 - chromium/tools/gn/action_target_generator.h | 41 - .../tools/gn/action_target_generator_unittest.cc | 122 - chromium/tools/gn/action_values.cc | 31 - chromium/tools/gn/action_values.h | 70 - chromium/tools/gn/analyzer.cc | 486 -- chromium/tools/gn/analyzer.h | 104 - chromium/tools/gn/analyzer_unittest.cc | 594 -- chromium/tools/gn/args.cc | 424 - chromium/tools/gn/args.h | 147 - chromium/tools/gn/args_unittest.cc | 81 - chromium/tools/gn/bin/compare_test_lists.py | 101 - chromium/tools/gn/bin/gn-format.py | 59 - chromium/tools/gn/bin/help_as_html.py | 105 - chromium/tools/gn/bin/roll_gn.py | 461 -- chromium/tools/gn/binary_target_generator.cc | 182 - chromium/tools/gn/binary_target_generator.h | 40 - chromium/tools/gn/bootstrap/OWNERS | 6 +- chromium/tools/gn/bootstrap/bootstrap.py | 1037 +-- chromium/tools/gn/bootstrap/build.ninja.template | 19 - .../tools/gn/bootstrap/build_aix.ninja.template | 19 - .../tools/gn/bootstrap/build_mac.ninja.template | 19 - .../tools/gn/bootstrap/build_vs.ninja.template | 28 - chromium/tools/gn/build_settings.cc | 77 - chromium/tools/gn/build_settings.h | 140 - chromium/tools/gn/builder.cc | 604 -- chromium/tools/gn/builder.h | 148 - chromium/tools/gn/builder_record.cc | 74 - chromium/tools/gn/builder_record.h | 113 - chromium/tools/gn/builder_unittest.cc | 248 - chromium/tools/gn/bundle_data.cc | 170 - 
chromium/tools/gn/bundle_data.h | 199 - chromium/tools/gn/bundle_data_target_generator.cc | 94 - chromium/tools/gn/bundle_data_target_generator.h | 33 - chromium/tools/gn/bundle_file_rule.cc | 66 - chromium/tools/gn/bundle_file_rule.h | 51 - chromium/tools/gn/c_include_iterator.cc | 175 - chromium/tools/gn/c_include_iterator.h | 57 - chromium/tools/gn/c_include_iterator_unittest.cc | 159 - chromium/tools/gn/command_analyze.cc | 133 - chromium/tools/gn/command_args.cc | 505 -- chromium/tools/gn/command_check.cc | 256 - chromium/tools/gn/command_clean.cc | 129 - chromium/tools/gn/command_desc.cc | 516 -- chromium/tools/gn/command_format.cc | 1119 --- chromium/tools/gn/command_format.h | 27 - chromium/tools/gn/command_format_unittest.cc | 108 - chromium/tools/gn/command_gen.cc | 477 -- chromium/tools/gn/command_help.cc | 316 - chromium/tools/gn/command_ls.cc | 112 - chromium/tools/gn/command_path.cc | 415 - chromium/tools/gn/command_refs.cc | 501 -- chromium/tools/gn/commands.cc | 568 -- chromium/tools/gn/commands.h | 201 - chromium/tools/gn/config.cc | 50 - chromium/tools/gn/config.h | 71 - chromium/tools/gn/config_unittest.cc | 85 - chromium/tools/gn/config_values.cc | 46 - chromium/tools/gn/config_values.h | 98 - chromium/tools/gn/config_values_extractors.cc | 35 - chromium/tools/gn/config_values_extractors.h | 107 - .../tools/gn/config_values_extractors_unittest.cc | 138 - chromium/tools/gn/config_values_generator.cc | 128 - chromium/tools/gn/config_values_generator.h | 45 - chromium/tools/gn/copy_target_generator.cc | 43 - chromium/tools/gn/copy_target_generator.h | 28 - .../tools/gn/create_bundle_target_generator.cc | 301 - chromium/tools/gn/create_bundle_target_generator.h | 45 - chromium/tools/gn/deps_iterator.cc | 55 - chromium/tools/gn/deps_iterator.h | 72 - chromium/tools/gn/desc_builder.cc | 729 -- chromium/tools/gn/desc_builder.h | 27 - chromium/tools/gn/docs/cross_compiles.md | 125 - chromium/tools/gn/docs/faq.md | 52 - chromium/tools/gn/docs/hacking.md 
| 23 - chromium/tools/gn/docs/language.md | 540 -- chromium/tools/gn/docs/quick_start.md | 365 - chromium/tools/gn/docs/reference.md | 6405 --------------- chromium/tools/gn/docs/standalone.md | 44 - chromium/tools/gn/docs/style_guide.md | 286 - chromium/tools/gn/docs/update_binaries.md | 5 - chromium/tools/gn/eclipse_writer.cc | 172 - chromium/tools/gn/eclipse_writer.h | 67 - chromium/tools/gn/err.cc | 202 - chromium/tools/gn/err.h | 99 - chromium/tools/gn/escape.cc | 202 - chromium/tools/gn/escape.h | 80 - chromium/tools/gn/escape_unittest.cc | 64 - chromium/tools/gn/example/.gn | 2 - chromium/tools/gn/example/BUILD.gn | 30 - chromium/tools/gn/example/README.txt | 4 - chromium/tools/gn/example/build/BUILD.gn | 19 - chromium/tools/gn/example/build/BUILDCONFIG.gn | 38 - chromium/tools/gn/example/build/toolchain/BUILD.gn | 86 - chromium/tools/gn/example/hello.cc | 13 - chromium/tools/gn/example/hello_shared.cc | 9 - chromium/tools/gn/example/hello_shared.h | 32 - chromium/tools/gn/example/hello_static.cc | 9 - chromium/tools/gn/example/hello_static.h | 10 - chromium/tools/gn/exec_process.cc | 260 - chromium/tools/gn/exec_process.h | 25 - chromium/tools/gn/exec_process_unittest.cc | 131 - chromium/tools/gn/filesystem_utils.cc | 1063 --- chromium/tools/gn/filesystem_utils.h | 304 - chromium/tools/gn/filesystem_utils_unittest.cc | 850 -- chromium/tools/gn/format_test_data/001.gn | 2 - chromium/tools/gn/format_test_data/001.golden | 3 - chromium/tools/gn/format_test_data/002.gn | 6 - chromium/tools/gn/format_test_data/002.golden | 6 - chromium/tools/gn/format_test_data/003.gn | 10 - chromium/tools/gn/format_test_data/003.golden | 10 - chromium/tools/gn/format_test_data/004.gn | 10 - chromium/tools/gn/format_test_data/004.golden | 13 - chromium/tools/gn/format_test_data/005.gn | 5 - chromium/tools/gn/format_test_data/005.golden | 5 - chromium/tools/gn/format_test_data/006.gn | 9 - chromium/tools/gn/format_test_data/006.golden | 5 - 
chromium/tools/gn/format_test_data/007.gn | 9 - chromium/tools/gn/format_test_data/007.golden | 11 - chromium/tools/gn/format_test_data/008.gn | 1 - chromium/tools/gn/format_test_data/008.golden | 5 - chromium/tools/gn/format_test_data/009.gn | 2 - chromium/tools/gn/format_test_data/009.golden | 9 - chromium/tools/gn/format_test_data/010.gn | 2 - chromium/tools/gn/format_test_data/010.golden | 9 - chromium/tools/gn/format_test_data/011.gn | 4 - chromium/tools/gn/format_test_data/011.golden | 13 - chromium/tools/gn/format_test_data/012.gn | 16 - chromium/tools/gn/format_test_data/012.golden | 22 - chromium/tools/gn/format_test_data/013.gn | 7 - chromium/tools/gn/format_test_data/013.golden | 7 - chromium/tools/gn/format_test_data/014.gn | 6 - chromium/tools/gn/format_test_data/014.golden | 5 - chromium/tools/gn/format_test_data/015.gn | 4 - chromium/tools/gn/format_test_data/015.golden | 6 - chromium/tools/gn/format_test_data/016.gn | 1 - chromium/tools/gn/format_test_data/016.golden | 1 - chromium/tools/gn/format_test_data/017.gn | 15 - chromium/tools/gn/format_test_data/017.golden | 16 - chromium/tools/gn/format_test_data/018.gn | 3 - chromium/tools/gn/format_test_data/018.golden | 3 - chromium/tools/gn/format_test_data/019.gn | 23 - chromium/tools/gn/format_test_data/019.golden | 23 - chromium/tools/gn/format_test_data/020.gn | 5 - chromium/tools/gn/format_test_data/020.golden | 5 - chromium/tools/gn/format_test_data/021.gn | 33 - chromium/tools/gn/format_test_data/021.golden | 61 - chromium/tools/gn/format_test_data/022.gn | 6 - chromium/tools/gn/format_test_data/022.golden | 6 - chromium/tools/gn/format_test_data/023.gn | 38 - chromium/tools/gn/format_test_data/023.golden | 88 - chromium/tools/gn/format_test_data/024.gn | 1 - chromium/tools/gn/format_test_data/024.golden | 2 - chromium/tools/gn/format_test_data/025.gn | 5 - chromium/tools/gn/format_test_data/025.golden | 9 - chromium/tools/gn/format_test_data/026.gn | 6 - 
chromium/tools/gn/format_test_data/026.golden | 7 - chromium/tools/gn/format_test_data/027.gn | 3 - chromium/tools/gn/format_test_data/027.golden | 5 - chromium/tools/gn/format_test_data/028.gn | 9 - chromium/tools/gn/format_test_data/028.golden | 7 - chromium/tools/gn/format_test_data/029.gn | 9 - chromium/tools/gn/format_test_data/029.golden | 9 - chromium/tools/gn/format_test_data/030.gn | 12 - chromium/tools/gn/format_test_data/030.golden | 12 - chromium/tools/gn/format_test_data/031.gn | 8 - chromium/tools/gn/format_test_data/031.golden | 8 - chromium/tools/gn/format_test_data/032.gn | 6 - chromium/tools/gn/format_test_data/032.golden | 7 - chromium/tools/gn/format_test_data/033.gn | 8 - chromium/tools/gn/format_test_data/033.golden | 8 - chromium/tools/gn/format_test_data/034.gn | 13 - chromium/tools/gn/format_test_data/035.gn | 1 - chromium/tools/gn/format_test_data/035.golden | 1 - chromium/tools/gn/format_test_data/036.gn | 9 - chromium/tools/gn/format_test_data/036.golden | 9 - chromium/tools/gn/format_test_data/037.gn | 5 - chromium/tools/gn/format_test_data/037.golden | 6 - chromium/tools/gn/format_test_data/038.gn | 4 - chromium/tools/gn/format_test_data/038.golden | 3 - chromium/tools/gn/format_test_data/039.gn | 6 - chromium/tools/gn/format_test_data/039.golden | 4 - chromium/tools/gn/format_test_data/040.gn | 9 - chromium/tools/gn/format_test_data/041.gn | 12 - chromium/tools/gn/format_test_data/041.golden | 12 - chromium/tools/gn/format_test_data/042.gn | 44 - chromium/tools/gn/format_test_data/042.golden | 110 - chromium/tools/gn/format_test_data/043.gn | 6 - chromium/tools/gn/format_test_data/043.golden | 7 - chromium/tools/gn/format_test_data/044.gn | 10 - chromium/tools/gn/format_test_data/044.golden | 11 - chromium/tools/gn/format_test_data/045.gn | 10 - chromium/tools/gn/format_test_data/045.golden | 14 - chromium/tools/gn/format_test_data/046.gn | 22 - chromium/tools/gn/format_test_data/046.golden | 19 - 
chromium/tools/gn/format_test_data/047.gn | 7 - chromium/tools/gn/format_test_data/047.golden | 10 - chromium/tools/gn/format_test_data/048.gn | 19 - chromium/tools/gn/format_test_data/048.golden | 19 - chromium/tools/gn/format_test_data/049.gn | 14 - chromium/tools/gn/format_test_data/050.gn | 10 - chromium/tools/gn/format_test_data/050.golden | 27 - chromium/tools/gn/format_test_data/051.gn | 6 - chromium/tools/gn/format_test_data/051.golden | 7 - chromium/tools/gn/format_test_data/052.gn | 11 - chromium/tools/gn/format_test_data/052.golden | 12 - chromium/tools/gn/format_test_data/053.gn | 7 - chromium/tools/gn/format_test_data/053.golden | 8 - chromium/tools/gn/format_test_data/054.gn | 7 - chromium/tools/gn/format_test_data/054.golden | 8 - chromium/tools/gn/format_test_data/055.gn | 10 - chromium/tools/gn/format_test_data/055.golden | 11 - chromium/tools/gn/format_test_data/056.gn | 45 - chromium/tools/gn/format_test_data/056.golden | 45 - chromium/tools/gn/format_test_data/057.gn | 24 - chromium/tools/gn/format_test_data/057.golden | 24 - chromium/tools/gn/format_test_data/058.gn | 2 - chromium/tools/gn/format_test_data/058.golden | 2 - chromium/tools/gn/format_test_data/059.gn | 10 - chromium/tools/gn/format_test_data/059.golden | 11 - chromium/tools/gn/format_test_data/060.gn | 2 - chromium/tools/gn/format_test_data/060.golden | 2 - chromium/tools/gn/format_test_data/061.gn | 9 - chromium/tools/gn/format_test_data/061.golden | 9 - chromium/tools/gn/format_test_data/062.gn | 122 - chromium/tools/gn/format_test_data/062.golden | 132 - chromium/tools/gn/format_test_data/063.gn | 36 - chromium/tools/gn/format_test_data/063.golden | 36 - chromium/tools/gn/format_test_data/064.gn | 3 - chromium/tools/gn/format_test_data/064.golden | 5 - chromium/tools/gn/format_test_data/065.gn | 4 - chromium/tools/gn/format_test_data/065.golden | 8 - chromium/tools/gn/format_test_data/066.gn | 30 - chromium/tools/gn/format_test_data/066.golden | 28 - 
chromium/tools/gn/format_test_data/067.gn | 8 - chromium/tools/gn/format_test_data/067.golden | 17 - chromium/tools/gn/format_test_data/068.gn | 3 - chromium/tools/gn/format_test_data/068.golden | 3 - chromium/tools/gn/format_test_data/069.gn | 3 - chromium/tools/gn/format_test_data/069.golden | 5 - chromium/tools/gn/format_test_data/070.gn | 15 - chromium/tools/gn/format_test_data/070.golden | 14 - chromium/tools/gn/function_exec_script.cc | 271 - chromium/tools/gn/function_foreach.cc | 113 - chromium/tools/gn/function_foreach_unittest.cc | 100 - .../tools/gn/function_forward_variables_from.cc | 241 - .../gn/function_forward_variables_from_unittest.cc | 244 - chromium/tools/gn/function_get_label_info.cc | 144 - .../tools/gn/function_get_label_info_unittest.cc | 107 - chromium/tools/gn/function_get_path_info.cc | 250 - .../tools/gn/function_get_path_info_unittest.cc | 120 - chromium/tools/gn/function_get_target_outputs.cc | 141 - .../gn/function_get_target_outputs_unittest.cc | 110 - .../tools/gn/function_process_file_template.cc | 115 - .../gn/function_process_file_template_unittest.cc | 64 - chromium/tools/gn/function_read_file.cc | 79 - chromium/tools/gn/function_rebase_path.cc | 295 - chromium/tools/gn/function_rebase_path_unittest.cc | 181 - .../tools/gn/function_set_default_toolchain.cc | 86 - chromium/tools/gn/function_set_defaults.cc | 76 - chromium/tools/gn/function_template.cc | 227 - chromium/tools/gn/function_template_unittest.cc | 29 - chromium/tools/gn/function_toolchain.cc | 1099 --- chromium/tools/gn/function_toolchain_unittest.cc | 60 - chromium/tools/gn/function_write_file.cc | 103 - chromium/tools/gn/function_write_file_unittest.cc | 93 - chromium/tools/gn/functions.cc | 1296 --- chromium/tools/gn/functions.h | 512 -- chromium/tools/gn/functions_target.cc | 797 -- chromium/tools/gn/functions_target_unittest.cc | 124 - chromium/tools/gn/functions_unittest.cc | 172 - chromium/tools/gn/gn_main.cc | 144 - chromium/tools/gn/group_target_generator.cc | 
24 - chromium/tools/gn/group_target_generator.h | 27 - chromium/tools/gn/header_checker.cc | 600 -- chromium/tools/gn/header_checker.h | 198 - chromium/tools/gn/header_checker_unittest.cc | 382 - chromium/tools/gn/import_manager.cc | 157 - chromium/tools/gn/import_manager.h | 53 - chromium/tools/gn/inherited_libraries.cc | 74 - chromium/tools/gn/inherited_libraries.h | 71 - chromium/tools/gn/inherited_libraries_unittest.cc | 135 - chromium/tools/gn/input_conversion.cc | 324 - chromium/tools/gn/input_conversion.h | 30 - chromium/tools/gn/input_conversion_unittest.cc | 274 - chromium/tools/gn/input_file.cc | 30 - chromium/tools/gn/input_file.h | 65 - chromium/tools/gn/input_file_manager.cc | 323 - chromium/tools/gn/input_file_manager.h | 155 - chromium/tools/gn/item.cc | 60 - chromium/tools/gn/item.h | 82 - chromium/tools/gn/json_project_writer.cc | 220 - chromium/tools/gn/json_project_writer.h | 26 - chromium/tools/gn/label.cc | 332 - chromium/tools/gn/label.h | 128 - chromium/tools/gn/label_pattern.cc | 276 - chromium/tools/gn/label_pattern.h | 80 - chromium/tools/gn/label_pattern_unittest.cc | 86 - chromium/tools/gn/label_ptr.h | 117 - chromium/tools/gn/label_unittest.cc | 95 - chromium/tools/gn/last_commit_position.py | 100 - chromium/tools/gn/lib_file.cc | 30 - chromium/tools/gn/lib_file.h | 58 - chromium/tools/gn/loader.cc | 429 - chromium/tools/gn/loader.h | 182 - chromium/tools/gn/loader_unittest.cc | 264 - chromium/tools/gn/location.cc | 77 - chromium/tools/gn/location.h | 61 - chromium/tools/gn/misc/OWNERS | 2 - chromium/tools/gn/misc/emacs/gn-mode.el | 191 - chromium/tools/gn/misc/tm/GN.tmLanguage | 102 - chromium/tools/gn/misc/tm/GN.tmPreferences | 22 - chromium/tools/gn/misc/vim/README.chromium | 5 - chromium/tools/gn/misc/vim/autoload/gn.vim | 26 - chromium/tools/gn/misc/vim/ftdetect/gnfiletype.vim | 27 - chromium/tools/gn/misc/vim/ftplugin/gn.vim | 12 - chromium/tools/gn/misc/vim/syntax/gn.vim | 84 - chromium/tools/gn/ninja_action_target_writer.cc | 
239 - chromium/tools/gn/ninja_action_target_writer.h | 63 - .../gn/ninja_action_target_writer_unittest.cc | 485 -- chromium/tools/gn/ninja_binary_target_writer.cc | 1090 --- chromium/tools/gn/ninja_binary_target_writer.h | 160 - .../gn/ninja_binary_target_writer_unittest.cc | 1125 --- chromium/tools/gn/ninja_build_writer.cc | 594 -- chromium/tools/gn/ninja_build_writer.h | 72 - chromium/tools/gn/ninja_build_writer_unittest.cc | 154 - .../tools/gn/ninja_bundle_data_target_writer.cc | 33 - .../tools/gn/ninja_bundle_data_target_writer.h | 23 - .../gn/ninja_bundle_data_target_writer_unittest.cc | 53 - chromium/tools/gn/ninja_copy_target_writer.cc | 119 - chromium/tools/gn/ninja_copy_target_writer.h | 27 - .../tools/gn/ninja_copy_target_writer_unittest.cc | 94 - .../tools/gn/ninja_create_bundle_target_writer.cc | 332 - .../tools/gn/ninja_create_bundle_target_writer.h | 71 - .../ninja_create_bundle_target_writer_unittest.cc | 457 -- chromium/tools/gn/ninja_group_target_writer.cc | 33 - chromium/tools/gn/ninja_group_target_writer.h | 23 - .../tools/gn/ninja_group_target_writer_unittest.cc | 50 - chromium/tools/gn/ninja_target_writer.cc | 330 - chromium/tools/gn/ninja_target_writer.h | 72 - chromium/tools/gn/ninja_target_writer_unittest.cc | 164 - chromium/tools/gn/ninja_toolchain_writer.cc | 128 - chromium/tools/gn/ninja_toolchain_writer.h | 58 - .../tools/gn/ninja_toolchain_writer_unittest.cc | 25 - chromium/tools/gn/ninja_utils.cc | 29 - chromium/tools/gn/ninja_utils.h | 25 - chromium/tools/gn/ninja_writer.cc | 55 - chromium/tools/gn/ninja_writer.h | 51 - chromium/tools/gn/operators.cc | 783 -- chromium/tools/gn/operators.h | 25 - chromium/tools/gn/operators_unittest.cc | 390 - chromium/tools/gn/ordered_set.h | 71 - chromium/tools/gn/output_file.cc | 50 - chromium/tools/gn/output_file.h | 66 - chromium/tools/gn/parse_node_value_adapter.cc | 45 - chromium/tools/gn/parse_node_value_adapter.h | 55 - chromium/tools/gn/parse_tree.cc | 859 -- chromium/tools/gn/parse_tree.h | 
546 -- chromium/tools/gn/parse_tree_unittest.cc | 255 - chromium/tools/gn/parser.cc | 884 -- chromium/tools/gn/parser.h | 149 - chromium/tools/gn/parser_fuzzer.cc | 71 - chromium/tools/gn/parser_unittest.cc | 744 -- chromium/tools/gn/path_output.cc | 171 - chromium/tools/gn/path_output.h | 91 - chromium/tools/gn/path_output_unittest.cc | 284 - chromium/tools/gn/pattern.cc | 190 - chromium/tools/gn/pattern.h | 92 - chromium/tools/gn/pattern_unittest.cc | 64 - chromium/tools/gn/pool.cc | 45 - chromium/tools/gn/pool.h | 41 - chromium/tools/gn/qt_creator_writer.cc | 176 - chromium/tools/gn/qt_creator_writer.h | 56 - chromium/tools/gn/runtime_deps.cc | 314 - chromium/tools/gn/runtime_deps.h | 28 - chromium/tools/gn/runtime_deps_unittest.cc | 448 - chromium/tools/gn/scheduler.cc | 199 - chromium/tools/gn/scheduler.h | 146 - chromium/tools/gn/scope.cc | 576 -- chromium/tools/gn/scope.h | 397 - chromium/tools/gn/scope_per_file_provider.cc | 119 - chromium/tools/gn/scope_per_file_provider.h | 51 - .../tools/gn/scope_per_file_provider_unittest.cc | 55 - chromium/tools/gn/scope_unittest.cc | 336 - chromium/tools/gn/settings.cc | 34 - chromium/tools/gn/settings.h | 120 - chromium/tools/gn/setup.cc | 818 -- chromium/tools/gn/setup.h | 173 - chromium/tools/gn/source_dir.cc | 159 - chromium/tools/gn/source_dir.h | 163 - chromium/tools/gn/source_dir_unittest.cc | 187 - chromium/tools/gn/source_file.cc | 65 - chromium/tools/gn/source_file.h | 109 - chromium/tools/gn/source_file_type.cc | 34 - chromium/tools/gn/source_file_type.h | 31 - chromium/tools/gn/source_file_unittest.cc | 19 - chromium/tools/gn/standard_out.cc | 335 - chromium/tools/gn/standard_out.h | 41 - chromium/tools/gn/string_utils.cc | 345 - chromium/tools/gn/string_utils.h | 53 - chromium/tools/gn/string_utils_unittest.cc | 157 - chromium/tools/gn/substitution_list.cc | 70 - chromium/tools/gn/substitution_list.h | 47 - chromium/tools/gn/substitution_pattern.cc | 149 - chromium/tools/gn/substitution_pattern.h | 78 - 
chromium/tools/gn/substitution_pattern_unittest.cc | 49 - chromium/tools/gn/substitution_type.cc | 264 - chromium/tools/gn/substitution_type.h | 144 - chromium/tools/gn/substitution_writer.cc | 608 -- chromium/tools/gn/substitution_writer.h | 254 - chromium/tools/gn/substitution_writer_unittest.cc | 337 - chromium/tools/gn/switches.cc | 290 - chromium/tools/gn/switches.h | 107 - chromium/tools/gn/target.cc | 874 -- chromium/tools/gn/target.h | 402 - chromium/tools/gn/target_generator.cc | 392 - chromium/tools/gn/target_generator.h | 84 - chromium/tools/gn/target_unittest.cc | 1051 --- chromium/tools/gn/template.cc | 127 - chromium/tools/gn/template.h | 67 - chromium/tools/gn/template_unittest.cc | 93 - chromium/tools/gn/test_with_scheduler.cc | 8 - chromium/tools/gn/test_with_scheduler.h | 27 - chromium/tools/gn/test_with_scope.cc | 222 - chromium/tools/gn/test_with_scope.h | 120 - chromium/tools/gn/token.cc | 28 - chromium/tools/gn/token.h | 86 - chromium/tools/gn/tokenizer.cc | 409 - chromium/tools/gn/tokenizer.h | 90 - chromium/tools/gn/tokenizer_unittest.cc | 228 - chromium/tools/gn/tool.cc | 29 - chromium/tools/gn/tool.h | 236 - chromium/tools/gn/toolchain.cc | 180 - chromium/tools/gn/toolchain.h | 146 - chromium/tools/gn/trace.cc | 339 - chromium/tools/gn/trace.h | 106 - chromium/tools/gn/tutorial/hello.cc | 17 - chromium/tools/gn/tutorial/hello.h | 14 - chromium/tools/gn/tutorial/hello_world.cc | 10 - chromium/tools/gn/tutorial/say_hello.cc | 14 - chromium/tools/gn/unique_vector.h | 178 - chromium/tools/gn/unique_vector_unittest.cc | 45 - chromium/tools/gn/value.cc | 222 - chromium/tools/gn/value.h | 135 - chromium/tools/gn/value_extractors.cc | 257 - chromium/tools/gn/value_extractors.h | 89 - chromium/tools/gn/value_unittest.cc | 43 - chromium/tools/gn/variables.cc | 2089 ----- chromium/tools/gn/variables.h | 333 - chromium/tools/gn/visibility.cc | 114 - chromium/tools/gn/visibility.h | 68 - chromium/tools/gn/visibility_unittest.cc | 53 - 
chromium/tools/gn/visual_studio_utils.cc | 139 - chromium/tools/gn/visual_studio_utils.h | 50 - chromium/tools/gn/visual_studio_utils_unittest.cc | 103 - chromium/tools/gn/visual_studio_writer.cc | 908 --- chromium/tools/gn/visual_studio_writer.h | 166 - chromium/tools/gn/visual_studio_writer_unittest.cc | 165 - chromium/tools/gn/xcode_object.cc | 990 --- chromium/tools/gn/xcode_object.h | 464 -- chromium/tools/gn/xcode_object_unittest.cc | 435 - chromium/tools/gn/xcode_writer.cc | 664 -- chromium/tools/gn/xcode_writer.h | 89 - chromium/tools/gn/xml_element_writer.cc | 114 - chromium/tools/gn/xml_element_writer.h | 124 - chromium/tools/gn/xml_element_writer_unittest.cc | 93 - .../tools/grit/grit/format/chrome_messages_json.py | 2 +- .../grit/format/chrome_messages_json_unittest.py | 6 +- chromium/tools/grit/grit/format/html_inline.py | 7 +- .../tools/grit/grit/format/html_inline_unittest.py | 87 +- chromium/tools/grit/grit/gather/chrome_html.py | 1 + chromium/tools/grit/grit/node/include.py | 1 + chromium/tools/grit/grit/tool/build.py | 7 +- chromium/tools/grit/grit/tool/xmb.py | 2 + chromium/tools/grit/grit_info.py | 2 + chromium/tools/grit/grit_rule.gni | 17 +- chromium/tools/gritsettings/resource_ids | 42 +- .../tools/gritsettings/startup_resources_mac.txt | 1 - .../gritsettings/translation_expectations.pyl | 2 +- chromium/tools/idl_parser/idl_lexer.py | 2 +- chromium/tools/idl_parser/idl_parser.py | 21 +- chromium/tools/idl_parser/test_lexer/values.in | 1 + .../tools/idl_parser/test_parser/interface_web.idl | 19 +- chromium/tools/imagediff/BUILD.gn | 1 - chromium/tools/include_tracer.py | 126 +- chromium/tools/ipc_fuzzer/BUILD.gn | 5 +- chromium/tools/ipc_fuzzer/message_replay/BUILD.gn | 2 +- chromium/tools/ipc_fuzzer/message_replay/DEPS | 3 +- .../ipc_fuzzer/message_replay/replay_process.cc | 57 +- .../ipc_fuzzer/message_replay/replay_process.h | 12 +- .../tools/json_schema_compiler/feature_compiler.py | 48 +- .../json_schema_compiler/js_externs_generator.py | 
17 +- .../js_externs_generator_test.py | 81 +- .../json_schema_compiler/js_interface_generator.py | 3 +- .../js_interface_generator_test.py | 4 +- chromium/tools/json_schema_compiler/js_util.py | 18 +- .../tools/json_schema_compiler/json_features.gni | 11 +- chromium/tools/json_schema_compiler/test/BUILD.gn | 2 +- chromium/tools/luci-go/OWNERS | 2 - chromium/tools/luci-go/README.md | 7 +- chromium/tools/luci-go/linux64/isolate.sha1 | 2 +- chromium/tools/luci-go/mac64/isolate.sha1 | 2 +- chromium/tools/luci-go/win64/isolate.exe.sha1 | 2 +- chromium/tools/mb/mb.py | 16 +- chromium/tools/mb/mb_config.pyl | 289 +- chromium/tools/metrics/actions/README.md | 14 +- chromium/tools/metrics/actions/actions.xml | 2106 ++--- chromium/tools/metrics/histograms/README.md | 62 +- chromium/tools/metrics/histograms/enums.xml | 1872 ++++- chromium/tools/metrics/histograms/histograms.xml | 8547 ++++++++++++++++---- chromium/tools/metrics/histograms/print_style.py | 13 +- chromium/tools/metrics/rappor/rappor.xml | 16 - chromium/tools/metrics/ukm/ukm.xml | 542 +- chromium/tools/perf/clear_system_cache/BUILD.gn | 1 - chromium/tools/perf/contrib/vr_benchmarks/BUILD.gn | 4 +- .../polymer/generate_compiled_resources_gyp.py | 97 - chromium/tools/resources/optimize-png-files.sh | 25 +- chromium/tools/traffic_annotation/auditor/BUILD.gn | 2 +- .../tools/traffic_annotation/auditor/README.md | 2 +- .../auditor/traffic_annotation_exporter.cc | 5 +- chromium/tools/traffic_annotation/bin/README.md | 4 +- .../bin/win32/traffic_annotation_auditor.exe.sha1 | 2 +- .../scripts/traffic_annotation_auditor_tests.py | 39 +- .../traffic_annotation/summary/annotations.xml | 33 +- chromium/tools/v8_context_snapshot/BUILD.gn | 2 +- chromium/tools/v8_context_snapshot/DEPS | 2 +- .../v8_context_snapshot_generator.cc | 4 +- .../valgrind/asan/third_party/asan_symbolize.py | 8 +- chromium/tools/variations/fieldtrial_to_struct.py | 73 +- .../variations/fieldtrial_to_struct_unittest.py | 35 +- 
chromium/tools/variations/fieldtrial_util.py | 11 +- .../tools/variations/fieldtrial_util_unittest.py | 30 +- chromium/tools/variations/unittest_data/DEPS | 3 + .../variations/unittest_data/expected_output.cc | 25 + .../variations/unittest_data/expected_output.h | 3 + .../variations/unittest_data/test_config.json | 8 +- chromium/tools/win/ShowGlobals/ShowGlobals.cc | 11 + chromium/tools/win/ShowGlobals/ShowGlobals.vcxproj | 10 +- .../win/static_initializers/static_initializers.cc | 22 +- chromium/tools/xdisplaycheck/BUILD.gn | 4 - 658 files changed, 18131 insertions(+), 83033 deletions(-) create mode 100644 chromium/tools/accessibility/inspect/chrome-events.ps1 create mode 100644 chromium/tools/accessibility/inspect/chrome-tree.ps1 create mode 100644 chromium/tools/accessibility/inspect/chromium-events.ps1 create mode 100644 chromium/tools/accessibility/inspect/chromium-tree.ps1 create mode 100644 chromium/tools/accessibility/inspect/example-tree-filters.txt create mode 100644 chromium/tools/accessibility/inspect/ff-events.ps1 create mode 100644 chromium/tools/accessibility/inspect/ff-tree.ps1 create mode 100644 chromium/tools/android/io_benchmark/BUILD.gn create mode 100644 chromium/tools/binary_size/html_report_faq.md create mode 100755 chromium/tools/binary_size/libsupersize/bcanalyzer.py create mode 100755 chromium/tools/binary_size/libsupersize/bcanalyzer_test.py mode change 100755 => 100644 chromium/tools/binary_size/libsupersize/nm.py create mode 100755 chromium/tools/binary_size/libsupersize/obj_analyzer.py create mode 100644 chromium/tools/binary_size/libsupersize/start_server.py create mode 100644 chromium/tools/binary_size/libsupersize/static/favicon.ico create mode 100644 chromium/tools/binary_size/libsupersize/static/index.html create mode 100644 chromium/tools/binary_size/libsupersize/static/infocard-ui.js create mode 100644 chromium/tools/binary_size/libsupersize/static/infocard.css create mode 100644 
chromium/tools/binary_size/libsupersize/static/options.css create mode 100644 chromium/tools/binary_size/libsupersize/static/shared.js create mode 100644 chromium/tools/binary_size/libsupersize/static/start-worker.js create mode 100644 chromium/tools/binary_size/libsupersize/static/state.js create mode 100644 chromium/tools/binary_size/libsupersize/static/tree-ui.js create mode 100644 chromium/tools/binary_size/libsupersize/static/tree-worker.js create mode 100644 chromium/tools/binary_size/libsupersize/string_extract.py delete mode 100644 chromium/tools/binary_size/libsupersize/template/D3SymbolTreeMap.js delete mode 100644 chromium/tools/binary_size/libsupersize/template/index.html delete mode 100644 chromium/tools/binary_size/libsupersize/template/test-data-generator.html delete mode 100644 chromium/tools/binary_size/libsupersize/testdata/mock_output_directory/args.gn create mode 100644 chromium/tools/binary_size/libsupersize/testdata/mock_source_directory/out/Release/args.gn delete mode 100644 chromium/tools/binary_size/libsupersize/third_party/gvr-android-sdk/libgvr_shim_static_arm.a create mode 100755 chromium/tools/binary_size/trybot_commit_size_checker.py delete mode 100644 chromium/tools/clang/blink_gc_plugin/CheckTraceWrappersVisitor.cpp delete mode 100644 chromium/tools/clang/blink_gc_plugin/CheckTraceWrappersVisitor.h create mode 100644 chromium/tools/clang/translation_unit/test_files/includes_self.cc create mode 100644 chromium/tools/clang/translation_unit/test_files/includes_self.cc.filepaths.expected delete mode 100644 chromium/tools/cygprofile/BUILD.gn delete mode 100644 chromium/tools/cygprofile/delayed_dumper.cc delete mode 100644 chromium/tools/cygprofile/lightweight_cygprofile.cc delete mode 100644 chromium/tools/cygprofile/lightweight_cygprofile.h delete mode 100644 chromium/tools/cygprofile/lightweight_cygprofile_perftest.cc create mode 100644 chromium/tools/dump_process_memory/BUILD.gn create mode 100644 
chromium/tools/dump_process_memory/OWNERS create mode 100755 chromium/tools/dump_process_memory/analyze_dumps.py create mode 100644 chromium/tools/dump_process_memory/dump_process.cc create mode 100755 chromium/tools/fuchsia/comparative_tester/generate_perf_report.py create mode 100644 chromium/tools/fuchsia/comparative_tester/test_results.py delete mode 100644 chromium/tools/gn/BUILD.gn delete mode 100644 chromium/tools/gn/DEPS delete mode 100644 chromium/tools/gn/OWNERS delete mode 100644 chromium/tools/gn/action_target_generator.cc delete mode 100644 chromium/tools/gn/action_target_generator.h delete mode 100644 chromium/tools/gn/action_target_generator_unittest.cc delete mode 100644 chromium/tools/gn/action_values.cc delete mode 100644 chromium/tools/gn/action_values.h delete mode 100644 chromium/tools/gn/analyzer.cc delete mode 100644 chromium/tools/gn/analyzer.h delete mode 100644 chromium/tools/gn/analyzer_unittest.cc delete mode 100644 chromium/tools/gn/args.cc delete mode 100644 chromium/tools/gn/args.h delete mode 100644 chromium/tools/gn/args_unittest.cc delete mode 100644 chromium/tools/gn/bin/compare_test_lists.py delete mode 100644 chromium/tools/gn/bin/gn-format.py delete mode 100755 chromium/tools/gn/bin/help_as_html.py delete mode 100755 chromium/tools/gn/bin/roll_gn.py delete mode 100644 chromium/tools/gn/binary_target_generator.cc delete mode 100644 chromium/tools/gn/binary_target_generator.h delete mode 100644 chromium/tools/gn/bootstrap/build.ninja.template delete mode 100644 chromium/tools/gn/bootstrap/build_aix.ninja.template delete mode 100644 chromium/tools/gn/bootstrap/build_mac.ninja.template delete mode 100644 chromium/tools/gn/bootstrap/build_vs.ninja.template delete mode 100644 chromium/tools/gn/build_settings.cc delete mode 100644 chromium/tools/gn/build_settings.h delete mode 100644 chromium/tools/gn/builder.cc delete mode 100644 chromium/tools/gn/builder.h delete mode 100644 chromium/tools/gn/builder_record.cc delete mode 100644 
chromium/tools/gn/builder_record.h delete mode 100644 chromium/tools/gn/builder_unittest.cc delete mode 100644 chromium/tools/gn/bundle_data.cc delete mode 100644 chromium/tools/gn/bundle_data.h delete mode 100644 chromium/tools/gn/bundle_data_target_generator.cc delete mode 100644 chromium/tools/gn/bundle_data_target_generator.h delete mode 100644 chromium/tools/gn/bundle_file_rule.cc delete mode 100644 chromium/tools/gn/bundle_file_rule.h delete mode 100644 chromium/tools/gn/c_include_iterator.cc delete mode 100644 chromium/tools/gn/c_include_iterator.h delete mode 100644 chromium/tools/gn/c_include_iterator_unittest.cc delete mode 100644 chromium/tools/gn/command_analyze.cc delete mode 100644 chromium/tools/gn/command_args.cc delete mode 100644 chromium/tools/gn/command_check.cc delete mode 100644 chromium/tools/gn/command_clean.cc delete mode 100644 chromium/tools/gn/command_desc.cc delete mode 100644 chromium/tools/gn/command_format.cc delete mode 100644 chromium/tools/gn/command_format.h delete mode 100644 chromium/tools/gn/command_format_unittest.cc delete mode 100644 chromium/tools/gn/command_gen.cc delete mode 100644 chromium/tools/gn/command_help.cc delete mode 100644 chromium/tools/gn/command_ls.cc delete mode 100644 chromium/tools/gn/command_path.cc delete mode 100644 chromium/tools/gn/command_refs.cc delete mode 100644 chromium/tools/gn/commands.cc delete mode 100644 chromium/tools/gn/commands.h delete mode 100644 chromium/tools/gn/config.cc delete mode 100644 chromium/tools/gn/config.h delete mode 100644 chromium/tools/gn/config_unittest.cc delete mode 100644 chromium/tools/gn/config_values.cc delete mode 100644 chromium/tools/gn/config_values.h delete mode 100644 chromium/tools/gn/config_values_extractors.cc delete mode 100644 chromium/tools/gn/config_values_extractors.h delete mode 100644 chromium/tools/gn/config_values_extractors_unittest.cc delete mode 100644 chromium/tools/gn/config_values_generator.cc delete mode 100644 
chromium/tools/gn/config_values_generator.h delete mode 100644 chromium/tools/gn/copy_target_generator.cc delete mode 100644 chromium/tools/gn/copy_target_generator.h delete mode 100644 chromium/tools/gn/create_bundle_target_generator.cc delete mode 100644 chromium/tools/gn/create_bundle_target_generator.h delete mode 100644 chromium/tools/gn/deps_iterator.cc delete mode 100644 chromium/tools/gn/deps_iterator.h delete mode 100644 chromium/tools/gn/desc_builder.cc delete mode 100644 chromium/tools/gn/desc_builder.h delete mode 100644 chromium/tools/gn/docs/cross_compiles.md delete mode 100644 chromium/tools/gn/docs/faq.md delete mode 100644 chromium/tools/gn/docs/hacking.md delete mode 100644 chromium/tools/gn/docs/language.md delete mode 100644 chromium/tools/gn/docs/quick_start.md delete mode 100644 chromium/tools/gn/docs/reference.md delete mode 100644 chromium/tools/gn/docs/standalone.md delete mode 100644 chromium/tools/gn/docs/style_guide.md delete mode 100644 chromium/tools/gn/docs/update_binaries.md delete mode 100644 chromium/tools/gn/eclipse_writer.cc delete mode 100644 chromium/tools/gn/eclipse_writer.h delete mode 100644 chromium/tools/gn/err.cc delete mode 100644 chromium/tools/gn/err.h delete mode 100644 chromium/tools/gn/escape.cc delete mode 100644 chromium/tools/gn/escape.h delete mode 100644 chromium/tools/gn/escape_unittest.cc delete mode 100644 chromium/tools/gn/example/.gn delete mode 100644 chromium/tools/gn/example/BUILD.gn delete mode 100644 chromium/tools/gn/example/README.txt delete mode 100644 chromium/tools/gn/example/build/BUILD.gn delete mode 100644 chromium/tools/gn/example/build/BUILDCONFIG.gn delete mode 100644 chromium/tools/gn/example/build/toolchain/BUILD.gn delete mode 100644 chromium/tools/gn/example/hello.cc delete mode 100644 chromium/tools/gn/example/hello_shared.cc delete mode 100644 chromium/tools/gn/example/hello_shared.h delete mode 100644 chromium/tools/gn/example/hello_static.cc delete mode 100644 
chromium/tools/gn/example/hello_static.h delete mode 100644 chromium/tools/gn/exec_process.cc delete mode 100644 chromium/tools/gn/exec_process.h delete mode 100644 chromium/tools/gn/exec_process_unittest.cc delete mode 100644 chromium/tools/gn/filesystem_utils.cc delete mode 100644 chromium/tools/gn/filesystem_utils.h delete mode 100644 chromium/tools/gn/filesystem_utils_unittest.cc delete mode 100644 chromium/tools/gn/format_test_data/001.gn delete mode 100644 chromium/tools/gn/format_test_data/001.golden delete mode 100644 chromium/tools/gn/format_test_data/002.gn delete mode 100644 chromium/tools/gn/format_test_data/002.golden delete mode 100644 chromium/tools/gn/format_test_data/003.gn delete mode 100644 chromium/tools/gn/format_test_data/003.golden delete mode 100644 chromium/tools/gn/format_test_data/004.gn delete mode 100644 chromium/tools/gn/format_test_data/004.golden delete mode 100644 chromium/tools/gn/format_test_data/005.gn delete mode 100644 chromium/tools/gn/format_test_data/005.golden delete mode 100644 chromium/tools/gn/format_test_data/006.gn delete mode 100644 chromium/tools/gn/format_test_data/006.golden delete mode 100644 chromium/tools/gn/format_test_data/007.gn delete mode 100644 chromium/tools/gn/format_test_data/007.golden delete mode 100644 chromium/tools/gn/format_test_data/008.gn delete mode 100644 chromium/tools/gn/format_test_data/008.golden delete mode 100644 chromium/tools/gn/format_test_data/009.gn delete mode 100644 chromium/tools/gn/format_test_data/009.golden delete mode 100644 chromium/tools/gn/format_test_data/010.gn delete mode 100644 chromium/tools/gn/format_test_data/010.golden delete mode 100644 chromium/tools/gn/format_test_data/011.gn delete mode 100644 chromium/tools/gn/format_test_data/011.golden delete mode 100644 chromium/tools/gn/format_test_data/012.gn delete mode 100644 chromium/tools/gn/format_test_data/012.golden delete mode 100644 chromium/tools/gn/format_test_data/013.gn delete mode 100644 
chromium/tools/gn/format_test_data/013.golden delete mode 100644 chromium/tools/gn/format_test_data/014.gn delete mode 100644 chromium/tools/gn/format_test_data/014.golden delete mode 100644 chromium/tools/gn/format_test_data/015.gn delete mode 100644 chromium/tools/gn/format_test_data/015.golden delete mode 100644 chromium/tools/gn/format_test_data/016.gn delete mode 100644 chromium/tools/gn/format_test_data/016.golden delete mode 100644 chromium/tools/gn/format_test_data/017.gn delete mode 100644 chromium/tools/gn/format_test_data/017.golden delete mode 100644 chromium/tools/gn/format_test_data/018.gn delete mode 100644 chromium/tools/gn/format_test_data/018.golden delete mode 100644 chromium/tools/gn/format_test_data/019.gn delete mode 100644 chromium/tools/gn/format_test_data/019.golden delete mode 100644 chromium/tools/gn/format_test_data/020.gn delete mode 100644 chromium/tools/gn/format_test_data/020.golden delete mode 100644 chromium/tools/gn/format_test_data/021.gn delete mode 100644 chromium/tools/gn/format_test_data/021.golden delete mode 100644 chromium/tools/gn/format_test_data/022.gn delete mode 100644 chromium/tools/gn/format_test_data/022.golden delete mode 100644 chromium/tools/gn/format_test_data/023.gn delete mode 100644 chromium/tools/gn/format_test_data/023.golden delete mode 100644 chromium/tools/gn/format_test_data/024.gn delete mode 100644 chromium/tools/gn/format_test_data/024.golden delete mode 100644 chromium/tools/gn/format_test_data/025.gn delete mode 100644 chromium/tools/gn/format_test_data/025.golden delete mode 100644 chromium/tools/gn/format_test_data/026.gn delete mode 100644 chromium/tools/gn/format_test_data/026.golden delete mode 100644 chromium/tools/gn/format_test_data/027.gn delete mode 100644 chromium/tools/gn/format_test_data/027.golden delete mode 100644 chromium/tools/gn/format_test_data/028.gn delete mode 100644 chromium/tools/gn/format_test_data/028.golden delete mode 100644 chromium/tools/gn/format_test_data/029.gn 
delete mode 100644 chromium/tools/gn/format_test_data/029.golden delete mode 100644 chromium/tools/gn/format_test_data/030.gn delete mode 100644 chromium/tools/gn/format_test_data/030.golden delete mode 100644 chromium/tools/gn/format_test_data/031.gn delete mode 100644 chromium/tools/gn/format_test_data/031.golden delete mode 100644 chromium/tools/gn/format_test_data/032.gn delete mode 100644 chromium/tools/gn/format_test_data/032.golden delete mode 100644 chromium/tools/gn/format_test_data/033.gn delete mode 100644 chromium/tools/gn/format_test_data/033.golden delete mode 100644 chromium/tools/gn/format_test_data/034.gn delete mode 100644 chromium/tools/gn/format_test_data/035.gn delete mode 100644 chromium/tools/gn/format_test_data/035.golden delete mode 100644 chromium/tools/gn/format_test_data/036.gn delete mode 100644 chromium/tools/gn/format_test_data/036.golden delete mode 100644 chromium/tools/gn/format_test_data/037.gn delete mode 100644 chromium/tools/gn/format_test_data/037.golden delete mode 100644 chromium/tools/gn/format_test_data/038.gn delete mode 100644 chromium/tools/gn/format_test_data/038.golden delete mode 100644 chromium/tools/gn/format_test_data/039.gn delete mode 100644 chromium/tools/gn/format_test_data/039.golden delete mode 100644 chromium/tools/gn/format_test_data/040.gn delete mode 100644 chromium/tools/gn/format_test_data/041.gn delete mode 100644 chromium/tools/gn/format_test_data/041.golden delete mode 100644 chromium/tools/gn/format_test_data/042.gn delete mode 100644 chromium/tools/gn/format_test_data/042.golden delete mode 100644 chromium/tools/gn/format_test_data/043.gn delete mode 100644 chromium/tools/gn/format_test_data/043.golden delete mode 100644 chromium/tools/gn/format_test_data/044.gn delete mode 100644 chromium/tools/gn/format_test_data/044.golden delete mode 100644 chromium/tools/gn/format_test_data/045.gn delete mode 100644 chromium/tools/gn/format_test_data/045.golden delete mode 100644 
chromium/tools/gn/format_test_data/046.gn delete mode 100644 chromium/tools/gn/format_test_data/046.golden delete mode 100644 chromium/tools/gn/format_test_data/047.gn delete mode 100644 chromium/tools/gn/format_test_data/047.golden delete mode 100644 chromium/tools/gn/format_test_data/048.gn delete mode 100644 chromium/tools/gn/format_test_data/048.golden delete mode 100644 chromium/tools/gn/format_test_data/049.gn delete mode 100644 chromium/tools/gn/format_test_data/050.gn delete mode 100644 chromium/tools/gn/format_test_data/050.golden delete mode 100644 chromium/tools/gn/format_test_data/051.gn delete mode 100644 chromium/tools/gn/format_test_data/051.golden delete mode 100644 chromium/tools/gn/format_test_data/052.gn delete mode 100644 chromium/tools/gn/format_test_data/052.golden delete mode 100644 chromium/tools/gn/format_test_data/053.gn delete mode 100644 chromium/tools/gn/format_test_data/053.golden delete mode 100644 chromium/tools/gn/format_test_data/054.gn delete mode 100644 chromium/tools/gn/format_test_data/054.golden delete mode 100644 chromium/tools/gn/format_test_data/055.gn delete mode 100644 chromium/tools/gn/format_test_data/055.golden delete mode 100644 chromium/tools/gn/format_test_data/056.gn delete mode 100644 chromium/tools/gn/format_test_data/056.golden delete mode 100644 chromium/tools/gn/format_test_data/057.gn delete mode 100644 chromium/tools/gn/format_test_data/057.golden delete mode 100644 chromium/tools/gn/format_test_data/058.gn delete mode 100644 chromium/tools/gn/format_test_data/058.golden delete mode 100644 chromium/tools/gn/format_test_data/059.gn delete mode 100644 chromium/tools/gn/format_test_data/059.golden delete mode 100644 chromium/tools/gn/format_test_data/060.gn delete mode 100644 chromium/tools/gn/format_test_data/060.golden delete mode 100644 chromium/tools/gn/format_test_data/061.gn delete mode 100644 chromium/tools/gn/format_test_data/061.golden delete mode 100644 chromium/tools/gn/format_test_data/062.gn delete 
mode 100644 chromium/tools/gn/format_test_data/062.golden delete mode 100644 chromium/tools/gn/format_test_data/063.gn delete mode 100644 chromium/tools/gn/format_test_data/063.golden delete mode 100644 chromium/tools/gn/format_test_data/064.gn delete mode 100644 chromium/tools/gn/format_test_data/064.golden delete mode 100644 chromium/tools/gn/format_test_data/065.gn delete mode 100644 chromium/tools/gn/format_test_data/065.golden delete mode 100644 chromium/tools/gn/format_test_data/066.gn delete mode 100644 chromium/tools/gn/format_test_data/066.golden delete mode 100644 chromium/tools/gn/format_test_data/067.gn delete mode 100644 chromium/tools/gn/format_test_data/067.golden delete mode 100644 chromium/tools/gn/format_test_data/068.gn delete mode 100644 chromium/tools/gn/format_test_data/068.golden delete mode 100644 chromium/tools/gn/format_test_data/069.gn delete mode 100644 chromium/tools/gn/format_test_data/069.golden delete mode 100644 chromium/tools/gn/format_test_data/070.gn delete mode 100644 chromium/tools/gn/format_test_data/070.golden delete mode 100644 chromium/tools/gn/function_exec_script.cc delete mode 100644 chromium/tools/gn/function_foreach.cc delete mode 100644 chromium/tools/gn/function_foreach_unittest.cc delete mode 100644 chromium/tools/gn/function_forward_variables_from.cc delete mode 100644 chromium/tools/gn/function_forward_variables_from_unittest.cc delete mode 100644 chromium/tools/gn/function_get_label_info.cc delete mode 100644 chromium/tools/gn/function_get_label_info_unittest.cc delete mode 100644 chromium/tools/gn/function_get_path_info.cc delete mode 100644 chromium/tools/gn/function_get_path_info_unittest.cc delete mode 100644 chromium/tools/gn/function_get_target_outputs.cc delete mode 100644 chromium/tools/gn/function_get_target_outputs_unittest.cc delete mode 100644 chromium/tools/gn/function_process_file_template.cc delete mode 100644 chromium/tools/gn/function_process_file_template_unittest.cc delete mode 100644 
chromium/tools/gn/function_read_file.cc delete mode 100644 chromium/tools/gn/function_rebase_path.cc delete mode 100644 chromium/tools/gn/function_rebase_path_unittest.cc delete mode 100644 chromium/tools/gn/function_set_default_toolchain.cc delete mode 100644 chromium/tools/gn/function_set_defaults.cc delete mode 100644 chromium/tools/gn/function_template.cc delete mode 100644 chromium/tools/gn/function_template_unittest.cc delete mode 100644 chromium/tools/gn/function_toolchain.cc delete mode 100644 chromium/tools/gn/function_toolchain_unittest.cc delete mode 100644 chromium/tools/gn/function_write_file.cc delete mode 100644 chromium/tools/gn/function_write_file_unittest.cc delete mode 100644 chromium/tools/gn/functions.cc delete mode 100644 chromium/tools/gn/functions.h delete mode 100644 chromium/tools/gn/functions_target.cc delete mode 100644 chromium/tools/gn/functions_target_unittest.cc delete mode 100644 chromium/tools/gn/functions_unittest.cc delete mode 100644 chromium/tools/gn/gn_main.cc delete mode 100644 chromium/tools/gn/group_target_generator.cc delete mode 100644 chromium/tools/gn/group_target_generator.h delete mode 100644 chromium/tools/gn/header_checker.cc delete mode 100644 chromium/tools/gn/header_checker.h delete mode 100644 chromium/tools/gn/header_checker_unittest.cc delete mode 100644 chromium/tools/gn/import_manager.cc delete mode 100644 chromium/tools/gn/import_manager.h delete mode 100644 chromium/tools/gn/inherited_libraries.cc delete mode 100644 chromium/tools/gn/inherited_libraries.h delete mode 100644 chromium/tools/gn/inherited_libraries_unittest.cc delete mode 100644 chromium/tools/gn/input_conversion.cc delete mode 100644 chromium/tools/gn/input_conversion.h delete mode 100644 chromium/tools/gn/input_conversion_unittest.cc delete mode 100644 chromium/tools/gn/input_file.cc delete mode 100644 chromium/tools/gn/input_file.h delete mode 100644 chromium/tools/gn/input_file_manager.cc delete mode 100644 
chromium/tools/gn/input_file_manager.h delete mode 100644 chromium/tools/gn/item.cc delete mode 100644 chromium/tools/gn/item.h delete mode 100644 chromium/tools/gn/json_project_writer.cc delete mode 100644 chromium/tools/gn/json_project_writer.h delete mode 100644 chromium/tools/gn/label.cc delete mode 100644 chromium/tools/gn/label.h delete mode 100644 chromium/tools/gn/label_pattern.cc delete mode 100644 chromium/tools/gn/label_pattern.h delete mode 100644 chromium/tools/gn/label_pattern_unittest.cc delete mode 100644 chromium/tools/gn/label_ptr.h delete mode 100644 chromium/tools/gn/label_unittest.cc delete mode 100644 chromium/tools/gn/last_commit_position.py delete mode 100644 chromium/tools/gn/lib_file.cc delete mode 100644 chromium/tools/gn/lib_file.h delete mode 100644 chromium/tools/gn/loader.cc delete mode 100644 chromium/tools/gn/loader.h delete mode 100644 chromium/tools/gn/loader_unittest.cc delete mode 100644 chromium/tools/gn/location.cc delete mode 100644 chromium/tools/gn/location.h delete mode 100644 chromium/tools/gn/misc/OWNERS delete mode 100644 chromium/tools/gn/misc/emacs/gn-mode.el delete mode 100644 chromium/tools/gn/misc/tm/GN.tmLanguage delete mode 100644 chromium/tools/gn/misc/tm/GN.tmPreferences delete mode 100644 chromium/tools/gn/misc/vim/README.chromium delete mode 100644 chromium/tools/gn/misc/vim/autoload/gn.vim delete mode 100644 chromium/tools/gn/misc/vim/ftdetect/gnfiletype.vim delete mode 100644 chromium/tools/gn/misc/vim/ftplugin/gn.vim delete mode 100644 chromium/tools/gn/misc/vim/syntax/gn.vim delete mode 100644 chromium/tools/gn/ninja_action_target_writer.cc delete mode 100644 chromium/tools/gn/ninja_action_target_writer.h delete mode 100644 chromium/tools/gn/ninja_action_target_writer_unittest.cc delete mode 100644 chromium/tools/gn/ninja_binary_target_writer.cc delete mode 100644 chromium/tools/gn/ninja_binary_target_writer.h delete mode 100644 chromium/tools/gn/ninja_binary_target_writer_unittest.cc delete mode 100644 
chromium/tools/gn/ninja_build_writer.cc delete mode 100644 chromium/tools/gn/ninja_build_writer.h delete mode 100644 chromium/tools/gn/ninja_build_writer_unittest.cc delete mode 100644 chromium/tools/gn/ninja_bundle_data_target_writer.cc delete mode 100644 chromium/tools/gn/ninja_bundle_data_target_writer.h delete mode 100644 chromium/tools/gn/ninja_bundle_data_target_writer_unittest.cc delete mode 100644 chromium/tools/gn/ninja_copy_target_writer.cc delete mode 100644 chromium/tools/gn/ninja_copy_target_writer.h delete mode 100644 chromium/tools/gn/ninja_copy_target_writer_unittest.cc delete mode 100644 chromium/tools/gn/ninja_create_bundle_target_writer.cc delete mode 100644 chromium/tools/gn/ninja_create_bundle_target_writer.h delete mode 100644 chromium/tools/gn/ninja_create_bundle_target_writer_unittest.cc delete mode 100644 chromium/tools/gn/ninja_group_target_writer.cc delete mode 100644 chromium/tools/gn/ninja_group_target_writer.h delete mode 100644 chromium/tools/gn/ninja_group_target_writer_unittest.cc delete mode 100644 chromium/tools/gn/ninja_target_writer.cc delete mode 100644 chromium/tools/gn/ninja_target_writer.h delete mode 100644 chromium/tools/gn/ninja_target_writer_unittest.cc delete mode 100644 chromium/tools/gn/ninja_toolchain_writer.cc delete mode 100644 chromium/tools/gn/ninja_toolchain_writer.h delete mode 100644 chromium/tools/gn/ninja_toolchain_writer_unittest.cc delete mode 100644 chromium/tools/gn/ninja_utils.cc delete mode 100644 chromium/tools/gn/ninja_utils.h delete mode 100644 chromium/tools/gn/ninja_writer.cc delete mode 100644 chromium/tools/gn/ninja_writer.h delete mode 100644 chromium/tools/gn/operators.cc delete mode 100644 chromium/tools/gn/operators.h delete mode 100644 chromium/tools/gn/operators_unittest.cc delete mode 100644 chromium/tools/gn/ordered_set.h delete mode 100644 chromium/tools/gn/output_file.cc delete mode 100644 chromium/tools/gn/output_file.h delete mode 100644 chromium/tools/gn/parse_node_value_adapter.cc 
delete mode 100644 chromium/tools/gn/parse_node_value_adapter.h delete mode 100644 chromium/tools/gn/parse_tree.cc delete mode 100644 chromium/tools/gn/parse_tree.h delete mode 100644 chromium/tools/gn/parse_tree_unittest.cc delete mode 100644 chromium/tools/gn/parser.cc delete mode 100644 chromium/tools/gn/parser.h delete mode 100644 chromium/tools/gn/parser_fuzzer.cc delete mode 100644 chromium/tools/gn/parser_unittest.cc delete mode 100644 chromium/tools/gn/path_output.cc delete mode 100644 chromium/tools/gn/path_output.h delete mode 100644 chromium/tools/gn/path_output_unittest.cc delete mode 100644 chromium/tools/gn/pattern.cc delete mode 100644 chromium/tools/gn/pattern.h delete mode 100644 chromium/tools/gn/pattern_unittest.cc delete mode 100644 chromium/tools/gn/pool.cc delete mode 100644 chromium/tools/gn/pool.h delete mode 100644 chromium/tools/gn/qt_creator_writer.cc delete mode 100644 chromium/tools/gn/qt_creator_writer.h delete mode 100644 chromium/tools/gn/runtime_deps.cc delete mode 100644 chromium/tools/gn/runtime_deps.h delete mode 100644 chromium/tools/gn/runtime_deps_unittest.cc delete mode 100644 chromium/tools/gn/scheduler.cc delete mode 100644 chromium/tools/gn/scheduler.h delete mode 100644 chromium/tools/gn/scope.cc delete mode 100644 chromium/tools/gn/scope.h delete mode 100644 chromium/tools/gn/scope_per_file_provider.cc delete mode 100644 chromium/tools/gn/scope_per_file_provider.h delete mode 100644 chromium/tools/gn/scope_per_file_provider_unittest.cc delete mode 100644 chromium/tools/gn/scope_unittest.cc delete mode 100644 chromium/tools/gn/settings.cc delete mode 100644 chromium/tools/gn/settings.h delete mode 100644 chromium/tools/gn/setup.cc delete mode 100644 chromium/tools/gn/setup.h delete mode 100644 chromium/tools/gn/source_dir.cc delete mode 100644 chromium/tools/gn/source_dir.h delete mode 100644 chromium/tools/gn/source_dir_unittest.cc delete mode 100644 chromium/tools/gn/source_file.cc delete mode 100644 
chromium/tools/gn/source_file.h delete mode 100644 chromium/tools/gn/source_file_type.cc delete mode 100644 chromium/tools/gn/source_file_type.h delete mode 100644 chromium/tools/gn/source_file_unittest.cc delete mode 100644 chromium/tools/gn/standard_out.cc delete mode 100644 chromium/tools/gn/standard_out.h delete mode 100644 chromium/tools/gn/string_utils.cc delete mode 100644 chromium/tools/gn/string_utils.h delete mode 100644 chromium/tools/gn/string_utils_unittest.cc delete mode 100644 chromium/tools/gn/substitution_list.cc delete mode 100644 chromium/tools/gn/substitution_list.h delete mode 100644 chromium/tools/gn/substitution_pattern.cc delete mode 100644 chromium/tools/gn/substitution_pattern.h delete mode 100644 chromium/tools/gn/substitution_pattern_unittest.cc delete mode 100644 chromium/tools/gn/substitution_type.cc delete mode 100644 chromium/tools/gn/substitution_type.h delete mode 100644 chromium/tools/gn/substitution_writer.cc delete mode 100644 chromium/tools/gn/substitution_writer.h delete mode 100644 chromium/tools/gn/substitution_writer_unittest.cc delete mode 100644 chromium/tools/gn/switches.cc delete mode 100644 chromium/tools/gn/switches.h delete mode 100644 chromium/tools/gn/target.cc delete mode 100644 chromium/tools/gn/target.h delete mode 100644 chromium/tools/gn/target_generator.cc delete mode 100644 chromium/tools/gn/target_generator.h delete mode 100644 chromium/tools/gn/target_unittest.cc delete mode 100644 chromium/tools/gn/template.cc delete mode 100644 chromium/tools/gn/template.h delete mode 100644 chromium/tools/gn/template_unittest.cc delete mode 100644 chromium/tools/gn/test_with_scheduler.cc delete mode 100644 chromium/tools/gn/test_with_scheduler.h delete mode 100644 chromium/tools/gn/test_with_scope.cc delete mode 100644 chromium/tools/gn/test_with_scope.h delete mode 100644 chromium/tools/gn/token.cc delete mode 100644 chromium/tools/gn/token.h delete mode 100644 chromium/tools/gn/tokenizer.cc delete mode 100644 
chromium/tools/gn/tokenizer.h delete mode 100644 chromium/tools/gn/tokenizer_unittest.cc delete mode 100644 chromium/tools/gn/tool.cc delete mode 100644 chromium/tools/gn/tool.h delete mode 100644 chromium/tools/gn/toolchain.cc delete mode 100644 chromium/tools/gn/toolchain.h delete mode 100644 chromium/tools/gn/trace.cc delete mode 100644 chromium/tools/gn/trace.h delete mode 100644 chromium/tools/gn/tutorial/hello.cc delete mode 100644 chromium/tools/gn/tutorial/hello.h delete mode 100644 chromium/tools/gn/tutorial/hello_world.cc delete mode 100644 chromium/tools/gn/tutorial/say_hello.cc delete mode 100644 chromium/tools/gn/unique_vector.h delete mode 100644 chromium/tools/gn/unique_vector_unittest.cc delete mode 100644 chromium/tools/gn/value.cc delete mode 100644 chromium/tools/gn/value.h delete mode 100644 chromium/tools/gn/value_extractors.cc delete mode 100644 chromium/tools/gn/value_extractors.h delete mode 100644 chromium/tools/gn/value_unittest.cc delete mode 100644 chromium/tools/gn/variables.cc delete mode 100644 chromium/tools/gn/variables.h delete mode 100644 chromium/tools/gn/visibility.cc delete mode 100644 chromium/tools/gn/visibility.h delete mode 100644 chromium/tools/gn/visibility_unittest.cc delete mode 100644 chromium/tools/gn/visual_studio_utils.cc delete mode 100644 chromium/tools/gn/visual_studio_utils.h delete mode 100644 chromium/tools/gn/visual_studio_utils_unittest.cc delete mode 100644 chromium/tools/gn/visual_studio_writer.cc delete mode 100644 chromium/tools/gn/visual_studio_writer.h delete mode 100644 chromium/tools/gn/visual_studio_writer_unittest.cc delete mode 100644 chromium/tools/gn/xcode_object.cc delete mode 100644 chromium/tools/gn/xcode_object.h delete mode 100644 chromium/tools/gn/xcode_object_unittest.cc delete mode 100644 chromium/tools/gn/xcode_writer.cc delete mode 100644 chromium/tools/gn/xcode_writer.h delete mode 100644 chromium/tools/gn/xml_element_writer.cc delete mode 100644 chromium/tools/gn/xml_element_writer.h 
delete mode 100644 chromium/tools/gn/xml_element_writer_unittest.cc delete mode 100755 chromium/tools/polymer/generate_compiled_resources_gyp.py create mode 100644 chromium/tools/variations/unittest_data/DEPS (limited to 'chromium/tools') diff --git a/chromium/tools/accessibility/inspect/README.md b/chromium/tools/accessibility/inspect/README.md index af2ee4e969b..a4efc2a7410 100644 --- a/chromium/tools/accessibility/inspect/README.md +++ b/chromium/tools/accessibility/inspect/README.md @@ -1,9 +1,35 @@ -# ax_dump_events +# Inspect tools + +Note: please see convenience scripts section, it makes the tools much easier to use. + +## ax_dump_events This tool helps monitor accessibility events. It currently works on Windows, and Mac is TBD. -Events are currently dumped to the console. To use it, run +Events are dumped to the console. To use it, run `ax_dump_events --pid=[processid]` -Press Ctrl+C to quit. \ No newline at end of file +Press Ctrl+C to quit. + +## ax_dump_tree + +This tool helps monitor accessibility events. It currently works on Windows, +and Mac is TBD. + +Events are dumped to the console. To use it, run +`ax_dump_events --window=[hwnd]` + +Notes: +* To use a hex window handle prefix it with `0x`. +* For json output, use the `--json` option +* To filter certain properties, use `--filters=[path-to-filters.txt]` where the filters text file has a series of `@ALLOW` and/or `@DENY` lines. See example-tree-filters.txt in tools/accessibility/inspect. + +## Convenience PowerShell scripts + +Run these scripts to avoid the difficulty of looking up the process id or window handle you want to inspect. +Sometimes there may be several windows open for the given app, and disambuation. In this case, after you run the script, it will list top level windows/processes and ask you to re-run with an argument that includes a substring from the window title you want to inspect the tree/events for. 
For example, `chrome-tree live` will inspect a tab with the name "Live region tests" (the title matcher is case insensitive). + +* chrome-tree and chrome-events for official Google Chrome (has 'Google Chrome' in the window title) +* chromium-tree and chromium-events for Chrome you built yourself (has 'Chromium' in the window title) +* ff-tree and ff-events for Firefox diff --git a/chromium/tools/accessibility/inspect/ax_dump_events.cc b/chromium/tools/accessibility/inspect/ax_dump_events.cc index fbd77a23f79..c111eecaae3 100644 --- a/chromium/tools/accessibility/inspect/ax_dump_events.cc +++ b/chromium/tools/accessibility/inspect/ax_dump_events.cc @@ -2,6 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. +#include #include #include "base/at_exit.h" @@ -35,6 +36,8 @@ int main(int argc, char** argv) { std::unique_ptr server( new content::AXEventServer(pid)); } + } else { + std::cout << "* Error: No process id provided via --pid=[process-id]."; } return 0; diff --git a/chromium/tools/accessibility/inspect/ax_dump_tree.cc b/chromium/tools/accessibility/inspect/ax_dump_tree.cc index 7eb549a89e6..4888634ea3d 100644 --- a/chromium/tools/accessibility/inspect/ax_dump_tree.cc +++ b/chromium/tools/accessibility/inspect/ax_dump_tree.cc @@ -2,6 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. 
+#include #include #include "base/at_exit.h" @@ -9,7 +10,6 @@ #include "base/strings/string_number_conversions.h" #include "tools/accessibility/inspect/ax_tree_server.h" -char kPidSwitch[] = "pid"; char kWindowSwitch[] = "window"; char kFiltersSwitch[] = "filters"; char kJsonSwitch[] = "json"; @@ -48,16 +48,9 @@ int main(int argc, char** argv) { new content::AXTreeServer(widget, filters_path, use_json)); return 0; } - } - std::string pid_str = - base::CommandLine::ForCurrentProcess()->GetSwitchValueASCII(kPidSwitch); - if (!pid_str.empty()) { - int pid; - if (StringToInt(pid_str, &pid)) { - base::ProcessId process_id = static_cast(pid); - std::unique_ptr server( - new content::AXTreeServer(process_id, filters_path, use_json)); - } + } else { + std::cout + << "* Error: No window handle provided via --window=[window-handle]."; } return 0; } diff --git a/chromium/tools/accessibility/inspect/ax_tree_server.cc b/chromium/tools/accessibility/inspect/ax_tree_server.cc index 89d0501f281..7cfcf97b6c2 100644 --- a/chromium/tools/accessibility/inspect/ax_tree_server.cc +++ b/chromium/tools/accessibility/inspect/ax_tree_server.cc @@ -35,7 +35,7 @@ AXTreeServer::AXTreeServer(base::ProcessId pid, formatter->BuildAccessibilityTreeForProcess(pid); if (!dict) { - std::cout << "Failed to get accessibility tree"; + std::cout << "Error: Failed to get accessibility tree"; return; } diff --git a/chromium/tools/accessibility/inspect/chrome-events.ps1 b/chromium/tools/accessibility/inspect/chrome-events.ps1 new file mode 100644 index 00000000000..a43c726a8f4 --- /dev/null +++ b/chromium/tools/accessibility/inspect/chrome-events.ps1 @@ -0,0 +1,12 @@ +# Powershell script to dump accessibility events for Chrome. Takes optional first argument with part of window title to disambiguate the desired process. 
+$all = ps | where {$_.ProcessName -eq 'chrome'} |where MainWindowTitle -like "*$($args[0])*chrome" | select id, MainWindowTitle +echo $all +echo "" +If (@($all).length -gt 1) { + echo "Multiple matching processes, please disambuate: include part of the desired window's title as a first argument." + exit +} +$id = ps | where {$_.ProcessName -eq 'chrome'} | where MainWindowTitle -like "*$($args[0])*chrome*" | select id -ExpandProperty id | Out-String +$id_arg = "--pid=" + $id +$exe = ".\ax_dump_events.exe" +& $exe $id_arg diff --git a/chromium/tools/accessibility/inspect/chrome-tree.ps1 b/chromium/tools/accessibility/inspect/chrome-tree.ps1 new file mode 100644 index 00000000000..c0bb8a709f0 --- /dev/null +++ b/chromium/tools/accessibility/inspect/chrome-tree.ps1 @@ -0,0 +1,13 @@ +# Powershell script to dump accessibility tree for Chrome. Takes optional first argument with part of window title to disambiguate the desired window. +$all = ps | where {$_.ProcessName -eq 'chrome'} |where MainWindowTitle -like "*$($args[0])*Google Chrome" | select MainWindowHandle, MainWindowTitle +echo $all +echo "" +If (@($all).length -gt 1) { + echo "Multiple matching windows, please disambuate: include part of the desired window's title as a first argument." + exit +} + +$hwnd = Get-Process | Where-Object {$_.ProcessName -eq 'chrome'} | where MainWindowTitle -like "*$($args[0])*Google Chrome*" | select MainWindowHandle -ExpandProperty MainWindowHandle | Out-String +$hwnd_arg = "--window=" + $hwnd +$exe = ".\ax_dump_tree.exe" +& $exe $hwnd_arg \ No newline at end of file diff --git a/chromium/tools/accessibility/inspect/chromium-events.ps1 b/chromium/tools/accessibility/inspect/chromium-events.ps1 new file mode 100644 index 00000000000..ad1a8d6063e --- /dev/null +++ b/chromium/tools/accessibility/inspect/chromium-events.ps1 @@ -0,0 +1,12 @@ +# Powershell script to dump accessibility events for Chromium. 
Takes optional first argument with part of window title to disambiguate the desired process. +$all = ps | where {$_.ProcessName -eq 'chrome'} |where MainWindowTitle -like "*$($args[0])*chromium" | select id, MainWindowTitle +echo $all +echo "" +If (@($all).length -gt 1) { + echo "Multiple matching processes, please disambuate: include part of the desired window's title as a first argument." + exit +} +$id = ps | where {$_.ProcessName -eq 'chrome'} | where MainWindowTitle -like "*$($args[0])*chromium*" | select id -ExpandProperty id | Out-String +$id_arg = "--pid=" + $id +$exe = ".\ax_dump_events.exe" +& $exe $id_arg diff --git a/chromium/tools/accessibility/inspect/chromium-tree.ps1 b/chromium/tools/accessibility/inspect/chromium-tree.ps1 new file mode 100644 index 00000000000..8c77fff09c9 --- /dev/null +++ b/chromium/tools/accessibility/inspect/chromium-tree.ps1 @@ -0,0 +1,13 @@ +# Powershell script to dump accessibility tree for Chromium. Takes optional first argument with part of window title to disambiguate the desired window. +$all = ps | where {$_.ProcessName -eq 'chrome'} |where MainWindowTitle -like "*$($args[0])*Google Chrome" | select MainWindowHandle, MainWindowTitle +echo $all +echo "" +If (@($all).length -gt 1) { + echo "Multiple matching windows, please disambuate: include part of the desired window's title as a first argument." 
+ exit +} + +$hwnd = Get-Process | where {$_.ProcessName -eq 'chrome'} | where MainWindowTitle -like "*$($args[0])*Chromium*" | select MainWindowHandle -ExpandProperty MainWindowHandle | Out-String +$hwnd_arg = "--window=" + $hwnd +$exe = ".\ax_dump_tree.exe" +& $exe $hwnd_arg \ No newline at end of file diff --git a/chromium/tools/accessibility/inspect/example-tree-filters.txt b/chromium/tools/accessibility/inspect/example-tree-filters.txt new file mode 100644 index 00000000000..1a27b52bca6 --- /dev/null +++ b/chromium/tools/accessibility/inspect/example-tree-filters.txt @@ -0,0 +1,8 @@ +# Sample filters to use with ax_dump_tree, e.g. with --filters=[path-to-filters.txt] +@ALLOW:role* +@ALLOW:name* +@ALLOW:rowtext* +@ALLOW:coltext* +@ALLOW:font* +@DENY:font-family* +@DENY:font-size* diff --git a/chromium/tools/accessibility/inspect/ff-events.ps1 b/chromium/tools/accessibility/inspect/ff-events.ps1 new file mode 100644 index 00000000000..b6ecae3e9ff --- /dev/null +++ b/chromium/tools/accessibility/inspect/ff-events.ps1 @@ -0,0 +1,5 @@ +# Powershell script to dump entire accessibility tree for Firefox, all windows + tabs. +$id = ps | where Processname -like firefox | where MainWindowTitle -like "*Firefox" | select id -ExpandProperty id | Out-String +$id_arg = "--pid=" + $id +$exe = ".\ax_dump_events.exe" +& $exe $id_arg \ No newline at end of file diff --git a/chromium/tools/accessibility/inspect/ff-tree.ps1 b/chromium/tools/accessibility/inspect/ff-tree.ps1 new file mode 100644 index 00000000000..999c239171c --- /dev/null +++ b/chromium/tools/accessibility/inspect/ff-tree.ps1 @@ -0,0 +1,5 @@ +# Powershell script to dump entire accessibility tree for Firefox, all windows + tabs. 
+$hwnd = Get-Process | Where-Object {$_.ProcessName -eq 'firefox'} | where MainWindowTitle -like "*Firefox" | select MainWindowHandle -ExpandProperty MainWindowHandle | Out-String +$hwnd_arg = "--window=" + $hwnd +$exe = ".\ax_dump_tree.exe" +& $exe $hwnd_arg \ No newline at end of file diff --git a/chromium/tools/accessibility/rebase_dump_accessibility_tree_test.py b/chromium/tools/accessibility/rebase_dump_accessibility_tree_test.py index bdcfaf325c2..92c7f3f2a50 100755 --- a/chromium/tools/accessibility/rebase_dump_accessibility_tree_test.py +++ b/chromium/tools/accessibility/rebase_dump_accessibility_tree_test.py @@ -111,18 +111,18 @@ def Run(): #print(json.dumps(data, indent=4)) for builder in data: - #print builder['result'] + print builder['builder_name'], builder['result'] if builder['result'] == 'FAILURE': - url = builder['url'] - tokens = url.split('/') - if len(tokens) < 9: - continue - bucket = tokens[4] - platform = tokens[6] - build = tokens[8] - logdog_prefix = 'chromium/bb/%s/%s/%s' % (bucket, platform, build) - logdog_steps = '%s/+/recipes/steps/**' % logdog_prefix - logdog_query = 'cit logdog query -results 999 -path "%s"' % logdog_steps + logdog_tokens = [ + 'chromium', + 'buildbucket', + 'cr-buildbucket.appspot.com', + builder['buildbucket_id'], + '+', + 'steps', + '**'] + logdog_path = '/'.join(logdog_tokens) + logdog_query = 'cit logdog query -results 999 -path "%s"' % logdog_path print (BRIGHT_COLOR + '=> %s' + NORMAL_COLOR) % logdog_query steps = os.popen(logdog_query).readlines() a11y_step = None diff --git a/chromium/tools/android/BUILD.gn b/chromium/tools/android/BUILD.gn index 3fdc092dea4..23279725c68 100644 --- a/chromium/tools/android/BUILD.gn +++ b/chromium/tools/android/BUILD.gn @@ -71,3 +71,10 @@ group("push_apps_to_background") { "//tools/android/push_apps_to_background:push_apps_to_background_apk", ] } + +group("io_benchmark") { + deps = [ + "//tools/android/io_benchmark", + ] + testonly = true +} diff --git 
a/chromium/tools/android/forwarder2/BUILD.gn b/chromium/tools/android/forwarder2/BUILD.gn index b674344d6dc..c77915a776b 100644 --- a/chromium/tools/android/forwarder2/BUILD.gn +++ b/chromium/tools/android/forwarder2/BUILD.gn @@ -43,7 +43,6 @@ if (current_toolchain == default_toolchain) { ] deps = [ "//base", - "//build/config:exe_and_shlib_deps", "//tools/android/common", ] data_deps = [ @@ -86,7 +85,6 @@ if (current_toolchain != default_toolchain) { ] deps = [ "//base", - "//build/config:exe_and_shlib_deps", "//tools/android/common", ] } diff --git a/chromium/tools/android/io_benchmark/BUILD.gn b/chromium/tools/android/io_benchmark/BUILD.gn new file mode 100644 index 00000000000..f8d406314a7 --- /dev/null +++ b/chromium/tools/android/io_benchmark/BUILD.gn @@ -0,0 +1,14 @@ +# Copyright 2018 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +executable("io_benchmark") { + sources = [ + "io_benchmark.cc", + ] + deps = [ + "//base", + "//base/test:test_support", + ] + testonly = true +} diff --git a/chromium/tools/android/md5sum/BUILD.gn b/chromium/tools/android/md5sum/BUILD.gn index ef8e6db414e..36a81e08d49 100644 --- a/chromium/tools/android/md5sum/BUILD.gn +++ b/chromium/tools/android/md5sum/BUILD.gn @@ -21,15 +21,7 @@ executable("md5sum_bin") { ] deps = [ "//base", - "//build/config:exe_and_shlib_deps", ] - - # md5sum uses //base, and is built when chrome_apk is. As a consequence, - # it references the instrumentation function, meaning that //tools/cygprofile - # is required to link. 
- if (is_android && use_order_profiling) { - deps += [ "//tools/cygprofile" ] - } } if (current_toolchain == default_toolchain) { diff --git a/chromium/tools/android/memdump/BUILD.gn b/chromium/tools/android/memdump/BUILD.gn index f047218b987..dafc7ec1d6f 100644 --- a/chromium/tools/android/memdump/BUILD.gn +++ b/chromium/tools/android/memdump/BUILD.gn @@ -8,6 +8,5 @@ executable("memdump") { ] deps = [ "//base", - "//build/config:exe_and_shlib_deps", ] } diff --git a/chromium/tools/battor_agent/BUILD.gn b/chromium/tools/battor_agent/BUILD.gn index 5339b67f3ce..f1f9f17db32 100644 --- a/chromium/tools/battor_agent/BUILD.gn +++ b/chromium/tools/battor_agent/BUILD.gn @@ -14,7 +14,6 @@ executable("battor_agent") { deps = [ ":battor_agent_lib", "//base", - "//build/config:exe_and_shlib_deps", "//build/win:default_exe_manifest", ] } diff --git a/chromium/tools/battor_agent/battor_connection_impl_unittest.cc b/chromium/tools/battor_agent/battor_connection_impl_unittest.cc index 2ce35ca9cb4..eddce4d707e 100644 --- a/chromium/tools/battor_agent/battor_connection_impl_unittest.cc +++ b/chromium/tools/battor_agent/battor_connection_impl_unittest.cc @@ -201,7 +201,16 @@ TEST_F(BattOrConnectionImplTest, FlushConnectionSucceedsOnlyAfterTimeout) { ASSERT_TRUE(GetFlushSuccess()); } -TEST_F(BattOrConnectionImplTest, FlushConnectionFlushesAlreadyReadBuffer) { +#if defined(ADDRESS_SANITIZER) +// https://crbug.com/843729 +#define MAYBE_FlushConnectionFlushesAlreadyReadBuffer \ + DISABLED_FlushConnectionFlushesAlreadyReadBuffer +#else +#define MAYBE_FlushConnectionFlushesAlreadyReadBuffer \ + FlushConnectionFlushesAlreadyReadBuffer +#endif +TEST_F(BattOrConnectionImplTest, + MAYBE_FlushConnectionFlushesAlreadyReadBuffer) { OpenConnection(); ASSERT_TRUE(IsOpenComplete()); ASSERT_TRUE(GetOpenSuccess()); @@ -263,8 +272,16 @@ TEST_F(BattOrConnectionImplTest, FlushConnectionNewBytesRestartQuietPeriod) { ASSERT_TRUE(IsFlushComplete()); } +#if defined(ADDRESS_SANITIZER) +// 
https://crbug.com/843729 +#define MAYBE_FlushConnectionFlushesBytesReceivedInQuietPeriod \ + DISABLED_FlushConnectionFlushesBytesReceivedInQuietPeriod +#else +#define MAYBE_FlushConnectionFlushesBytesReceivedInQuietPeriod \ + FlushConnectionFlushesBytesReceivedInQuietPeriod +#endif TEST_F(BattOrConnectionImplTest, - FlushConnectionFlushesBytesReceivedInQuietPeriod) { + MAYBE_FlushConnectionFlushesBytesReceivedInQuietPeriod) { OpenConnection(); ASSERT_TRUE(IsOpenComplete()); ASSERT_TRUE(GetOpenSuccess()); @@ -311,7 +328,13 @@ TEST_F(BattOrConnectionImplTest, FlushConnectionFlushesMultipleReadsOfData) { ASSERT_TRUE(GetReadSuccess()); } -TEST_F(BattOrConnectionImplTest, FlushIncompleteBeforeTimeout) { +#if defined(ADDRESS_SANITIZER) +// https://crbug.com/843729 +#define MAYBE_FlushIncompleteBeforeTimeout DISABLED_FlushIncompleteBeforeTimeout +#else +#define MAYBE_FlushIncompleteBeforeTimeout FlushIncompleteBeforeTimeout +#endif +TEST_F(BattOrConnectionImplTest, MAYBE_FlushIncompleteBeforeTimeout) { OpenConnection(); ASSERT_TRUE(IsOpenComplete()); ASSERT_TRUE(GetOpenSuccess()); diff --git a/chromium/tools/binary_size/BUILD.gn b/chromium/tools/binary_size/BUILD.gn index fb3ded94fdb..55e574acba5 100644 --- a/chromium/tools/binary_size/BUILD.gn +++ b/chromium/tools/binary_size/BUILD.gn @@ -10,5 +10,6 @@ python_library("binary_size_trybot_py") { pydeps_file = "supersize.pydeps" data = [ "diagnose_bloat.py", + "trybot_commit_size_checker.py", ] } diff --git a/chromium/tools/binary_size/README.md b/chromium/tools/binary_size/README.md index 8a98fe7b5d1..2d4ed3e38d5 100644 --- a/chromium/tools/binary_size/README.md +++ b/chromium/tools/binary_size/README.md @@ -232,16 +232,34 @@ tools/binary_size/supersize archive chrome.size --elf-file out/Release/chrome -v Creates an interactive size breakdown (by source path) as a stand-alone html report. 
-Example output: https://agrieve.github.io/chrome/ +Example output: https://notwoods.github.io/chrome-supersize-reports/ Example Usage: ``` bash -tools/binary_size/supersize html_report chrome.size --report-dir size-report -v -xdg-open size-report/index.html +# Creates the data file ./report.ndjson, generated based on ./chrome.size +tools/binary_size/supersize html_report chrome.size --report-file report.ndjson -v -# Report showing Dex method counts rather than binary size: -tools/binary_size/supersize html_report chrome.size --report-dir size-report -v --method-count +# Includes every symbol in the data file, although it will take longer to load. +tools/binary_size/supersize html_report chrome.size --report-file report.ndjson --all-symbols + +# Create a data file showing a diff between two .size files. +tools/binary_size/supersize html_report after.size --diff-with before.size --report-file report.ndjson +``` + +### Usage: start_server + +Locally view the data file generated by `html_report`, by starting a web server +that links to a data file. + +Example Usage: + +``` bash +# Starts a local server to view the data in ./report.ndjson +tools/binary_size/supersize start_server report.ndjson + +# Set a custom address and port. +tools/binary_size/supersize start_server report.ndjson -a localhost -p 8080 ``` ### Usage: diff @@ -297,10 +315,6 @@ Example session: 1. More `console` features: * Add `SplitByName()` - Like `GroupByName()`, but recursive. * A canned query, that does what ShowGlobals does (as described in [Windows Binary Sizes](https://www.chromium.org/developers/windows-binary-sizes)). -1. More `html_report` features: - * Able to render size diffs (tint negative size red). - * Break down by other groupings (Create from result of `SplitByName()`) - * Render as simple tree view rather than 2d boxes 1. Integrate with `resource_sizes.py` so that it tracks size of major components separately: chrome vs blink vs skia vs v8. 1. 
Add dependency graph info, perhaps just on a per-file basis. diff --git a/chromium/tools/binary_size/diagnose_bloat.py b/chromium/tools/binary_size/diagnose_bloat.py index ff201f38a58..ed69755367e 100755 --- a/chromium/tools/binary_size/diagnose_bloat.py +++ b/chromium/tools/binary_size/diagnose_bloat.py @@ -36,6 +36,10 @@ _DEFAULT_OUT_DIR = os.path.join(_SRC_ROOT, 'out', 'binary-size-build') _BINARY_SIZE_DIR = os.path.join(_SRC_ROOT, 'tools', 'binary_size') _RESOURCE_SIZES_PATH = os.path.join( _SRC_ROOT, 'build', 'android', 'resource_sizes.py') +_LLVM_TOOLS_DIR = os.path.join( + _SRC_ROOT, 'third_party', 'llvm-build', 'Release+Asserts', 'bin') +_DOWNLOAD_OBJDUMP_PATH = os.path.join( + _SRC_ROOT, 'tools', 'clang', 'scripts', 'download_objdump.py') _DiffResult = collections.namedtuple('DiffResult', ['name', 'value', 'units']) @@ -62,7 +66,8 @@ class BaseDiff(object): @property def summary_stat(self): - return None + """Returns a tuple of (name, value, units) for the most important metric.""" + raise NotImplementedError() def Summary(self): """A short description that summarizes the source of binary size bloat.""" @@ -85,7 +90,7 @@ class BaseDiff(object): class NativeDiff(BaseDiff): # E.g.: Section Sizes (Total=1.2 kb (1222 bytes)): _RE_SUMMARY_STAT = re.compile( - r'Section Sizes \(Total=(?P\d+) ?(?P\w+)') + r'Section Sizes \(Total=(?P-?[0-9\.]+) ?(?P\w+)') _SUMMARY_STAT_NAME = 'Native Library Delta' def __init__(self, size_name, supersize_path): @@ -100,7 +105,7 @@ class NativeDiff(BaseDiff): if m: return _DiffResult( NativeDiff._SUMMARY_STAT_NAME, m.group('value'), m.group('units')) - return None + raise Exception('Could not extract total from:\n' + self._diff) def DetailedResults(self): return self._diff.splitlines() @@ -116,8 +121,10 @@ class NativeDiff(BaseDiff): class ResourceSizesDiff(BaseDiff): + # Ordered by output appearance. 
_SUMMARY_SECTIONS = ( - 'Breakdown', 'Dex', 'Specifics', 'StaticInitializersCount') + 'Specifics', 'InstallSize', 'InstallBreakdown', 'Dex', + 'StaticInitializersCount') # Sections where it makes sense to sum subsections into a section total. _AGGREGATE_SECTIONS = ( 'InstallBreakdown', 'Breakdown', 'MainLibInfo', 'Uncompressed') @@ -135,13 +142,18 @@ class ResourceSizesDiff(BaseDiff): if 'normalized' in subsection_name: full_name = '{} {}'.format(section_name, subsection_name) return _DiffResult(full_name, value, units) - return None + raise Exception('Could not find "normalized" in: ' + repr(self._diff)) def DetailedResults(self): return self._ResultLines() def Summary(self): - return self._ResultLines( + header_lines = [ + 'For an explanation of these metrics, see:', + ('https://chromium.googlesource.com/chromium/src/+/master/docs/speed/' + 'binary_size/metrics.md#Metrics-for-Android'), + ''] + return header_lines + self._ResultLines( include_sections=ResourceSizesDiff._SUMMARY_SECTIONS) def ProduceDiff(self, before_dir, after_dir): @@ -165,7 +177,7 @@ class ResourceSizesDiff(BaseDiff): def _ResultLines(self, include_sections=None): """Generates diff lines for the specified sections (defaults to all).""" - ret = [] + section_lines = collections.defaultdict(list) for section_name, section_results in self._diff.iteritems(): section_no_target = re.sub(r'^.*_', '', section_name) if not include_sections or section_no_target in include_sections: @@ -177,16 +189,20 @@ class ResourceSizesDiff(BaseDiff): if value == 0 and include_sections: continue section_sum += value - subsection_lines.append('{:>+10,} {} {}'.format(value, units, name)) - section_header = section_name + subsection_lines.append('{:>+14,} {} {}'.format(value, units, name)) + section_header = section_no_target if section_no_target in ResourceSizesDiff._AGGREGATE_SECTIONS: section_header += ' ({:+,} {})'.format(section_sum, units) + section_header += ':' # Omit sections with empty subsections. 
if subsection_lines: - ret.append(section_header) - ret.extend(subsection_lines) - if not ret: - ret = ['Empty ' + self.name] + section_lines[section_no_target].append(section_header) + section_lines[section_no_target].extend(subsection_lines) + if not section_lines: + return ['Empty ' + self.name] + ret = [] + for k in include_sections or sorted(section_lines): + ret.extend(section_lines[k]) return ret def _LoadResults(self, archive_dir): @@ -354,7 +370,7 @@ class _BuildArchive(object): self._slow_options = slow_options self._save_unstripped = save_unstripped - def ArchiveBuildResults(self, supersize_path): + def ArchiveBuildResults(self, supersize_path, tool_prefix=None): """Save build artifacts necessary for diffing.""" logging.info('Saving build results to: %s', self.dir) _EnsureDirsExist(self.dir) @@ -362,7 +378,7 @@ class _BuildArchive(object): self._ArchiveFile(self.build.abs_apk_path) self._ArchiveFile(self.build.abs_apk_path + '.mapping') self._ArchiveResourceSizes() - self._ArchiveSizeFile(supersize_path) + self._ArchiveSizeFile(supersize_path, tool_prefix) if self._save_unstripped: self._ArchiveFile(self.build.abs_main_lib_path) self.metadata.Write() @@ -396,7 +412,7 @@ class _BuildArchive(object): _Die('missing expected file: %s', filename) shutil.copy(filename, self.dir) - def _ArchiveSizeFile(self, supersize_path): + def _ArchiveSizeFile(self, supersize_path, tool_prefix): existing_size_file = self.build.abs_apk_path + '.size' if os.path.exists(existing_size_file): logging.info('Found existing .size file') @@ -404,6 +420,8 @@ class _BuildArchive(object): else: supersize_cmd = [supersize_path, 'archive', self.archived_size_path, '--elf-file', self.build.abs_main_lib_path] + if tool_prefix: + supersize_cmd += ['--tool-prefix', tool_prefix] if self.build.IsCloud(): supersize_cmd += ['--no-source-paths'] else: @@ -474,10 +492,12 @@ class _DiffArchiveManager(object): # Print cached file if all builds were cached. 
if os.path.exists(path): _PrintFile(path) - if self.build_archives: + if self.build_archives and len(self.build_archives) <= 2: + if not all(a.Exists() for a in self.build_archives): + return supersize_path = os.path.join(_BINARY_SIZE_DIR, 'supersize') size2 = '' - if len(self.build_archives) > 1: + if len(self.build_archives) == 2: size2 = os.path.relpath(self.build_archives[-1].archived_size_path) logging.info('Enter supersize console via: %s console %s %s', os.path.relpath(supersize_path), @@ -606,8 +626,9 @@ def _SyncAndBuild(archive, build, subrepo, no_gclient, extra_rev): # commits on a branch. _GitCmd(['checkout', '--detach'], subrepo) logging.info('Syncing to %s', archive.rev) - if _GclientSyncCmd(archive.rev, subrepo): - return False + ret = _GclientSyncCmd(archive.rev, subrepo) + if ret: + return ret with _ApplyPatch(extra_rev, subrepo): return build.Run() @@ -788,14 +809,22 @@ def _PrintFile(path): @contextmanager def _TmpCopyBinarySizeDir(): - """Recursively copy files to a temp dir and yield supersize path.""" + """Recursively copy files to a temp dir and yield temp paths.""" # Needs to be at same level of nesting as the real //tools/binary_size # since supersize uses this to find d3 in //third_party. tmp_dir = tempfile.mkdtemp(dir=_SRC_ROOT) try: bs_dir = os.path.join(tmp_dir, 'binary_size') shutil.copytree(_BINARY_SIZE_DIR, bs_dir) - yield os.path.join(bs_dir, 'supersize') + # We also copy the tools supersize needs, but only if they exist. 
+ tool_prefix = None + if os.path.exists(_DOWNLOAD_OBJDUMP_PATH): + if not os.path.exists(os.path.join(_LLVM_TOOLS_DIR, 'llvm-readelf')): + _RunCmd([_DOWNLOAD_OBJDUMP_PATH]) + tools_dir = os.path.join(bs_dir, 'bintools') + tool_prefix = os.path.join(tools_dir, 'llvm-') + shutil.copytree(_LLVM_TOOLS_DIR, tools_dir) + yield (os.path.join(bs_dir, 'supersize'), tool_prefix) finally: shutil.rmtree(tmp_dir) @@ -829,21 +858,20 @@ def _DiffMain(args): parser.add_argument('--diff-output', required=True) args = parser.parse_args(args) - if args.diff_type == 'native': + is_native_diff = args.diff_type == 'native' + if is_native_diff: supersize_path = os.path.join(_BINARY_SIZE_DIR, 'supersize') diff = NativeDiff(args.apk_name + '.size', supersize_path) else: - diff = ResourceSizesDiff(args.apk_name, args.apk_name + '.json') + diff = ResourceSizesDiff(args.apk_name) diff.ProduceDiff(args.before_dir, args.after_dir) - with open(args.diff_output, 'w') as f: - f.writelines(l + '\n' for l in diff.DetailedResults()) + lines = diff.DetailedResults() if is_native_diff else diff.Summary() - stat = diff.summary_stat - if stat: - print 'Summary: {} {} {}'.format(*stat) - else: - print 'Missing Summary!' 
+ with open(args.diff_output, 'w') as f: + f.writelines(l + '\n' for l in lines) + stat = diff.summary_stat + f.write('\n{}={}\n'.format(*stat[:2])) def main(): @@ -968,7 +996,8 @@ def main(): reference_rev = args.rev _ValidateRevs(args.rev, reference_rev, subrepo, args.extra_rev) revs = _GenerateRevList(args.rev, reference_rev, args.all, subrepo, args.step) - with _TmpCopyBinarySizeDir() as supersize_path: + with _TmpCopyBinarySizeDir() as paths: + supersize_path, tool_prefix = paths diffs = [NativeDiff(build.size_name, supersize_path)] if build.IsAndroid(): diffs += [ @@ -999,7 +1028,7 @@ def main(): _Die('%d builds failed in a row, last failure was %s.', consecutive_failures, archive.rev) else: - archive.ArchiveBuildResults(supersize_path) + archive.ArchiveBuildResults(supersize_path, tool_prefix) consecutive_failures = 0 if i != 0: diff --git a/chromium/tools/binary_size/html_report_faq.md b/chromium/tools/binary_size/html_report_faq.md new file mode 100644 index 00000000000..9cd321e6524 --- /dev/null +++ b/chromium/tools/binary_size/html_report_faq.md @@ -0,0 +1,80 @@ +# Frequently Asked Questions + +[TOC] + +## Usage + +### How do I create a data file? +See the [`html_report` command docs](README.md#Usage_html_report). + +### What do the different folder and file colors mean? +Containers (folders, files, and components) have different colors depending on +the symbols they contain. The color corresponds whatever symbol type has the +most bytes in that container. The color is the same as that symbol type's icon. + +When hovering over the container, you can see a breakdown of all the symbol +types inside. The first row of that table indicates the symbol type with the +most bytes. + +### What does "Type", "Count", "Total size", and "Percent" refer to? +When hovering over a container, a card appears breaking down the symbols stored +within that container. The data appears as a pie chart and a table with the +columns "Type", "Count", "Total size", and "Percent". 
+ +- **Type** refers to the symbol type that row represents. +- **Count** indicates the number of symbols of that type present in the + container. +- **Total size** indicates how many bytes the symbols of that type take up in + total. +- **Percent** indicates how much the total size of a symbol type takes up of the + total size of the container. It also correlates to the pie chart sizes. + +### Which keyboard shortcuts are supported? +Once the symbol tree is focused on, various keyboard shortcuts are supported +to navigate around the tree. + +The symbol tree can be focused by clicking on it or by pressing _Tab_ until +the tree is focused. + +Key | Function +--- | -------- +_Enter_ or _Space_ | Open or close a container, just like clicking on it +↓ | Focus the node below the current node +↑ | Focus the node above the current node +→ | Move focus to the first child node, or open the current node if closed +← | Move focus to the parent node, or close the current node if open +_Home_ | Move focus to the topmost node +_End_ | Move focus to the bottommost node +_A-z_ | Focus the next node that starts with the given character +_*_ | Expand all sibling containers of the current node. + +## Symbols + +### What are "Other entries"? +The symbol type "Other entries" refers to symbols that don't fall in one of the +defined symbol categories. This includes images and `.bin` files. + +### What are "Other small" symbols for? +To reduce the size of the generated data file, small symbols are omitted by +default. Small symbols of the same type are combined into an "Other small +[type]" bucket. + +More symbols can be displayed by using the `--all-symbols` flag +when generating the data file. However, the data file will be larger and will +take longer to load. + +## Filters + +### What regular expressions syntax is supported? +The contain and exclude regular expressions are evaluated against the full path +of each symbol. 
Symbol names are appended onto the end of a path and prefixed +with a `:` (`path/to/file:symbol`). The full path is displayed below the byte +size on the info card, and also appears when hovering over a symbol's name. + +The "Symbols must contain" filter is applied before the "Symbols must exclude" +filter. + +Example filter | Regular expression +-------------- | ------------------ +Find symbols in `MyJavaClass` | `:MyJavaClass#` +Find folders named `myfolder` | `myfolder/` diff --git a/chromium/tools/binary_size/libsupersize/apkanalyzer.py b/chromium/tools/binary_size/libsupersize/apkanalyzer.py index 9c55e1dfd35..1efc52fbdce 100644 --- a/chromium/tools/binary_size/libsupersize/apkanalyzer.py +++ b/chromium/tools/binary_size/libsupersize/apkanalyzer.py @@ -7,6 +7,7 @@ Assumes that apk_path.mapping and apk_path.jar.info is available. """ +import logging import os import subprocess import zipfile @@ -128,8 +129,14 @@ def CreateDexSymbols(apk_path, output_directory): nodes = UndoHierarchicalSizing(_RunApkAnalyzer(apk_path, output_directory)) dex_expected_size = _ExpectedDexTotalSize(apk_path) total_node_size = sum(map(lambda x: x[1], nodes)) - assert dex_expected_size >= total_node_size, ( - 'Node size too large, check for node processing errors.') + # TODO(agrieve): Figure out why this log is triggering for + # ChromeModernPublic.apk (https://crbug.com/851535). + # Reporting: dex_expected_size=6546088 total_node_size=6559549 + if dex_expected_size < total_node_size: + logging.error( + 'Node size too large, check for node processing errors. 
' + 'dex_expected_size=%d total_node_size=%d', dex_expected_size, + total_node_size) # We have more than 100KB of ids for methods, strings id_metadata_overhead_size = dex_expected_size - total_node_size symbols = [] diff --git a/chromium/tools/binary_size/libsupersize/archive.py b/chromium/tools/binary_size/libsupersize/archive.py index d95108f629c..687f9d34061 100644 --- a/chromium/tools/binary_size/libsupersize/archive.py +++ b/chromium/tools/binary_size/libsupersize/archive.py @@ -30,11 +30,15 @@ import linker_map_parser import models import ninja_parser import nm +import obj_analyzer import path_util sys.path.insert(1, os.path.join(path_util.SRC_ROOT, 'tools', 'grit')) from grit.format import data_pack +_OWNERS_FILENAME = 'OWNERS' +_COMPONENT_REGEX = re.compile(r'\s*#\s*COMPONENT\s*:\s*(\S+)') +_FILE_PATH_REGEX = re.compile(r'\s*file://(\S+)') # Holds computation state that is live only when an output directory exists. _OutputDirectoryContext = collections.namedtuple('_OutputDirectoryContext', [ @@ -92,6 +96,8 @@ class SectionSizeKnobs(object): 'META-INF/MANIFEST.MF', ]) + self.src_root = path_util.SRC_ROOT + def _OpenMaybeGz(path): """Calls `gzip.open()` if |path| ends in ".gz", otherwise calls `open()`.""" @@ -215,6 +221,11 @@ def _NormalizeNames(raw_symbols): def _NormalizeObjectPath(path): + """Normalizes object paths. + + Prefixes are removed: obj/, ../../ + Archive names made more pathy: foo/bar.a(baz.o) -> foo/bar.a/baz.o + """ if path.startswith('obj/'): # Convert obj/third_party/... -> third_party/... path = path[4:] @@ -538,6 +549,86 @@ def _CalculatePadding(raw_symbols): '%r\nprev symbol: %r' % (symbol, prev_symbol)) +def _ParseComponentFromOwners(filename): + """Searches an OWNERS file for lines that start with `# COMPONENT:`. + + If an OWNERS file has no COMPONENT but references another OWNERS file, follow + the reference and check that file instead. + + Args: + filename: Path to the file to parse. 
+ Returns: + The text that follows the `# COMPONENT:` prefix, such as 'component>name'. + Empty string if no component found or the file didn't exist. + """ + reference_paths = [] + try: + with open(filename) as f: + for line in f: + component_matches = _COMPONENT_REGEX.match(line) + path_matches = _FILE_PATH_REGEX.match(line) + if component_matches: + return component_matches.group(1) + elif path_matches: + reference_paths.append(path_matches.group(1)) + except IOError: + return '' + + if len(reference_paths) == 1: + newpath = os.path.join(path_util.SRC_ROOT, reference_paths[0]) + return _ParseComponentFromOwners(newpath) + else: + return '' + + +def _FindComponentRoot(start_path, cache, knobs): + """Searches all parent directories for COMPONENT in OWNERS files. + + Args: + start_path: Path of directory to start searching from. Must be relative to + SRC_ROOT. + cache: Dict of OWNERS paths. Used instead of filesystem if paths are present + in the dict. + knobs: Instance of SectionSizeKnobs. Tunable knobs and options. + + Returns: + COMPONENT belonging to |start_path|, or empty string if not found. + """ + prev_dir = None + test_dir = start_path + # This loop will traverse the directory structure upwards until reaching + # SRC_ROOT, where test_dir and prev_dir will both equal an empty string. + while test_dir != prev_dir: + cached_component = cache.get(test_dir) + if cached_component: + return cached_component + elif cached_component is None: + owners_path = os.path.join(knobs.src_root, test_dir, _OWNERS_FILENAME) + component = _ParseComponentFromOwners(owners_path) + cache[test_dir] = component + if component: + return component + prev_dir = test_dir + test_dir = os.path.dirname(test_dir) + return '' + + +def _PopulateComponents(raw_symbols, knobs): + """Populates the |component| field based on |source_path|. + + Symbols without a |source_path| are skipped. + + Args: + raw_symbols: list of Symbol objects. + knobs: Instance of SectionSizeKnobs. 
Tunable knobs and options. + """ + seen_paths = {} + for symbol in raw_symbols: + if symbol.source_path: + folder_path = os.path.dirname(symbol.source_path) + symbol.component = _FindComponentRoot(folder_path, seen_paths, knobs) + + def _AddNmAliases(raw_symbols, names_by_address): """Adds symbols that were removed by identical code folding.""" # Step 1: Create list of (index_of_symbol, name_list). @@ -606,7 +697,25 @@ def LoadAndPostProcessSizeInfo(path): return size_info -def CreateMetadata(map_path, elf_path, apk_path, tool_prefix, output_directory): +def CreateMetadata(map_path, elf_path, apk_path, tool_prefix, output_directory, + linker_name): + """Creates metadata dict. + + Args: + map_path: Path to the linker .map(.gz) file to parse. + elf_path: Path to the corresponding unstripped ELF file. Used to find symbol + aliases and inlined functions. Can be None. + apk_path: Path to the .apk file to measure. + tool_prefix: Prefix for c++filt & nm. + output_directory: Build output directory. + linker_name: 'gold', 'lld_v#' (# is a number), 'lld-lto_v#', or None. + + Returns: + None if |elf_path| is not supplied. Otherwise returns dict mapping string + constants to values. + If |elf_path| is supplied, git revision and elf info are included. + If |output_directory| is also supplied, then filenames will be included. 
+ """ metadata = None if elf_path: logging.debug('Constructing metadata') @@ -623,6 +732,7 @@ def CreateMetadata(map_path, elf_path, apk_path, tool_prefix, output_directory): models.METADATA_ELF_ARCHITECTURE: architecture, models.METADATA_ELF_MTIME: timestamp, models.METADATA_ELF_BUILD_ID: build_id, + models.METADATA_LINKER_NAME: linker_name, models.METADATA_TOOL_PREFIX: relative_tool_prefix, } @@ -652,7 +762,7 @@ def _ResolveThinArchivePaths(raw_symbols, thin_archives): def _ParseElfInfo(map_path, elf_path, tool_prefix, track_string_literals, - outdir_context=None): + outdir_context=None, linker_name=None): """Adds ELF section sizes and symbols.""" if elf_path: # Run nm on the elf file to retrieve the list of symbol names per-address. @@ -671,14 +781,14 @@ def _ParseElfInfo(map_path, elf_path, tool_prefix, track_string_literals, # Rather than record all paths for each symbol, set the paths to be the # common ancestor of all paths. if outdir_context: - bulk_analyzer = nm.BulkObjectFileAnalyzer( + bulk_analyzer = obj_analyzer.BulkObjectFileAnalyzer( tool_prefix, outdir_context.output_directory) bulk_analyzer.AnalyzePaths(outdir_context.elf_object_paths) logging.info('Parsing Linker Map') with _OpenMaybeGz(map_path) as map_file: section_sizes, raw_symbols = ( - linker_map_parser.MapFileParser().Parse(map_file)) + linker_map_parser.MapFileParser().Parse(linker_name, map_file)) if outdir_context and outdir_context.thin_archives: _ResolveThinArchivePaths(raw_symbols, outdir_context.thin_archives) @@ -770,7 +880,7 @@ def _ComputePakFileSymbols( section_name = models.SECTION_PAK_NONTRANSLATED overhead = (12 + 6) * compression_ratio # Header size plus extra offset symbols_by_id[file_name] = models.Symbol( - section_name, overhead, full_name='{}: overhead'.format(file_name)) + section_name, overhead, full_name='Overhead: {}'.format(file_name)) for resource_id in sorted(contents.resources): if resource_id in alias_map: # 4 extra bytes of metadata (2 16-bit ints) @@ -811,7 
+921,7 @@ class _ResourceSourceMapper(object): for dest, renamed_dest in renames.iteritems(): # Allow one more level of indirection due to renaming renamed files renamed_dest = renames.get(renamed_dest, renamed_dest) - actual_source = res_info.get(renamed_dest); + actual_source = res_info.get(renamed_dest) if actual_source: res_info[dest] = actual_source return res_info @@ -858,6 +968,7 @@ def _ParsePakInfoFile(pak_info_path): def _ParsePakSymbols( section_sizes, object_paths, output_directory, symbols_by_id): + object_paths_by_id = collections.defaultdict(list) for path in object_paths: whitelist_path = os.path.join(output_directory, path + '.whitelist') if (not os.path.exists(whitelist_path) @@ -872,10 +983,24 @@ def _ParsePakSymbols( # resulting in resource_ids that don't end up being in the final apk. if resource_id not in symbols_by_id: continue - symbols_by_id[resource_id].object_path = path - - raw_symbols = sorted(symbols_by_id.values(), - key=lambda s: (s.section_name, s.address)) + object_paths_by_id[resource_id].append(path) + + raw_symbols = [] + for resource_id, symbol in symbols_by_id.iteritems(): + raw_symbols.append(symbol) + paths = set(object_paths_by_id[resource_id]) + if paths: + symbol.object_path = paths.pop() + if paths: + aliases = symbol.aliases or [symbol] + symbol.aliases = aliases + for path in paths: + new_sym = models.Symbol( + symbol.section_name, symbol.size, address=symbol.address, + full_name=symbol.full_name, object_path=path, aliases=aliases) + aliases.append(new_sym) + raw_symbols.append(new_sym) + raw_symbols.sort(key=lambda s: (s.section_name, s.address)) raw_total = 0.0 int_total = 0 for symbol in raw_symbols: @@ -1015,7 +1140,7 @@ def _CalculateElfOverhead(section_sizes, elf_path): def CreateSectionSizesAndSymbols( map_path=None, tool_prefix=None, output_directory=None, elf_path=None, apk_path=None, track_string_literals=True, metadata=None, - apk_so_path=None, pak_files=None, pak_info_file=None, + apk_so_path=None, 
pak_files=None, pak_info_file=None, linker_name=None, knobs=SectionSizeKnobs()): """Creates sections sizes and symbols for a SizeInfo. @@ -1028,6 +1153,11 @@ def CreateSectionSizesAndSymbols( alias information will not be recorded. track_string_literals: Whether to break down "** merge string" sections into smaller symbols (requires output_directory). + + Returns: + A tuple of (section_sizes, raw_symbols). + section_sizes is a dict mapping section names to their size + raw_symbols is a list of Symbol objects """ if apk_path and elf_path: # Extraction takes around 1 second, so do it in parallel. @@ -1069,7 +1199,8 @@ def CreateSectionSizesAndSymbols( thin_archives=thin_archives) section_sizes, raw_symbols = _ParseElfInfo( - map_path, elf_path, tool_prefix, track_string_literals, outdir_context) + map_path, elf_path, tool_prefix, track_string_literals, + outdir_context=outdir_context, linker_name=linker_name) elf_overhead_size = _CalculateElfOverhead(section_sizes, elf_path) pak_symbols_by_id = None @@ -1102,6 +1233,7 @@ def CreateSectionSizesAndSymbols( raw_symbols.extend(pak_raw_symbols) _ExtractSourcePathsAndNormalizeObjectPaths(raw_symbols, source_mapper) + _PopulateComponents(raw_symbols, knobs) logging.info('Converting excessive aliases into shared-path symbols') _CompactLargeAliasesIntoSharedSymbols(raw_symbols, knobs) logging.debug('Connecting nm aliases') @@ -1136,6 +1268,14 @@ def CreateSizeInfo( def _DetectGitRevision(directory): + """Runs git rev-parse to get the SHA1 hash of the current revision. + + Args: + directory: Path to directory where rev-parse command will be run. + + Returns: + A string with the SHA1 hash, or None if an error occured. 
+ """ try: git_rev = subprocess.check_output( ['git', '-C', directory, 'rev-parse', 'HEAD']) @@ -1194,7 +1334,7 @@ def _ParseGnArgs(args_path): def _DetectLinkerName(map_path): with _OpenMaybeGz(map_path) as map_file: - return linker_map_parser.DetectLinkerNameFromMapFileHeader(next(map_file)) + return linker_map_parser.DetectLinkerNameFromMapFile(map_file) def _ElfInfoFromApk(apk_path, apk_so_path, tool_prefix): @@ -1265,6 +1405,8 @@ def AddArguments(parser): default=True, action='store_false', help='Disable breaking down "** merge strings" into more ' 'granular symbols.') + parser.add_argument('--source-directory', + help='Custom path to the root source directory.') AddMainPathsArguments(parser) @@ -1313,6 +1455,7 @@ def DeduceMainPaths(args, parser): 'linker map file.') linker_name = _DetectLinkerName(map_path) + logging.info('Linker name: %s' % linker_name) tool_prefix_finder = path_util.ToolPrefixFinder( value=args.tool_prefix, output_directory_finder=output_directory_finder, @@ -1322,25 +1465,30 @@ def DeduceMainPaths(args, parser): if not args.no_source_paths: output_directory = output_directory_finder.Finalized() return (output_directory, tool_prefix, apk_path, apk_so_path, elf_path, - map_path) + map_path, linker_name) def Run(args, parser): if not args.size_file.endswith('.size'): parser.error('size_file must end with .size') - (output_directory, tool_prefix, apk_path, apk_so_path, elf_path, map_path) = ( - DeduceMainPaths(args, parser)) + (output_directory, tool_prefix, apk_path, apk_so_path, elf_path, map_path, + linker_name) = (DeduceMainPaths(args, parser)) metadata = CreateMetadata(map_path, elf_path, apk_path, tool_prefix, - output_directory) + output_directory, linker_name) + + knobs = SectionSizeKnobs() + if args.source_directory: + knobs.src_root = args.source_directory section_sizes, raw_symbols = CreateSectionSizesAndSymbols( map_path=map_path, tool_prefix=tool_prefix, elf_path=elf_path, apk_path=apk_path, output_directory=output_directory, 
track_string_literals=args.track_string_literals, metadata=metadata, apk_so_path=apk_so_path, - pak_files=args.pak_file, pak_info_file=args.pak_info_file) + pak_files=args.pak_file, pak_info_file=args.pak_info_file, + linker_name=linker_name, knobs=knobs) size_info = CreateSizeInfo( section_sizes, raw_symbols, metadata=metadata, normalize_names=False) diff --git a/chromium/tools/binary_size/libsupersize/bcanalyzer.py b/chromium/tools/binary_size/libsupersize/bcanalyzer.py new file mode 100755 index 00000000000..edd1a1e7479 --- /dev/null +++ b/chromium/tools/binary_size/libsupersize/bcanalyzer.py @@ -0,0 +1,379 @@ +#!/usr/bin/env python +# Copyright 2018 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Runs bcanalyzer to extract data from LLVM Bitcode (BC) files. + +IsBitcodeFile(): + Reads the magic header of a file to quickly decide whether it is a BC file. + +ParseTag(): + Heuristically parses a single-line tag from bcanalyzer dump (exporeted for + testing). + +RunBcAnalyzerOnIntermediates(): + BulkForkAndCall() target: Given BC file [paths], runs (llvm-)bcanalyzer on + each path, parses the output, extracts strings, and returns {path: [strings]}. + +This file can also be run stand-alone in order to test out the logic on smaller +sample sizes. +""" + +from __future__ import print_function + +import argparse +import os +import re +import subprocess + +import concurrent +import path_util + + +# Upper bound on number of bytes per character in strings. 4-byte / 32-bit +# strings are rare and are likely confused with 32-bit int arrays. So by +# default, only accept up to 2-byte / 16-bit strings. +_CHAR_WIDTH_LIMIT = 2 + +_RE_SPLIT = re.compile(r'=(\d+)') +# children tags that should not be counted as types. +# - is meta data. +# - with the following (or other tag) are counted +# as a single type entry. 
+_NON_TYPE_TAGS = set(['NUMENTRY', 'STRUCT_NAME']) + +# Use bit-fields for tag types: 1 => Opening tag, 2 => Closed tag. +OPENING_TAG = 1 +CLOSING_TAG = 2 +SELF_CLOSING_TAG = OPENING_TAG | CLOSING_TAG + + +def _IsOpeningTag(tag_type): + return tag_type & 1 + + +def _IsClosingTag(tag_type): + return tag_type & 2 + + +def IsBitcodeFile(path): + try: + with open(path, 'rb') as f: + return f.read(4) == 'BC\xc0\xde' + except IOError: + return False + + +def ParseTag(line): + """Heuristically parses a single-line tag from bcanalyzer dump. + + Since input data are machine-generated, so we only need "good enough" parsing + logic that favors simplicity. For example, '' is accepted. + + Args: + line: Stripped line that may have a single-line tag with trailing text. + + Returns: + (tag_type, tag, attrib_pos) if successful, else (None) * 3. Details: + tag_type: One of {OPENING_TAG, CLOSING_TAG, SELF_CLOSING_TAG}. + tag: The tag name. + attrib_pos: Position in |line| to start parsing attributes. + """ + # + # ==> (OPENING_TAG, 'TYPE_BLOCK_ID', 14). + # Trailing text! + # ==> (SELF_CLOSING_TAG, 'ARRAY', 6). + # + # ==> (CLOSING_TAG, 'TYPE_BLOCK_ID', 15). + + # Assumes |line| is stripped, i.e., so no indent and no trailing new line. + if len(line) < 2 or line[0] != '<': + return (None, None, None) + tag_type, pos = (CLOSING_TAG, 2) if line[1] == '/' else (OPENING_TAG, 1) + for i in xrange(pos, len(line)): + if not line[i].isalnum() and line[i] != '_': + if i == pos or not line[i] in ' >/': + break + end = line.find('>', i) + if end < 0: + break + if line[end - 1] == '/': + return (SELF_CLOSING_TAG, line[pos:i], i) + return (tag_type, line[pos:i], i) + return (None, None, None) + + +def _ParseOpItems(line, pos): + """Heuristically extracts op0=# op1=# ... values from a single-line tag.""" + # + # ^ pos = 8 + # ==> iter([42]). + # + # ^ pos = 8 + # ==> iter([84, 101, 115, 116, 56, 97]). + # + # ^ pos = 7 + # ==> iter([1, 0, 0, 1, 1, 0]). 
+ # + # ^ pos = 5 + # ==> iter([8412, 101, 1150, 116, 5200, 98, 0]). + + # In particular, skip 'abbrevid=#'. + start = line.index(' op', pos) + end = line.index('>', start) + for t in _RE_SPLIT.finditer(line[start:end]): + yield int(t.group(1)) + + +# Emits uint16 values as a stream of 2 bytes (little-endian). +def _UnpackUint16ListToBytes(items): + for item in items: + yield item & 0xFF + yield (item >> 8) & 0xFF + + +# Emits uint32 values as a stream of 4 bytes (little-endian). +def _UnpackUint32ListToBytes(items): + for item in items: + yield item & 0xFF + yield (item >> 8) & 0xFF + yield (item >> 16) & 0xFF + yield (item >> 24) & 0xFF + + +class _BcIntArrayType: + """The specs of an integer array type.""" + + # Lookup table to map from width to an unpacker that splits ints into bytes. + _UNPACKER_MAP = { + 1: iter, + 2: _UnpackUint16ListToBytes, + 4: _UnpackUint32ListToBytes + } + + def __init__(self, length, width): + # Number of elements in the array. + self.length = length + # Number of bytes per element. + self.width = width + + def ParseOpItemsAsString(self, line, attrib_pos, add_null_at_end): + """Reads op0=# op=# ... values and returns them as a list of bytes. + + Interprets each op0=# op1=# ... value as a |self.width|-byte integer, splits + them into component bytes (little-endian), and returns the result as string. + + Args: + line: Stripped line of single-line tag with op0=# op1=# ... data. + attrib_pos: Position in |line| where attribute list starts. + add_null_add_end: Whether to append |'\x00' * self.width|. + """ + items = _ParseOpItems(line, attrib_pos) + unpacker = _BcIntArrayType._UNPACKER_MAP[self.width] + s = ''.join(chr(t) for t in unpacker(items)) + if add_null_at_end: + s += '\x00' * self.width + # Rather stringent check to ensure exact size match. + assert len(s) == self.length * self.width + return s + + +class _BcTypeInfo: + """Stateful parser of , specialized for integer arrays.""" + + # + # # Type ids should be in [0, 8]. 
+ # # Type id = 0: int8. + # # Type id = 1: Pointer to type id 0 + # # ==> int8*. + # # Type id = 2: Array with 4 elements + # # of type id 0 ==> int8[4] + # # Joins next Tag. + # # Type id = 3: Struct (unused). + # # Type id = 4: Function (unused). + # # Type id = 5: int16. + # # Type id = 6: Pointer to type id 5 + # # ==> int16*. + # # Type id = 7: int32. + # # Type id = 8: Array with 4 elements + # # of type id 5 ==> int16[4] + # + + def __init__(self): + # Auto-incrementing current type id. + self.cur_type_id = 0 + # Maps from type id (of an integer) to number of bits. + self.int_types = {} + # Maps from type id (of an integer array) to _BcIntArrayType. + self.int_array_types = {} + + def Feed(self, line, tag, attrib_pos): + """Parses a single-line tag and store integer and integer array types. + + Args: + line: Stripped line of single-line tag with op0=# op1=# ... data. + tag: The tag type in |line| (child tag of ). + attrib_pos: Position in |line| where attribute list starts. + """ + if tag in _NON_TYPE_TAGS: + return + if tag == 'INTEGER': + num_bits = next(_ParseOpItems(line, attrib_pos)) # op0. + self.int_types[self.cur_type_id] = num_bits + elif tag == 'ARRAY': + [size, item_type_id] = list(_ParseOpItems(line, attrib_pos)) # op0, op1. + bits = self.int_types.get(item_type_id) + if bits is not None: # |bits| can be None for non-int arrays. + self.int_array_types[self.cur_type_id] = _BcIntArrayType(size, bits / 8) + self.cur_type_id += 1 + + def GetArrayType(self, idx): + return self.int_array_types.get(idx) + + +def _ParseBcAnalyzer(lines): + """A generator to extract strings from bcanalyzer dump of a BC file.""" + + # ... + # + # ... (See above; parsed by _BcTypeInfo) + # + # ... + # + # # Current type id := 1 ==> int8*. + # + # # Current type id := 2 ==> int8[4]. + # record string = 'Foo' + # # {'F','o','o',1}. + # # Current type id := 7 ==> int32. + # # Stores 1000. + # # Stores -1000. + # # Current type id := 8 ==> int16[4]. + # + # + # + # ... 
+ + # Notes: + # - Only parse first and first . + # - is stateful: A "current type id" exists, and that's set + # by , with op0= referring to type id. + # - For array lengths one needs to refer to the corresponding . + # - Strings / arrays are in , , and . + # - abbrevid=# is redundant (repeats tag type) and unused + # - Character data are stored in op0=# op1=# ..., one per character. These + # values should fit in the proper range, and can be fairly large. + # - has implicit 0 at end. + # - Data lengths agree with the length in the matching entry. + # - "record string" text is not very useful: It only appears if all + # characters are printable. + # - Signed vs. unsigned types are undistinguished. + # - In , the op0= value is stored as 2 * abs(x) + (signed ? 0 : 1). + # - In of int, values are coerced to unsigned type. + # - Strings and int arrays are undistinguished. + # - : If an uint8 array happens to end with 0, then this gets used! + # - Arrays (or integers) of all-0 appear as . Presumably this gets + # placed into .bss section. + + STATE_VOID = 0 + STATE_TYPE_BLOCK = 1 + STATE_CONST_BLOCK = 2 + state = STATE_VOID + + type_info = None + consts_cur_type = None + + # State machine to parse the *first* to initialize + # |type_info|, then the *first* to yield strings. + for line in lines: + line = line.lstrip() + (tag_type, tag, attrib_pos) = ParseTag(line) + if tag_type is None: + continue + if state == STATE_VOID: + if _IsOpeningTag(tag_type): + if tag == 'TYPE_BLOCK_ID': + if type_info is None: + state = STATE_TYPE_BLOCK + type_info = _BcTypeInfo() + elif tag == 'CONSTANTS_BLOCK': + if type_info is not None: + state = STATE_CONST_BLOCK + + elif state == STATE_TYPE_BLOCK: + if _IsClosingTag(tag_type) and tag == 'TYPE_BLOCK_ID': + state = STATE_VOID + else: + type_info.Feed(line, tag, attrib_pos) + + elif state == STATE_CONST_BLOCK: + if _IsClosingTag(tag_type) and tag == 'CONSTANTS_BLOCK': + # Skip remaining data, including subsequent s. 
+ break + elif tag == 'SETTYPE': + consts_cur_type_id = next(_ParseOpItems(line, attrib_pos)) # op0. + consts_cur_type = type_info.GetArrayType(consts_cur_type_id) + elif consts_cur_type and consts_cur_type.width <= _CHAR_WIDTH_LIMIT: + if tag in ['CSTRING', 'STRING', 'DATA']: + # Exclude 32-bit / 4-byte strings since they're rarely used, and are + # likely confused with 32-bit int arrays. + s = consts_cur_type.ParseOpItemsAsString( + line, attrib_pos, tag == 'CSTRING') + yield (consts_cur_type, s) + + +class _BcAnalyzerRunner: + """Helper to run bcanalyzer and extract output lines. """ + def __init__(self, tool_prefix, output_directory): + self._args = [path_util.GetBcAnalyzerPath(tool_prefix), '--dump', + '--disable-histogram'] + self._output_directory = output_directory + + def RunOnFile(self, obj_file): + output = subprocess.check_output(self._args + [obj_file], + cwd=self._output_directory) + return output.splitlines() + + +# This is a target for BulkForkAndCall(). +def RunBcAnalyzerOnIntermediates(target, tool_prefix, output_directory): + """Calls bcanalyzer and returns encoded map from path to strings. + + Args: + target: A list of BC file paths. + """ + assert isinstance(target, list) + runner = _BcAnalyzerRunner(tool_prefix, output_directory) + strings_by_path = {} + for t in target: + strings_by_path[t] = [s for _, s in _ParseBcAnalyzer(runner.RunOnFile(t))] + # Escape strings by repr() so there will be no special characters to interfere + # concurrent.EncodeDictOfLists() and decoding. 
+ return concurrent.EncodeDictOfLists(strings_by_path, value_transform=repr) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--tool-prefix', required=True) + parser.add_argument('--output-directory', default='.') + parser.add_argument('--char-width-limit', type=int) + parser.add_argument('objects', type=os.path.realpath, nargs='+') + + args = parser.parse_args() + base_path = os.path.normpath(args.output_directory) + runner = _BcAnalyzerRunner(args.tool_prefix, args.output_directory) + if args.char_width_limit is not None: + global _CHAR_WIDTH_LIMIT + _CHAR_WIDTH_LIMIT = args.char_width_limit + + for obj_path in args.objects: + rel_path = os.path.relpath(obj_path, base_path) + print('File: %s' % rel_path) + for cur_type, s in _ParseBcAnalyzer(runner.RunOnFile(obj_path)): + print(' char%d[%d]: %r' % (cur_type.width * 8, cur_type.length, s)) + print('') + + +if __name__ == '__main__': + main() diff --git a/chromium/tools/binary_size/libsupersize/bcanalyzer_test.py b/chromium/tools/binary_size/libsupersize/bcanalyzer_test.py new file mode 100755 index 00000000000..089d622a62b --- /dev/null +++ b/chromium/tools/binary_size/libsupersize/bcanalyzer_test.py @@ -0,0 +1,146 @@ +#!/usr/bin/env python +# Copyright 2018 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import ast +import os +import unittest + +import bcanalyzer +import concurrent + + +_SCRIPT_DIR = os.path.dirname(__file__) +_TEST_DATA_DIR = os.path.join(_SCRIPT_DIR, 'testdata') +_TEST_SOURCE_DIR = os.path.join(_TEST_DATA_DIR, 'mock_source_directory') +_TEST_OUTPUT_DIR = os.path.join(_TEST_SOURCE_DIR, 'out', 'Release') +_TEST_TOOL_PREFIX = os.path.join( + os.path.abspath(_TEST_DATA_DIR), 'mock_toolchain', 'llvm-') + + +def _MakeString(bits, toks): + """Creates a multi-byte string from ASCII strings and/or ASCII codes. 
+ + Args: + bits: Number of bits per character, must be in {8, 16, 32}. + toks: A list of tokens, each of which is a ASCII strings or an integer + representing a character's ASCII value (e.g., 0 for terminating null). + + Returns: A flattened string of the |bits|-bit string formed by |bit|-extending + the result of concatanating tokens. + """ + s = ''.join(tok if isinstance(tok, basestring) else chr(tok) for tok in toks) + padding = '\x00' * ((bits - 8) / 8) + return ''.join(ch + padding for ch in s) + + +class BcAnalyzerTest(unittest.TestCase): + + def testParseTag(self): + # Valid cases. + self.assertEquals((bcanalyzer.OPENING_TAG, 'FOO', 4), + bcanalyzer.ParseTag(' trailing')) + self.assertEquals((bcanalyzer.OPENING_TAG, 'BAR', 4), + bcanalyzer.ParseTag('')) + self.assertEquals((bcanalyzer.CLOSING_TAG, 'FOO', 5), + bcanalyzer.ParseTag('')) + self.assertEquals((bcanalyzer.SELF_CLOSING_TAG, 'FOO', 4), + bcanalyzer.ParseTag('')) + self.assertEquals((bcanalyzer.SELF_CLOSING_TAG, 'TOMATO2', 8), + bcanalyzer.ParseTag('')) + # Not self-closing: For simplicity we requires '/>' with space. + self.assertEquals((bcanalyzer.OPENING_TAG, 'TOMATO2', 8), + bcanalyzer.ParseTag('')) + self.assertEquals((bcanalyzer.SELF_CLOSING_TAG, 'BAR', 4), + bcanalyzer.ParseTag('')) + self.assertEquals((bcanalyzer.OPENING_TAG, 'FOO', 4), + bcanalyzer.ParseTag(' / trailing')) + self.assertEquals((bcanalyzer.SELF_CLOSING_TAG, 'STRUCT_NAME', 12), + bcanalyzer.ParseTag('')) + self.assertEquals((bcanalyzer.SELF_CLOSING_TAG, 'UnkownCode41', 13), + bcanalyzer.ParseTag('')) + self.assertEquals((bcanalyzer.CLOSING_TAG, 'FOO_BAR', 9), + bcanalyzer.ParseTag(' \'/>trailing\'')) + self.assertEquals((bcanalyzer.OPENING_TAG, 'A', 2), + bcanalyzer.ParseTag('')) + self.assertEquals((bcanalyzer.OPENING_TAG, 'lower', 6), + bcanalyzer.ParseTag('')) + # An invalid tag (all numbers), but we allow for simplicity. 
+ self.assertEquals((bcanalyzer.OPENING_TAG, '123', 4), + bcanalyzer.ParseTag('<123>')) + # Abominations that are allowed for simplicity. + self.assertEquals((bcanalyzer.SELF_CLOSING_TAG, 'FOO', 5), + bcanalyzer.ParseTag('')) + self.assertEquals((bcanalyzer.SELF_CLOSING_TAG, 'FOO', 4), + bcanalyzer.ParseTag('')) + self.assertEquals((bcanalyzer.OPENING_TAG, 'FOO', 4), + bcanalyzer.ParseTag('>>>>')) + + # Invalid cases. + None3 = (None, None, None) + self.assertEquals(None3, bcanalyzer.ParseTag('')) + self.assertEquals(None3, bcanalyzer.ParseTag(' ')) + self.assertEquals(None3, bcanalyzer.ParseTag('<>')) + self.assertEquals(None3, bcanalyzer.ParseTag('<> ')) + self.assertEquals(None3, bcanalyzer.ParseTag('< >')) + self.assertEquals(None3, bcanalyzer.ParseTag('<')) + self.assertEquals(None3, bcanalyzer.ParseTag('< FOO>')) + self.assertEquals(None3, bcanalyzer.ParseTag('')) + self.assertEquals(None3, bcanalyzer.ParseTag('')) + self.assertEquals(None3, bcanalyzer.ParseTag('< FOO />')) + self.assertEquals(None3, bcanalyzer.ParseTag('')) + self.assertEquals(None3, bcanalyzer.ParseTag('')) + self.assertEquals(None3, bcanalyzer.ParseTag('<AAA>')) + + def testAnalyzer(self): + # Save global param in bcanalyzer. + saved_char_width_limit = bcanalyzer._CHAR_WIDTH_LIMIT + + for width_limit, include_4byte_strings in [(2, False), (4, True)]: + # Tweak global param in bcanalyzer. + bcanalyzer._CHAR_WIDTH_LIMIT = width_limit + + encoded_results = bcanalyzer.RunBcAnalyzerOnIntermediates( + ['test.o'], _TEST_TOOL_PREFIX, _TEST_OUTPUT_DIR) + results = concurrent.DecodeDictOfLists( + encoded_results, value_transform=ast.literal_eval) + self.assertEquals(['test.o'], results.keys()) + str_list = results['test.o'] + + # See mock_bcanalyzer.py for details on the C++ test file. 
+ expected = [] + expected.append(_MakeString(8, ['Test1a', 0])) + expected.append(_MakeString(8, ['Test1b', 0])) + expected.append(_MakeString(8, ['Test2a', 0])) + expected.append(_MakeString(8, ['Test2b', 0])) + expected.append(_MakeString(16, ['Test3a', 0])) + expected.append(_MakeString(16, ['Test3b', 0])) + if include_4byte_strings: + expected.append(_MakeString(32, ['Test4a', 0])) + expected.append(_MakeString(32, ['Test4b', 0])) + expected.append(_MakeString(8, [1, 0, 0, 1, 1, 0])) + expected.append(_MakeString(8, [1, 0, 0, 1, 1, 1])) + expected.append(_MakeString(8, ['Test5a', 0])) + expected.append(_MakeString(8, ['Test5b', 1])) + expected.append(_MakeString(16, ['Test6a', 0])) + expected.append(_MakeString(16, ['Test6b', 1])) + if include_4byte_strings: + expected.append(_MakeString(32, ['Test7a', 0])) + expected.append(_MakeString(32, ['Test7b', 1])) + expected.append(_MakeString(8, ['Test8a', 0])) + expected.append(_MakeString(8, ['Test8b', 0])) + # Exclude |{u8a, u8b, u16a, u16b, u32a, u32b, u64a, u64b}|. + # Exclude |{s8empty, s16empty, s32empty}|. + expected.append(_MakeString(8, ['1a', 0])) + # Exclude |zeros|, which should be in .bss section. + + self.assertEquals(expected, str_list) + + # Restore globa param in bcanalyzer. + bcanalyzer._CHAR_WIDTH_LIMIT = saved_char_width_limit + +if __name__ == '__main__': + unittest.main() diff --git a/chromium/tools/binary_size/libsupersize/concurrent.py b/chromium/tools/binary_size/libsupersize/concurrent.py index 104308c05ee..b971d94518d 100644 --- a/chromium/tools/binary_size/libsupersize/concurrent.py +++ b/chromium/tools/binary_size/libsupersize/concurrent.py @@ -26,6 +26,7 @@ _silence_exceptions = False # Used to pass parameters to forked processes without pickling. 
_fork_params = None +_fork_kwargs = None class _ImmediateResult(object): @@ -66,7 +67,7 @@ class _FuncWrapper(object): def __call__(self, index, _=None): try: - return self._func(*_fork_params[index]) + return self._func(*_fork_params[index], **_fork_kwargs) except Exception, e: # Only keep the exception type for builtin exception types or else risk # further marshalling exceptions. @@ -150,14 +151,18 @@ def _CheckForException(value): sys.exit(1) -def _MakeProcessPool(job_params): +def _MakeProcessPool(job_params, **job_kwargs): global _all_pools global _fork_params + global _fork_kwargs assert _fork_params is None + assert _fork_kwargs is None pool_size = min(len(job_params), multiprocessing.cpu_count()) _fork_params = job_params + _fork_kwargs = job_kwargs ret = multiprocessing.Pool(pool_size) _fork_params = None + _fork_kwargs = None if _all_pools is None: _all_pools = [] atexit.register(_TerminatePools) @@ -175,15 +180,18 @@ def ForkAndCall(func, args, decode_func=None): pool = None result = _ImmediateResult(func(*args)) else: - pool = _MakeProcessPool([args]) + pool = _MakeProcessPool([args]) # Omit |kwargs|. result = pool.apply_async(_FuncWrapper(func), (0,)) pool.close() return _WrappedResult(result, pool=pool, decode_func=decode_func) -def BulkForkAndCall(func, arg_tuples): +def BulkForkAndCall(func, arg_tuples, **kwargs): """Calls |func| in a fork'ed process for each set of args within |arg_tuples|. + Args: + kwargs: Common key word arguments to be passed to |func|. + Yields the return values as they come in. 
""" arg_tuples = list(arg_tuples) @@ -192,10 +200,10 @@ def BulkForkAndCall(func, arg_tuples): if DISABLE_ASYNC: for args in arg_tuples: - yield func(*args) + yield func(*args, **kwargs) return - pool = _MakeProcessPool(arg_tuples) + pool = _MakeProcessPool(arg_tuples, **kwargs) wrapped_func = _FuncWrapper(func) for result in pool.imap_unordered(wrapped_func, xrange(len(arg_tuples))): _CheckForException(result) @@ -215,7 +223,7 @@ def CallOnThread(func, *args, **kwargs): return result -def EncodeDictOfLists(d, key_transform=None): +def EncodeDictOfLists(d, key_transform=None, value_transform=None): """Serializes a dict where values are lists of strings. Does not support '' as keys, nor [''] as values. @@ -226,7 +234,11 @@ def EncodeDictOfLists(d, key_transform=None): if key_transform: keys = (key_transform(k) for k in keys) keys = '\x01'.join(keys) - values = '\x01'.join('\x02'.join(x) for x in d.itervalues()) + if value_transform: + values = '\x01'.join('\x02'.join(value_transform(y) for y in x) for x in + d.itervalues()) + else: + values = '\x01'.join('\x02'.join(x) for x in d.itervalues()) return keys, values diff --git a/chromium/tools/binary_size/libsupersize/concurrent_test.py b/chromium/tools/binary_size/libsupersize/concurrent_test.py index 68f1fb8ae80..683e0e97d12 100755 --- a/chromium/tools/binary_size/libsupersize/concurrent_test.py +++ b/chromium/tools/binary_size/libsupersize/concurrent_test.py @@ -10,7 +10,8 @@ import unittest import concurrent -def _ForkTestHelper(test_instance, parent_pid, arg1, arg2, _=None): +def _ForkTestHelper(arg1, arg2, pickle_me_not, test_instance, parent_pid): + _ = pickle_me_not # Suppress lint warning. 
test_instance.assertNotEquals(os.getpid(), parent_pid) return arg1 + arg2 @@ -96,12 +97,13 @@ class ConcurrentTest(unittest.TestCase): def testForkAndCall_normal(self): parent_pid = os.getpid() result = concurrent.ForkAndCall( - _ForkTestHelper, (self, parent_pid, 1, 2, Unpicklable())) + _ForkTestHelper, (1, 2, Unpicklable(), self, parent_pid)) self.assertEquals(3, result.get()) def testForkAndCall_exception(self): parent_pid = os.getpid() - result = concurrent.ForkAndCall(_ForkTestHelper, (self, parent_pid, 1, 'a')) + result = concurrent.ForkAndCall( + _ForkTestHelper, (1, 'a', None, self, parent_pid)) self.assertRaises(TypeError, result.get) def testBulkForkAndCall_none(self): @@ -111,20 +113,35 @@ class ConcurrentTest(unittest.TestCase): def testBulkForkAndCall_few(self): parent_pid = os.getpid() results = concurrent.BulkForkAndCall(_ForkTestHelper, [ - (self, parent_pid, 1, 2, Unpicklable()), - (self, parent_pid, 3, 4)]) + (1, 2, Unpicklable(), self, parent_pid), + (3, 4, None, self, parent_pid)]) + self.assertEquals({3, 7}, set(results)) + + def testBulkForkAndCall_few_kwargs(self): + parent_pid = os.getpid() + results = concurrent.BulkForkAndCall(_ForkTestHelper, + [(1, 2, Unpicklable()), (3, 4, None)], + test_instance=self, parent_pid=parent_pid) self.assertEquals({3, 7}, set(results)) def testBulkForkAndCall_many(self): parent_pid = os.getpid() - args = [(self, parent_pid, 1, 2, Unpicklable())] * 100 + args = [(1, 2, Unpicklable(), self, parent_pid) for _ in xrange(100)] results = concurrent.BulkForkAndCall(_ForkTestHelper, args) self.assertEquals([3] * 100, list(results)) + def testBulkForkAndCall_many_kwargs(self): + parent_pid = os.getpid() + args = [(1, 2) for _ in xrange(100)] + results = concurrent.BulkForkAndCall( + _ForkTestHelper, args, pickle_me_not=Unpicklable(), test_instance=self, + parent_pid=parent_pid) + self.assertEquals([3] * 100, list(results)) + def testBulkForkAndCall_exception(self): parent_pid = os.getpid() results = 
concurrent.BulkForkAndCall(_ForkTestHelper, [ - (self, parent_pid, 1, 'a')]) + (1, 'a', self, parent_pid)]) self.assertRaises(TypeError, results.next) if __name__ == '__main__': diff --git a/chromium/tools/binary_size/libsupersize/console.py b/chromium/tools/binary_size/libsupersize/console.py index c6756e532a9..0517acf9de5 100644 --- a/chromium/tools/binary_size/libsupersize/console.py +++ b/chromium/tools/binary_size/libsupersize/console.py @@ -23,8 +23,8 @@ import diff import file_format import match_util import models -import nm import path_util +import string_extract # Number of lines before using less for Print(). @@ -122,7 +122,8 @@ class _Session(object): elf_path = self._ElfPathForSymbol( size_info, tool_prefix, elf_path) - address, offset, _ = nm.LookupElfRodataInfo(elf_path, tool_prefix) + address, offset, _ = string_extract.LookupElfRodataInfo( + elf_path, tool_prefix) adjust = offset - address ret = [] with open(elf_path, 'rb') as f: @@ -304,9 +305,11 @@ class _Session(object): size_info, tool_prefix, elf_path) args = [path_util.GetObjDumpPath(tool_prefix), '--disassemble', '--source', - '--line-numbers', '--demangle', - '--start-address=0x%x' % symbol.address, + '--line-numbers', '--start-address=0x%x' % symbol.address, '--stop-address=0x%x' % symbol.end_address, elf_path] + # llvm-objdump does not support '--demangle' switch. 
+ if not self._tool_prefix_finder.IsLld(): + args.append('--demangle') if self._disassemble_prefix_len is None: prefix_len = self._DetectDisassemblePrefixLen(args) if prefix_len is not None: @@ -469,9 +472,11 @@ def Run(args, parser): output_directory_finder = path_util.OutputDirectoryFinder( value=args.output_directory, any_path_within_output_directory=args.inputs[0]) + linker_name = size_infos[-1].metadata.get(models.METADATA_LINKER_NAME) tool_prefix_finder = path_util.ToolPrefixFinder( value=args.tool_prefix, - output_directory_finder=output_directory_finder) + output_directory_finder=output_directory_finder, + linker_name=linker_name) session = _Session(size_infos, output_directory_finder, tool_prefix_finder) if args.query: diff --git a/chromium/tools/binary_size/libsupersize/file_format.py b/chromium/tools/binary_size/libsupersize/file_format.py index 6ee24654478..cfe6f63d93a 100644 --- a/chromium/tools/binary_size/libsupersize/file_format.py +++ b/chromium/tools/binary_size/libsupersize/file_format.py @@ -2,13 +2,77 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -"""Deals with loading & saving .size files.""" +"""Deals with loading & saving .size files. + +The .size file is written in the following format. There are no section +delimeters, instead the end of a section is usually determined by a row count +on the first line of a section, followed by that amount of rows. In other +cases, the sections have a known size. + +Header +------ +4 lines long. +Line 0 of the file is a header comment. +Line 1 is the serialization version of the file. +Line 2 is the number of characters in the metadata string. +Line 3 is the metadata string, a stringified JSON object. + +Path list +--------- +A list of paths. The first line is the size of the list, +and the next N lines that follow are items in the list. Each item is a tuple +of (object_path, source_path) where the two parts are tab seperated. 
+ +Component list +-------------- +A list of components. The first line is the size of the list, +and the next N lines that follow are items in the list. Each item is a unique +COMPONENT which is referenced later. +This section is only present if 'has_components' is True in the metadata. + +Symbol counts +------------- +2 lines long. +The first line is a tab seperated list of section names. +The second line is a tab seperated list of symbol group lengths, in the same +order as the previous line. + +Numeric values +-------------- +In each section, the number of rows is the same as the number of section names +in Symbol counts. The values on a row are space seperated, in the order of the +symbols in each group. + +Addressses +~~~~~~~~~~ +Symbol start addresses which are delta-encoded. + +Sizes +~~~~~ +The number of bytes this symbol takes up. + +Path indicies +~~~~~~~~~~~~~ +Indicies that reference paths in the prior Path list section. Delta-encoded. + +Component indicies +~~~~~~~~~~~~~~~~~~ +Indicies that reference components in the prior Component list section. +Delta-encoded. + +Symbols +------- +The final section contains details info on each symbol. Each line represents +a single symbol. Values are tab seperated and follow this format: +symbol.full_name, symbol.num_aliases, symbol.flags +|num_aliases| will be omitted if the aliases of the symbol are the same as the +previous line. |flags| will be omitted if there are no flags. +""" import cStringIO -import calendar import contextlib -import datetime import gzip +import itertools import json import logging import os @@ -30,11 +94,20 @@ def _LogSize(file_obj, desc): def _SaveSizeInfoToFile(size_info, file_obj): + """Saves size info to a .size file. 
+ + Args: + size_info: Data to write to the file + file_object: File opened for writing + """ + # Created by supersize header file_obj.write('# Created by //tools/binary_size\n') file_obj.write('%s\n' % _SERIALIZATION_VERSION) + # JSON metadata headers = { 'metadata': size_info.metadata, 'section_sizes': size_info.section_sizes, + 'has_components': True, } metadata_str = json.dumps(headers, file_obj, indent=2, sort_keys=True) file_obj.write('%d\n' % len(metadata_str)) @@ -45,19 +118,33 @@ def _SaveSizeInfoToFile(size_info, file_obj): # Store a single copy of all paths and have them referenced by index. unique_path_tuples = sorted(set( (s.object_path, s.source_path) for s in size_info.raw_symbols)) - path_tuples = dict.fromkeys(unique_path_tuples) - for i, tup in enumerate(unique_path_tuples): - path_tuples[tup] = i + path_tuples = {tup: i for i, tup in enumerate(unique_path_tuples)} file_obj.write('%d\n' % len(unique_path_tuples)) file_obj.writelines('%s\t%s\n' % pair for pair in unique_path_tuples) _LogSize(file_obj, 'paths') # For libchrome, adds 200kb. + # Store a single copy of all components and have them referenced by index. + unique_components = sorted(set(s.component for s in size_info.raw_symbols)) + components = {comp: i for i, comp in enumerate(unique_components)} + file_obj.write('%d\n' % len(unique_components)) + file_obj.writelines('%s\n' % comp for comp in unique_components) + _LogSize(file_obj, 'components') + # Symbol counts by section. by_section = size_info.raw_symbols.GroupedBySectionName() file_obj.write('%s\n' % '\t'.join(g.name for g in by_section)) file_obj.write('%s\n' % '\t'.join(str(len(g)) for g in by_section)) + # Addresses, sizes, path indicies, component indicies def write_numeric(func, delta=False): + """Write the result of func(symbol) for each symbol in each symbol group. + + Each line written represents one symbol group in |by_section|. 
+ The values in each line are space seperated and are the result of calling + |func| with the Nth symbol in the group. + + If |delta| is True, the differences in values are written instead. + """ for group in by_section: prev_value = 0 last_sym = group[-1] @@ -79,6 +166,8 @@ def _SaveSizeInfoToFile(size_info, file_obj): write_numeric(lambda s: path_tuples[(s.object_path, s.source_path)], delta=True) _LogSize(file_obj, 'path indices') # For libchrome: adds 125kb. + write_numeric(lambda s: components[s.component], delta=True) + _LogSize(file_obj, 'component indices') prev_aliases = None for group in by_section: @@ -93,82 +182,145 @@ def _SaveSizeInfoToFile(size_info, file_obj): _LogSize(file_obj, 'names (final)') # For libchrome: adds 3.5mb. +def _ReadLine(file_iter): + """Read a line from a file object iterator and remove the newline character. + + Args: + file_iter: File object iterator + + Returns: + String + """ + # str[:-1] removes the last character from a string, specifically the newline + return next(file_iter)[:-1] + + +def _ReadValuesFromLine(file_iter, split): + """Read a list of values from a line in a file object iterator. + + Args: + file_iter: File object iterator + split: Splits the line with the given string + + Returns: + List of string values + """ + return _ReadLine(file_iter).split(split) + + def _LoadSizeInfoFromFile(file_obj, size_path): - """Loads a size_info from the given file.""" + """Loads a size_info from the given file. + + See _SaveSizeInfoToFile for details on the .size file format. + + Args: + file_obj: File to read, should be a GzipFile + """ lines = iter(file_obj) - next(lines) # Comment line. - actual_version = next(lines)[:-1] + _ReadLine(lines) # Line 0: Created by supersize header + actual_version = _ReadLine(lines) assert actual_version == _SERIALIZATION_VERSION, ( 'Version mismatch. 
Need to write some upgrade code.') - json_len = int(next(lines)) + # JSON metadata + json_len = int(_ReadLine(lines)) json_str = file_obj.read(json_len) + headers = json.loads(json_str) section_sizes = headers['section_sizes'] metadata = headers.get('metadata') + has_components = headers.get('has_components', False) lines = iter(file_obj) - next(lines) # newline after closing } of json. + _ReadLine(lines) + + # Path list + num_path_tuples = int(_ReadLine(lines)) # Line 4 - number of paths in list + # Read the path list values and store for later + path_tuples = [_ReadValuesFromLine(lines, split='\t') + for _ in xrange(num_path_tuples)] - num_path_tuples = int(next(lines)) - path_tuples = [None] * num_path_tuples - for i in xrange(num_path_tuples): - path_tuples[i] = next(lines)[:-1].split('\t') + # Component list + if has_components: + num_components = int(_ReadLine(lines)) # number of components in list + components = [_ReadLine(lines) for _ in xrange(num_components)] - section_names = next(lines)[:-1].split('\t') - section_counts = [int(c) for c in next(lines)[:-1].split('\t')] + # Symbol counts by section. + section_names = _ReadValuesFromLine(lines, split='\t') + section_counts = [int(c) for c in _ReadValuesFromLine(lines, split='\t')] + # Addresses, sizes, path indicies, component indicies def read_numeric(delta=False): + """Read numeric values, where each line corresponds to a symbol group. + + The values in each line are space seperated. + If |delta| is True, the numbers are read as a value to add to the sum of the + prior values in the line, or as the amount to change by. 
+ """ ret = [] delta_multiplier = int(delta) for _ in section_counts: value = 0 - fields = next(lines).split(' ') - for i, f in enumerate(fields): + fields = [] + for f in _ReadValuesFromLine(lines, split=' '): value = value * delta_multiplier + int(f) - fields[i] = value + fields.append(value) ret.append(fields) return ret addresses = read_numeric(delta=True) sizes = read_numeric(delta=False) path_indices = read_numeric(delta=True) + if has_components: + component_indices = read_numeric(delta=True) + else: + component_indices = [None] * len(section_names) raw_symbols = [None] * sum(section_counts) symbol_idx = 0 - for section_index, cur_section_name in enumerate(section_names): + for (cur_section_name, cur_section_count, cur_addresses, cur_sizes, + cur_path_indicies, cur_component_indices) in itertools.izip( + section_names, section_counts, addresses, sizes, path_indices, + component_indices): alias_counter = 0 - for i in xrange(section_counts[section_index]): - parts = next(lines)[:-1].split('\t') + for i in xrange(cur_section_count): + parts = _ReadValuesFromLine(lines, split='\t') + full_name = parts[0] flags_part = None aliases_part = None + # aliases_part or flags_part may have been omitted. if len(parts) == 3: + # full_name aliases_part flags_part aliases_part = parts[1] flags_part = parts[2] elif len(parts) == 2: if parts[1][0] == '0': + # full_name aliases_part aliases_part = parts[1] else: + # full_name flags_part flags_part = parts[1] - full_name = parts[0] # Use a bit less RAM by using the same instance for this common string. 
if full_name == models.STRING_LITERAL_NAME: full_name = models.STRING_LITERAL_NAME flags = int(flags_part, 16) if flags_part else 0 num_aliases = int(aliases_part, 16) if aliases_part else 0 + # Skip the constructor to avoid default value checks new_sym = models.Symbol.__new__(models.Symbol) new_sym.section_name = cur_section_name - new_sym.address = addresses[section_index][i] - new_sym.size = sizes[section_index][i] new_sym.full_name = full_name - paths = path_tuples[path_indices[section_index][i]] - new_sym.object_path = paths[0] - new_sym.source_path = paths[1] + new_sym.address = cur_addresses[i] + new_sym.size = cur_sizes[i] + paths = path_tuples[cur_path_indicies[i]] + new_sym.object_path, new_sym.source_path = paths + component = components[cur_component_indices[i]] if has_components else '' + new_sym.component = component new_sym.flags = flags - new_sym.padding = 0 # Derived - new_sym.template_name = '' # Derived - new_sym.name = '' # Derived + # Derived + new_sym.padding = 0 + new_sym.template_name = '' + new_sym.name = '' if num_aliases: assert alias_counter == 0 diff --git a/chromium/tools/binary_size/libsupersize/html_report.py b/chromium/tools/binary_size/libsupersize/html_report.py index c47e354b784..fe8c3dd090e 100644 --- a/chromium/tools/binary_size/libsupersize/html_report.py +++ b/chromium/tools/binary_size/libsupersize/html_report.py @@ -4,227 +4,247 @@ """Creates an html report that allows you to view binary size by component.""" -import argparse +import codecs +import collections import json import logging import os -import shutil -import sys import archive -import path_util - - -# Node dictionary keys. These are output in json read by the webapp so -# keep them short to save file size. -# Note: If these change, the webapp must also change. 
-_METHOD_COUNT_MODE_KEY = 'methodCountMode' -_NODE_TYPE_KEY = 'k' -_NODE_TYPE_BUCKET = 'b' -_NODE_TYPE_PATH = 'p' -_NODE_TYPE_SYMBOL = 's' -_NODE_NAME_KEY = 'n' -_NODE_CHILDREN_KEY = 'children' -_NODE_SYMBOL_TYPE_KEY = 't' -_NODE_SYMBOL_TYPE_VTABLE = 'v' -_NODE_SYMBOL_TYPE_GENERATED = '*' -_NODE_SYMBOL_SIZE_KEY = 'value' -_NODE_MAX_DEPTH_KEY = 'maxDepth' -_NODE_LAST_PATH_ELEMENT_KEY = 'lastPathElement' - -# The display name of the bucket where we put symbols without path. -_NAME_NO_PATH_BUCKET = '(No Path)' - -# Try to keep data buckets smaller than this to avoid killing the -# graphing lib. -_BIG_BUCKET_LIMIT = 3000 - - -def _GetOrMakeChildNode(node, node_type, name): - child = node[_NODE_CHILDREN_KEY].get(name) - if child is None: - child = { - _NODE_TYPE_KEY: node_type, - _NODE_NAME_KEY: name, +import diff +import models + + +_SYMBOL_TYPE_VTABLE = 'v' +_SYMBOL_TYPE_GENERATED = '*' +_SYMBOL_TYPE_DEX_METHOD = 'm' +_SYMBOL_TYPE_OTHER = 'o' + +_COMPACT_FILE_PATH_KEY = 'p' +_COMPACT_FILE_COMPONENT_INDEX_KEY = 'c' +_COMPACT_FILE_SYMBOLS_KEY = 's' +_COMPACT_SYMBOL_NAME_KEY = 'n' +_COMPACT_SYMBOL_BYTE_SIZE_KEY = 'b' +_COMPACT_SYMBOL_TYPE_KEY = 't' +_COMPACT_SYMBOL_COUNT_KEY = 'u' + +_SMALL_SYMBOL_DESCRIPTIONS = { + 'b': 'Other small uninitialized data', + 'd': 'Other small initialized data', + 'r': 'Other small readonly data', + 't': 'Other small code', + 'v': 'Other small vtable entries', + '*': 'Other small generated symbols', + 'x': 'Other small dex non-method entries', + 'm': 'Other small dex methods', + 'p': 'Other small locale pak entries', + 'P': 'Other small non-locale pak entries', + 'o': 'Other small entries', +} + +_DEFAULT_SYMBOL_COUNT = 250000 + + +def _GetSymbolType(symbol): + symbol_type = symbol.section + if symbol.name.endswith('[vtable]'): + symbol_type = _SYMBOL_TYPE_VTABLE + elif symbol.name.endswith(']'): + symbol_type = _SYMBOL_TYPE_GENERATED + if symbol_type not in _SMALL_SYMBOL_DESCRIPTIONS: + symbol_type = _SYMBOL_TYPE_OTHER + return symbol_type 
+ + +def _GetOrAddFileNode(symbol, file_nodes, components): + path = symbol.source_path or symbol.object_path + file_node = file_nodes.get(path) + if file_node is None: + component_index = components.GetOrAdd(symbol.component) + file_node = { + _COMPACT_FILE_PATH_KEY: path, + _COMPACT_FILE_COMPONENT_INDEX_KEY: component_index, + _COMPACT_FILE_SYMBOLS_KEY: [], } - if node_type != _NODE_TYPE_SYMBOL: - child[_NODE_CHILDREN_KEY] = {} - node[_NODE_CHILDREN_KEY][name] = child - else: - assert child[_NODE_TYPE_KEY] == node_type - return child - - -def _SplitLargeBucket(bucket): - """Split the given node into sub-buckets when it's too big.""" - old_children = bucket[_NODE_CHILDREN_KEY] - count = 0 - for symbol_type, symbol_bucket in old_children.iteritems(): - count += len(symbol_bucket[_NODE_CHILDREN_KEY]) - if count > _BIG_BUCKET_LIMIT: - new_children = {} - bucket[_NODE_CHILDREN_KEY] = new_children - current_bucket = None - index = 0 - for symbol_type, symbol_bucket in old_children.iteritems(): - for symbol_name, value in symbol_bucket[_NODE_CHILDREN_KEY].iteritems(): - if index % _BIG_BUCKET_LIMIT == 0: - group_no = (index / _BIG_BUCKET_LIMIT) + 1 - node_name = '%s subgroup %d' % (_NAME_NO_PATH_BUCKET, group_no) - current_bucket = _GetOrMakeChildNode( - bucket, _NODE_TYPE_PATH, node_name) - index += 1 - symbol_size = value[_NODE_SYMBOL_SIZE_KEY] - _AddSymbolIntoFileNode(current_bucket, symbol_type, symbol_name, - symbol_size, True) - - -def _MakeChildrenDictsIntoLists(node): - """Recursively converts all children from dicts -> lists.""" - children = node.get(_NODE_CHILDREN_KEY) - if children: - children = children.values() # Convert dict -> list. - node[_NODE_CHILDREN_KEY] = children - for child in children: - _MakeChildrenDictsIntoLists(child) - if len(children) > _BIG_BUCKET_LIMIT: - logging.warning('Bucket found with %d entries. 
Might be unusable.', - len(children)) - - -def _CombineSingleChildNodes(node): - """Collapse "java"->"com"->"google" into ."java/com/google".""" - children = node.get(_NODE_CHILDREN_KEY) - if children: - child = children[0] - if len(children) == 1 and node[_NODE_TYPE_KEY] == child[_NODE_TYPE_KEY]: - node[_NODE_NAME_KEY] = '{}/{}'.format( - node[_NODE_NAME_KEY], child[_NODE_NAME_KEY]) - node[_NODE_CHILDREN_KEY] = child[_NODE_CHILDREN_KEY] - _CombineSingleChildNodes(node) - else: - for child in children: - _CombineSingleChildNodes(child) - - -def _AddSymbolIntoFileNode(node, symbol_type, symbol_name, symbol_size, - min_symbol_size): - """Puts symbol into the file path node |node|.""" - node[_NODE_LAST_PATH_ELEMENT_KEY] = True - # Don't bother with buckets when not including symbols. - if min_symbol_size == 0: - node = _GetOrMakeChildNode(node, _NODE_TYPE_BUCKET, symbol_type) - node[_NODE_SYMBOL_TYPE_KEY] = symbol_type - - # 'node' is now the symbol-type bucket. Make the child entry. - if not symbol_name or symbol_size >= min_symbol_size: - node_name = symbol_name or '[Anonymous]' - elif symbol_name.startswith('*'): - node_name = symbol_name + file_nodes[path] = file_node + return file_node + + +class IndexedSet(object): + """Set-like object where values are unique and indexed. + + Values must be immutable. + """ + + def __init__(self): + self._index_dict = {} # Value -> Index dict + self.value_list = [] # List containing all the set items + + def GetOrAdd(self, value): + """Get the index of the value in the list. Append it if not yet present.""" + index = self._index_dict.get(value) + if index is None: + self.value_list.append(value) + index = len(self.value_list) - 1 + self._index_dict[value] = index + return index + + +def _MakeTreeViewList(symbols, include_all_symbols): + """Builds JSON data of the symbols for the tree view HTML report. 
+ + As the tree is built on the client-side, this function creates a flat list + of files, where each file object contains symbols that have the same path. + + Args: + symbols: A SymbolGroup containing all symbols. + include_all_symbols: If true, include all symbols in the data file. + """ + file_nodes = {} + components = IndexedSet() + + # Build a container for symbols smaller than min_symbol_size + small_symbols = collections.defaultdict(dict) + + # Dex methods (type "m") are whitelisted for the method_count mode on the + # UI. It's important to see details on all the methods. + dex_symbols = symbols.WhereIsDex() + ordered_symbols = dex_symbols.Inverted().Sorted() + if include_all_symbols: + symbol_count = len(ordered_symbols) else: - node_name = symbol_type - node = _GetOrMakeChildNode(node, _NODE_TYPE_SYMBOL, node_name) - node[_NODE_SYMBOL_SIZE_KEY] = node.get(_NODE_SYMBOL_SIZE_KEY, 0) + symbol_size - node[_NODE_SYMBOL_TYPE_KEY] = symbol_type - - -def _MakeCompactTree(symbols, min_symbol_size, method_count_mode): - if method_count_mode: - # Include all symbols and avoid bucket nodes. 
- min_symbol_size = -1 - result = { - _NODE_NAME_KEY: '/', - _NODE_CHILDREN_KEY: {}, - _NODE_TYPE_KEY: 'p', - _NODE_MAX_DEPTH_KEY: 0, - _METHOD_COUNT_MODE_KEY: bool(method_count_mode), + symbol_count = max(_DEFAULT_SYMBOL_COUNT - len(dex_symbols), 0) + + main_symbols = dex_symbols + ordered_symbols[:symbol_count] + extra_symbols = ordered_symbols[symbol_count:] + + # Bundle symbols by the file they belong to, + # and add all the file buckets into file_nodes + for symbol in main_symbols: + symbol_type = _GetSymbolType(symbol) + symbol_size = round(symbol.pss, 2) + if symbol_size.is_integer(): + symbol_size = int(symbol_size) + symbol_count = 1 + if symbol.IsDelta() and symbol.diff_status == models.DIFF_STATUS_REMOVED: + symbol_count = -1 + + file_node = _GetOrAddFileNode(symbol, file_nodes, components) + + is_dex_method = symbol_type == _SYMBOL_TYPE_DEX_METHOD + symbol_entry = { + _COMPACT_SYMBOL_NAME_KEY: symbol.template_name, + _COMPACT_SYMBOL_TYPE_KEY: symbol_type, + _COMPACT_SYMBOL_BYTE_SIZE_KEY: symbol_size, + } + # We use symbol count for the method count mode in the diff mode report. + # Negative values are used to indicate a symbol was removed, so it should + # count as -1 rather than the default, 1. + # We don't care about accurate counts for other symbol types currently, + # so this data is only included for methods. 
+ if is_dex_method and symbol_count != 1: + symbol_entry[_COMPACT_SYMBOL_COUNT_KEY] = symbol_count + file_node[_COMPACT_FILE_SYMBOLS_KEY].append(symbol_entry) + + for symbol in extra_symbols: + symbol_type = _GetSymbolType(symbol) + + file_node = _GetOrAddFileNode(symbol, file_nodes, components) + path = file_node[_COMPACT_FILE_PATH_KEY] + + small_type_symbol = small_symbols[path].get(symbol_type) + if small_type_symbol is None: + small_type_symbol = { + _COMPACT_SYMBOL_NAME_KEY: _SMALL_SYMBOL_DESCRIPTIONS[symbol_type], + _COMPACT_SYMBOL_TYPE_KEY: symbol_type, + _COMPACT_SYMBOL_BYTE_SIZE_KEY: 0, + } + small_symbols[path][symbol_type] = small_type_symbol + file_node[_COMPACT_FILE_SYMBOLS_KEY].append(small_type_symbol) + + small_type_symbol[_COMPACT_SYMBOL_BYTE_SIZE_KEY] += symbol.pss + + meta = { + 'components': components.value_list, + 'total': symbols.pss, } - for symbol in symbols: - file_path = symbol.source_path or symbol.object_path or _NAME_NO_PATH_BUCKET - node = result - depth = 0 - for path_part in file_path.split(os.path.sep): - if not path_part: - continue - depth += 1 - node = _GetOrMakeChildNode(node, _NODE_TYPE_PATH, path_part) - - symbol_type = symbol.section - if symbol.name.endswith('[vtable]'): - symbol_type = _NODE_SYMBOL_TYPE_VTABLE - elif symbol.name.endswith(']'): - symbol_type = _NODE_SYMBOL_TYPE_GENERATED - symbol_size = 1 if method_count_mode else symbol.pss - _AddSymbolIntoFileNode(node, symbol_type, symbol.template_name, symbol_size, - min_symbol_size) - depth += 2 - result[_NODE_MAX_DEPTH_KEY] = max(result[_NODE_MAX_DEPTH_KEY], depth) - - # The (no path) bucket can be extremely large if we failed to get - # path information. Split it into subgroups if needed. 
- no_path_bucket = result[_NODE_CHILDREN_KEY].get(_NAME_NO_PATH_BUCKET) - if no_path_bucket and min_symbol_size == 0: - _SplitLargeBucket(no_path_bucket) - - _MakeChildrenDictsIntoLists(result) - _CombineSingleChildNodes(result) - - return result - - -def _CopyTemplateFiles(dest_dir): - d3_out = os.path.join(dest_dir, 'd3') - if not os.path.exists(d3_out): - os.makedirs(d3_out, 0755) - d3_src = os.path.join(path_util.SRC_ROOT, 'third_party', 'd3', 'src') - template_src = os.path.join(os.path.dirname(__file__), 'template') - shutil.copy(os.path.join(d3_src, 'LICENSE'), d3_out) - shutil.copy(os.path.join(d3_src, 'd3.js'), d3_out) - shutil.copy(os.path.join(template_src, 'index.html'), dest_dir) - shutil.copy(os.path.join(template_src, 'D3SymbolTreeMap.js'), dest_dir) + return meta, file_nodes.values() + + +def _MakeDirIfDoesNotExist(rel_path): + """Ensures a directory exists.""" + abs_path = os.path.abspath(rel_path) + try: + os.makedirs(abs_path) + except OSError: + if not os.path.isdir(abs_path): + raise def AddArguments(parser): parser.add_argument('input_file', help='Path to input .size file.') - parser.add_argument('--report-dir', metavar='PATH', required=True, - help='Write output to the specified directory. An HTML ' - 'report is generated here.') - parser.add_argument('--include-bss', action='store_true', - help='Include symbols from .bss (which consume no real ' - 'space)') - parser.add_argument('--min-symbol-size', type=float, default=1024, - help='Minimum size (PSS) for a symbol to be included as ' - 'an independent node.') - parser.add_argument('--method-count', action='store_true', - help='Show dex method count rather than size') + parser.add_argument('--report-file', metavar='PATH', required=True, + help='Write generated data to the specified ' + '.ndjson file.') + parser.add_argument('--all-symbols', action='store_true', + help='Include all symbols. 
Will cause the data file to ' + 'take longer to load.') + parser.add_argument('--diff-with', + help='Diffs the input_file against an older .size file') def Run(args, parser): if not args.input_file.endswith('.size'): parser.error('Input must end with ".size"') + if args.diff_with and not args.diff_with.endswith('.size'): + parser.error('Diff input must end with ".size"') + if not args.report_file.endswith('.ndjson'): + parser.error('Output must end with ".ndjson"') logging.info('Reading .size file') size_info = archive.LoadAndPostProcessSizeInfo(args.input_file) - symbols = size_info.raw_symbols - if args.method_count: - symbols = symbols.WhereInSection('m') - elif not args.include_bss: - symbols = symbols.WhereInSection('b').Inverted() - - # Copy report boilerplate into output directory. This also proves that the - # output directory is safe for writing, so there should be no problems writing - # the nm.out file later. - _CopyTemplateFiles(args.report_dir) + if args.diff_with: + before_size_info = archive.LoadAndPostProcessSizeInfo(args.diff_with) + after_size_info = size_info + size_info = diff.Diff(before_size_info, after_size_info) + symbols = size_info.raw_symbols + symbols = symbols.WhereDiffStatusIs(models.DIFF_STATUS_UNCHANGED).Inverted() + else: + symbols = size_info.raw_symbols logging.info('Creating JSON objects') - tree_root = _MakeCompactTree(symbols, args.min_symbol_size, args.method_count) + meta, tree_nodes = _MakeTreeViewList(symbols, args.all_symbols) + meta.update({ + 'diff_mode': bool(args.diff_with), + 'section_sizes': size_info.section_sizes, + }) + if args.diff_with: + meta.update({ + 'before_metadata': size_info.before.metadata, + 'after_metadata': size_info.after.metadata, + }) + else: + meta['metadata'] = size_info.metadata logging.info('Serializing JSON') - with open(os.path.join(args.report_dir, 'data.js'), 'w') as out_file: - out_file.write('var tree_data=') + # Write newline-delimited JSON file + with codecs.open(args.report_file, 'w', 
encoding='ascii') as out_file: # Use separators without whitespace to get a smaller file. - json.dump(tree_root, out_file, ensure_ascii=False, check_circular=False, - separators=(',', ':')) + json_dump_args = { + 'separators': (',', ':'), + 'ensure_ascii': True, + 'check_circular': False, + } + + json.dump(meta, out_file, **json_dump_args) + out_file.write('\n') + + for tree_node in tree_nodes: + json.dump(tree_node, out_file, **json_dump_args) + out_file.write('\n') - logging.warning('Report saved to %s/index.html', args.report_dir) + logging.warning('Report saved to %s', args.report_file) + logging.warning('Open server by running: \n' + 'tools/binary_size/supersize start_server %s', + args.report_file) diff --git a/chromium/tools/binary_size/libsupersize/integration_test.py b/chromium/tools/binary_size/libsupersize/integration_test.py index 8fd8e64f53d..01c6343ed23 100755 --- a/chromium/tools/binary_size/libsupersize/integration_test.py +++ b/chromium/tools/binary_size/libsupersize/integration_test.py @@ -27,7 +27,8 @@ import models _SCRIPT_DIR = os.path.dirname(__file__) _TEST_DATA_DIR = os.path.join(_SCRIPT_DIR, 'testdata') -_TEST_OUTPUT_DIR = os.path.join(_TEST_DATA_DIR, 'mock_output_directory') +_TEST_SOURCE_DIR = os.path.join(_TEST_DATA_DIR, 'mock_source_directory') +_TEST_OUTPUT_DIR = os.path.join(_TEST_SOURCE_DIR, 'out', 'Release') _TEST_TOOL_PREFIX = os.path.join( os.path.abspath(_TEST_DATA_DIR), 'mock_toolchain', '') _TEST_APK_ROOT_DIR = os.path.join(_TEST_DATA_DIR, 'mock_apk') @@ -170,6 +171,7 @@ class IntegrationTest(unittest.TestCase): # Override for testing. Lower the bar for compacting symbols, to allow # smaller test cases to be created. 
knobs.max_same_name_alias_count = 3 + knobs.src_root = _TEST_SOURCE_DIR apk_path = None apk_so_path = None if use_apk: @@ -181,23 +183,29 @@ class IntegrationTest(unittest.TestCase): pak_files = [_TEST_APK_PAK_PATH] pak_info_file = _TEST_PAK_INFO_PATH metadata = None + linker_name = 'gold' if use_elf: with _AddMocksToPath(): metadata = archive.CreateMetadata( _TEST_MAP_PATH, elf_path, apk_path, _TEST_TOOL_PREFIX, - output_directory) + output_directory, linker_name) section_sizes, raw_symbols = archive.CreateSectionSizesAndSymbols( map_path=_TEST_MAP_PATH, tool_prefix=_TEST_TOOL_PREFIX, elf_path=elf_path, output_directory=output_directory, apk_path=apk_path, apk_so_path=apk_so_path, metadata=metadata, - pak_files=pak_files, pak_info_file=pak_info_file, knobs=knobs) + pak_files=pak_files, pak_info_file=pak_info_file, + linker_name=linker_name, knobs=knobs) IntegrationTest.cached_size_info[cache_key] = archive.CreateSizeInfo( section_sizes, raw_symbols, metadata=metadata) return copy.deepcopy(IntegrationTest.cached_size_info[cache_key]) def _DoArchive(self, archive_path, use_output_directory=True, use_elf=True, use_apk=False, use_pak=False, debug_measures=False): - args = [archive_path, '--map-file', _TEST_MAP_PATH] + args = [ + archive_path, + '--map-file', _TEST_MAP_PATH, + '--source-directory', _TEST_SOURCE_DIR, + ] if use_output_directory: # Let autodetection find output_directory when --elf-file is used. 
if not use_elf: @@ -322,67 +330,105 @@ class IntegrationTest(unittest.TestCase): size_info2 = self._CloneSizeInfo(use_elf=False) size_info1.metadata = {"foo": 1, "bar": [1,2,3], "baz": "yes"} size_info2.metadata = {"foo": 1, "bar": [1,3], "baz": "yes"} - size_info1.symbols -= size_info1.symbols[:2] - size_info2.symbols -= size_info2.symbols[-3:] - size_info1.symbols[1].size -= 10 + + size_info1.raw_symbols -= size_info1.raw_symbols[:2] + size_info2.raw_symbols -= size_info2.raw_symbols[-3:] + changed_sym = size_info1.raw_symbols.WhereNameMatches('Patcher::Name_')[0] + changed_sym.size -= 10 + padding_sym = size_info2.raw_symbols.WhereNameMatches('symbol gap 0')[0] + padding_sym.padding += 20 + padding_sym.size += 20 d = diff.Diff(size_info1, size_info2) - d.symbols = d.symbols.Sorted() + d.raw_symbols = d.raw_symbols.Sorted() + self.assertEquals(d.raw_symbols.CountsByDiffStatus()[1:], [2, 2, 3]) + changed_sym = d.raw_symbols.WhereNameMatches('Patcher::Name_')[0] + padding_sym = d.raw_symbols.WhereNameMatches('symbol gap 0')[0] + # Padding-only deltas should sort after all non-padding changes. + padding_idx = d.raw_symbols.index(padding_sym) + self.assertLess(d.raw_symbols.index(changed_sym), padding_idx) + # And before bss. + self.assertTrue(d.raw_symbols[padding_idx + 1].IsBss()) + return describe.GenerateLines(d, verbose=True) def test_Diff_Aliases1(self): size_info1 = self._CloneSizeInfo() size_info2 = self._CloneSizeInfo() - # Removing 1 alias should not change the size. 
+ # Find a list of exactly 4 symbols with the same aliases in |size_info2|: + # text@2a0010: BarAlias() + # text@2a0010: FooAlias() + # text@2a0010: blink::ContiguousContainerBase::shrinkToFit() @ path1 + # text@2a0010: blink::ContiguousContainerBase::shrinkToFit() @ path2 + # The blink::...::shrinkToFit() group has another member: + # text@2a0000: blink::ContiguousContainerBase::shrinkToFit() @ path3 a1, _, _, _ = ( size_info2.raw_symbols.Filter(lambda s: s.num_aliases == 4)[0].aliases) + # Remove FooAlias(). size_info2.raw_symbols -= [a1] a1.aliases.remove(a1) + + # From |size_info1| -> |size_info2|: 1 symbol is deleted. d = diff.Diff(size_info1, size_info2) + # Total size should not change. self.assertEquals(d.raw_symbols.pss, 0) - self.assertEquals((0, 0, 1), _DiffCounts(d.raw_symbols)) - self.assertEquals((0, 0, 1), _DiffCounts(d.symbols.GroupedByFullName())) - - # Adding one alias should not change size. + # 1 symbol is erased, and PSS distributed among 3 remaining aliases, and + # considered as changed. + self.assertEquals((3, 0, 1), _DiffCounts(d.raw_symbols)) + # Grouping combines 2 x blink::ContiguousContainerBase::shrinkToFit(), so + # now there are 2 changed aliases. + self.assertEquals((2, 0, 1), _DiffCounts(d.symbols.GroupedByFullName())) + + # From |size_info2| -> |size_info1|: 1 symbol is added. d = diff.Diff(size_info2, size_info1) self.assertEquals(d.raw_symbols.pss, 0) - self.assertEquals((0, 1, 0), _DiffCounts(d.raw_symbols)) - self.assertEquals((0, 1, 0), _DiffCounts(d.symbols.GroupedByFullName())) + self.assertEquals((3, 1, 0), _DiffCounts(d.raw_symbols)) + self.assertEquals((2, 1, 0), _DiffCounts(d.symbols.GroupedByFullName())) def test_Diff_Aliases2(self): size_info1 = self._CloneSizeInfo() size_info2 = self._CloneSizeInfo() - # Removing 2 aliases should not change the size. + # Same list of 4 symbols as before.
a1, _, a2, _ = ( size_info2.raw_symbols.Filter(lambda s: s.num_aliases == 4)[0].aliases) + # Remove BarAlias() and blink::...::shrinkToFit(). size_info2.raw_symbols -= [a1, a2] a1.aliases.remove(a1) a1.aliases.remove(a2) + + # From |size_info1| -> |size_info2|: 2 symbols are deleted. d = diff.Diff(size_info1, size_info2) self.assertEquals(d.raw_symbols.pss, 0) - self.assertEquals((0, 0, 2), _DiffCounts(d.raw_symbols)) - self.assertEquals((1, 0, 1), _DiffCounts(d.symbols.GroupedByFullName())) + self.assertEquals((2, 0, 2), _DiffCounts(d.raw_symbols)) + self.assertEquals((2, 0, 1), _DiffCounts(d.symbols.GroupedByFullName())) - # Adding 2 aliases should not change size. + # From |size_info2| -> |size_info1|: 2 symbols are added. d = diff.Diff(size_info2, size_info1) self.assertEquals(d.raw_symbols.pss, 0) - self.assertEquals((0, 2, 0), _DiffCounts(d.raw_symbols)) - self.assertEquals((1, 1, 0), _DiffCounts(d.symbols.GroupedByFullName())) + self.assertEquals((2, 2, 0), _DiffCounts(d.raw_symbols)) + self.assertEquals((2, 1, 0), _DiffCounts(d.symbols.GroupedByFullName())) def test_Diff_Aliases4(self): size_info1 = self._CloneSizeInfo() size_info2 = self._CloneSizeInfo() - # Removing all 4 aliases should change the size. + # Same list of 4 symbols as before. a1, a2, a3, a4 = ( size_info2.raw_symbols.Filter(lambda s: s.num_aliases == 4)[0].aliases) + + # Remove all 4 aliases. size_info2.raw_symbols -= [a1, a2, a3, a4] + + # From |size_info1| -> |size_info2|: 4 symbols are deleted. d = diff.Diff(size_info1, size_info2) + self.assertEquals(d.raw_symbols.pss, -a1.size) self.assertEquals((0, 0, 4), _DiffCounts(d.raw_symbols)) + # When grouped, BarAlias() and FooAlias() are deleted, but the + # blink::...::shrinkToFit() has 1 remaining symbol, so is changed. self.assertEquals((1, 0, 2), _DiffCounts(d.symbols.GroupedByFullName())) - # Adding all 4 aliases should change size. + # From |size_info2| -> |size_info1|: 4 symbols are added. 
d = diff.Diff(size_info2, size_info1) self.assertEquals(d.raw_symbols.pss, a1.size) self.assertEquals((0, 4, 0), _DiffCounts(d.raw_symbols)) diff --git a/chromium/tools/binary_size/libsupersize/linker_map_parser.py b/chromium/tools/binary_size/libsupersize/linker_map_parser.py index edd0f91a751..679b90124c4 100644 --- a/chromium/tools/binary_size/libsupersize/linker_map_parser.py +++ b/chromium/tools/binary_size/libsupersize/linker_map_parser.py @@ -287,6 +287,7 @@ class MapFileParserLld(object): _LINE_RE_V0 = re.compile(r'([0-9a-f]+)\s+([0-9a-f]+)\s+(\d+) ( *)(.*)') _LINE_RE_V1 = re.compile( r'\s*[0-9a-f]+\s+([0-9a-f]+)\s+([0-9a-f]+)\s+(\d+) ( *)(.*)') + _LINE_RE = [_LINE_RE_V0, _LINE_RE_V1] def __init__(self, linker_name): self._linker_name = linker_name @@ -325,13 +326,13 @@ class MapFileParserLld(object): # 00000000002010c0 0000000000000000 0 frame_dummy # 00000000002010ed 0000000000000071 1 a.o:(.text) # 00000000002010ed 0000000000000071 0 main + # Extract e.g., 'lld_v0' -> 0, or 'lld-lto_v1' -> 1. + map_file_version = int(self._linker_name.split('_v')[1]) + pattern = MapFileParserLld._LINE_RE[map_file_version] + sym_maker = _SymbolMaker() cur_section = None cur_section_is_useful = None - if self._linker_name.endswith('v1'): - pattern = self._LINE_RE_V1 - else: - pattern = self._LINE_RE_V0 for line in lines: m = pattern.match(line) @@ -391,19 +392,58 @@ class MapFileParserLld(object): return self._section_sizes, sym_maker.syms -def DetectLinkerNameFromMapFileHeader(first_line): +def _DetectLto(lines): + """Scans LLD linker map file and returns whether LTO was used.""" + # It's assumed that the first line in |lines| was consumed to determine that + # LLD was used. Seek 'thinlto-cache' prefix within an "indicator section" as + # indicator for LTO. + found_indicator_section = False + # Potential names of "main section". Only one gets used. 
+ indicator_section_set = set(['.rodata', '.ARM.exidx']) + start_pos = -1 + for line in lines: + # Shortcut to avoid regex: The first line seen (second line in file) should + # start a section, and start with '.', e.g.: + # 194 194 13 1 .interp + # Assign |start_pos| as position of '.', and trim everything before! + if start_pos < 0: + start_pos = line.index('.') + if len(line) < start_pos: + continue + line = line[start_pos:] + tok = line.lstrip() # Allow whitespace at right. + indent_size = len(line) - len(tok) + if indent_size == 0: # Section change. + if found_indicator_section: # Exit if just visited "main section". + break + if tok.strip() in indicator_section_set: + found_indicator_section = True + elif indent_size == 8: + if found_indicator_section: + if tok.startswith('thinlto-cache'): + return True + return False + + +def DetectLinkerNameFromMapFile(lines): + """Scans linker map file, and returns a coded linker name.""" + first_line = next(lines) + if first_line.startswith('Address'): - return 'lld_v0' - elif first_line.lstrip().startswith('VMA'): - return 'lld_v1' + return 'lld-lto_v0' if _DetectLto(lines) else 'lld_v0' + + if first_line.lstrip().startswith('VMA'): + return 'lld-lto_v1' if _DetectLto(lines) else 'lld_v1' + if first_line.startswith('Archive member'): return 'gold' + raise Exception('Invalid map file: ' + first_line) class MapFileParser(object): """Parses a linker map file, with heuristic linker detection.""" - def Parse(self, lines): + def Parse(self, linker_name, lines): """Parses a linker map file. Args: @@ -412,15 +452,18 @@ class MapFileParser(object): Returns: A tuple of (section_sizes, symbols). """ - linker_name = DetectLinkerNameFromMapFileHeader(next(lines)) + next(lines) # Consume the first line of headers. 
if linker_name.startswith('lld'): inner_parser = MapFileParserLld(linker_name) elif linker_name == 'gold': inner_parser = MapFileParserGold() else: raise Exception('.map file is from a unsupported linker.') + section_sizes, syms = inner_parser.Parse(lines) for sym in syms: - if sym.object_path: # Don't want '' to become '.'. + if sym.object_path and not sym.object_path.endswith(')'): + # Don't want '' to become '.'. + # Thin archives' paths will get fixed in |ar.CreateThinObjectPath|. sym.object_path = os.path.normpath(sym.object_path) return (section_sizes, syms) diff --git a/chromium/tools/binary_size/libsupersize/main.py b/chromium/tools/binary_size/libsupersize/main.py index a289ec28ebc..fd972ac6cb0 100755 --- a/chromium/tools/binary_size/libsupersize/main.py +++ b/chromium/tools/binary_size/libsupersize/main.py @@ -18,6 +18,7 @@ import sys import archive import console import html_report +import start_server def _LogPeakRamUsage(): @@ -69,7 +70,9 @@ def main(): actions = collections.OrderedDict() actions['archive'] = (archive, 'Create a .size file') actions['html_report'] = ( - html_report, 'Create a stand-alone html report from a .size file.') + html_report, 'Create a stand-alone report from a .size file.') + actions['start_server'] = ( + start_server, 'Start a web server to view data generated by html_report') actions['console'] = ( console, 'Starts an interactive Python console for analyzing .size files.') @@ -84,6 +87,7 @@ def main(): tup[0].AddArguments(sub_parser) sub_parser.set_defaults(func=tup[0].Run) + # Show help if the command or a subcommand is called with no arguments if len(sys.argv) == 1: parser.print_help() sys.exit(1) diff --git a/chromium/tools/binary_size/libsupersize/models.py b/chromium/tools/binary_size/libsupersize/models.py index 3488bc4178a..9f27e2ad377 100644 --- a/chromium/tools/binary_size/libsupersize/models.py +++ b/chromium/tools/binary_size/libsupersize/models.py @@ -24,6 +24,8 @@ Description of common properties: are removed 
from both full_name and name during normalization). * section_name: E.g. ".text", ".rodata", ".data.rel.local" * section: The second character of |section_name|. E.g. "t", "r", "d". + * component: The team that owns this feature. + Never None, but will be '' when no component exists. """ import collections @@ -43,6 +45,7 @@ METADATA_ELF_FILENAME = 'elf_file_name' # Path relative to output_directory. METADATA_ELF_MTIME = 'elf_mtime' # int timestamp in utc. METADATA_ELF_BUILD_ID = 'elf_build_id' METADATA_GN_ARGS = 'gn_args' +METADATA_LINKER_NAME = 'linker_name' METADATA_TOOL_PREFIX = 'tool_prefix' # Path relative to SRC_ROOT. SECTION_BSS = '.bss' @@ -338,6 +341,7 @@ class Symbol(BaseSymbol): 'section_name', 'source_path', 'size', + 'component', ) def __init__(self, section_name, size_without_padding, address=None, @@ -354,14 +358,16 @@ class Symbol(BaseSymbol): self.flags = flags self.aliases = aliases self.padding = 0 + self.component = '' def __repr__(self): template = ('{}@{:x}(size_without_padding={},padding={},full_name={},' - 'object_path={},source_path={},flags={},num_aliases={})') + 'object_path={},source_path={},flags={},num_aliases={},' + 'component={})') return template.format( self.section_name, self.address, self.size_without_padding, self.padding, self.full_name, self.object_path, self.source_path, - self.FlagsString(), self.num_aliases) + self.FlagsString(), self.num_aliases, self.component) @property def pss(self): @@ -411,9 +417,16 @@ class DeltaSymbol(BaseSymbol): return DIFF_STATUS_ADDED if self.after_symbol is None: return DIFF_STATUS_REMOVED - if self.size == 0: - return DIFF_STATUS_UNCHANGED - return DIFF_STATUS_CHANGED + # Use delta size and delta PSS as indicators of change. Delta size = 0 with + # delta PSS != 0 can be caused by: + # (1) Alias addition / removal without actual binary change. + # (2) Alias merging / splitting along with binary changes, where matched + # symbols all happen the same size (hence delta size = 0). 
+ # The purpose of checking PSS is to account for (2). However, this means (1) + # would produce much more diffs than before! + if self.size != 0 or self.pss != 0: + return DIFF_STATUS_CHANGED + return DIFF_STATUS_UNCHANGED @property def address(self): @@ -453,6 +466,10 @@ class DeltaSymbol(BaseSymbol): def section_name(self): return (self.after_symbol or self.before_symbol).section_name + @property + def component(self): + return (self.after_symbol or self.before_symbol).component + @property def padding_pss(self): if self.after_symbol is None: @@ -619,6 +636,11 @@ class SymbolGroup(BaseSymbol): s.size for s in self.IterUniqueSymbols() if not s.IsBss()) return self._size + @property + def component(self): + first = self._symbols[0].component if self else '' + return first if all(s.component == first for s in self._symbols) else '' + @property def pss(self): if self._pss is None: @@ -679,8 +701,12 @@ class SymbolGroup(BaseSymbol): def Sorted(self, cmp_func=None, key=None, reverse=False): if cmp_func is None and key is None: - cmp_func = lambda a, b: cmp((a.IsBss(), abs(b.pss), a.name), - (b.IsBss(), abs(a.pss), b.name)) + if self.IsDelta(): + key = lambda s: (s.diff_status == DIFF_STATUS_UNCHANGED, s.IsBss(), + s.size_without_padding == 0, -abs(s.pss), s.name) + else: + key = lambda s: ( + s.IsBss(), s.size_without_padding == 0, -abs(s.pss), s.name) after_symbols = sorted(self._symbols, cmp_func, key, reverse) return self._CreateTransformed( @@ -746,6 +772,9 @@ class SymbolGroup(BaseSymbol): def WhereIsTemplate(self): return self.Filter(lambda s: s.template_name is not s.name) + def WhereHasComponent(self): + return self.Filter(lambda s: s.component) + def WhereSourceIsGenerated(self): return self.Filter(lambda s: s.generated_source) @@ -777,6 +806,10 @@ class SymbolGroup(BaseSymbol): return self.Filter(lambda s: (regex.search(s.source_path) or regex.search(s.object_path))) + def WhereComponentMatches(self, pattern): + regex = 
re.compile(match_util.ExpandRegexIdentifierPlaceholder(pattern)) + return self.Filter(lambda s: regex.search(s.component)) + def WhereMatches(self, pattern): """Looks for |pattern| within all paths & names.""" regex = re.compile(match_util.ExpandRegexIdentifierPlaceholder(pattern)) @@ -833,6 +866,9 @@ class SymbolGroup(BaseSymbol): Use a negative value to omit symbols entirely rather than include them outside of a group. group_factory: Function to create SymbolGroup from a list of Symbols. + + Returns: + SymbolGroup of SymbolGroups """ if group_factory is None: group_factory = lambda token, symbols: self._CreateTransformed( @@ -954,6 +990,9 @@ class SymbolGroup(BaseSymbol): def GroupedBySectionName(self): return self.GroupedBy(lambda s: s.section_name) + def GroupedByComponent(self): + return self.GroupedBy(lambda s: s.component) + def GroupedByFullName(self, min_count=2): """Groups by symbol.full_name. diff --git a/chromium/tools/binary_size/libsupersize/nm.py b/chromium/tools/binary_size/libsupersize/nm.py old mode 100755 new mode 100644 index 6ee1320e69e..e5afe97c995 --- a/chromium/tools/binary_size/libsupersize/nm.py +++ b/chromium/tools/binary_size/libsupersize/nm.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python # Copyright 2017 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -7,85 +6,27 @@ The design of this file is entirely to work around Python's lack of concurrency. -CollectAliasesByAddress: +CollectAliasesByAddress(): Runs "nm" on the elf to collect all symbol names. This reveals symbol names of identical-code-folded functions. -CollectAliasesByAddressAsync: +CollectAliasesByAddressAsync(): Runs CollectAliasesByAddress in a subprocess and returns a promise. -_BulkObjectFileAnalyzerMaster: - Creates a subprocess and sends IPCs to it asking it to do work. - -_BulkObjectFileAnalyzerSlave: - Receives IPCs and delegates logic to _BulkObjectFileAnalyzerWorker. 
- Runs _BulkObjectFileAnalyzerWorker on a background thread in order to stay - responsive to IPCs. - -_BulkObjectFileAnalyzerWorker: - Performs the actual work. Uses Process Pools to shard out per-object-file - work and then aggregates results. - -BulkObjectFileAnalyzer: - Alias for _BulkObjectFileAnalyzerMaster, but when SUPERSIZE_DISABLE_ASYNC=1, - alias for _BulkObjectFileAnalyzerWorker. - * AnalyzePaths: Run "nm" on all .o files to collect symbol names that exist - within each. Does not work with thin archives (expand them first). - * SortPaths: Sort results of AnalyzePaths(). - * AnalyzeStringLiterals: Must be run after AnalyzePaths() has completed. - Extracts string literals from .o files, and then locates them within the - "** merge strings" sections within an ELF's .rodata section. - -This file can also be run stand-alone in order to test out the logic on smaller -sample sizes. +RunNmOnIntermediates(): + BulkForkAndCall() target: Runs nm on a .a file or a list of .o files, parses + the output, extracts symbol information, and (if available) extracts string + offset information. """ -from __future__ import print_function - -import argparse -import atexit import collections -import errno -import itertools -import logging import os -import multiprocessing -import Queue -import signal import subprocess -import sys -import threading -import traceback -import ar import concurrent import demangle -import models import path_util -_MSG_ANALYZE_PATHS = 1 -_MSG_SORT_PATHS = 2 -_MSG_ANALYZE_STRINGS = 3 -_MSG_GET_SYMBOL_NAMES = 4 -_MSG_GET_STRINGS = 5 - -_active_pids = None - - -def _DecodePosition(x): - # Encoded as "123:123" - sep_idx = x.index(':') - return (int(x[:sep_idx]), int(x[sep_idx + 1:])) - - -def _MakeToolPrefixAbsolute(tool_prefix): - # Ensure tool_prefix is absolute so that CWD does not affect it - if os.path.sep in tool_prefix: - # Use abspath() on the dirname to avoid it stripping a trailing /. 
- dirname = os.path.dirname(tool_prefix) - tool_prefix = os.path.abspath(dirname) + tool_prefix[len(dirname):] - return tool_prefix - def _IsRelevantNmName(name): # Skip lines like: @@ -184,86 +125,6 @@ def CollectAliasesByAddressAsync(elf_path, tool_prefix): decode_func=decode) -def _LookupStringSectionPositions(target, tool_prefix, output_directory): - """Returns a dict of object_path -> [(offset, size)...] of .rodata sections. - - Args: - target: An archive path string (e.g., "foo.a") or a list of object paths. - """ - is_archive = isinstance(target, basestring) - args = [path_util.GetReadElfPath(tool_prefix), '-S', '--wide'] - if is_archive: - args.append(target) - else: - # Assign path for when len(target) == 1, (no File: line exists). - path = target[0] - args.extend(target) - - output = subprocess.check_output(args, cwd=output_directory) - lines = output.splitlines() - section_positions_by_path = {} - cur_offsets = [] - for line in lines: - # File: base/third_party/libevent/libevent.a(buffer.o) - # [Nr] Name Type Addr Off Size ES Flg Lk Inf Al - # [11] .rodata.str1.1 PROGBITS 00000000 0000b4 000004 01 AMS 0 0 1 - # [11] .rodata.str4.4 PROGBITS 00000000 0000b4 000004 01 AMS 0 0 4 - # [11] .rodata.str8.8 PROGBITS 00000000 0000b4 000004 01 AMS 0 0 8 - # [80] .rodata..L.str PROGBITS 00000000 000530 000002 00 A 0 0 1 - # The various string sections differ by alignment. - # The presence of a wchar_t literal (L"asdf") seems to make a str4 section. - # When multiple sections exist, nm gives us no indication as to which - # section each string corresponds to. - if line.startswith('File: '): - if cur_offsets: - section_positions_by_path[path] = cur_offsets - cur_offsets = [] - path = line[6:] - elif '.rodata.' in line: - progbits_idx = line.find('PROGBITS ') - if progbits_idx != -1: - fields = line[progbits_idx:].split() - position = (int(fields[2], 16), int(fields[3], 16)) - # The heuristics in _IterStringLiterals rely on str1 coming first. 
- if fields[-1] == '1': - cur_offsets.insert(0, position) - else: - cur_offsets.append(position) - if cur_offsets: - section_positions_by_path[path] = cur_offsets - return section_positions_by_path - - -def LookupElfRodataInfo(elf_path, tool_prefix): - """Returns (address, offset, size) for the .rodata section.""" - args = [path_util.GetReadElfPath(tool_prefix), '-S', '--wide', elf_path] - output = subprocess.check_output(args) - lines = output.splitlines() - for line in lines: - # [Nr] Name Type Addr Off Size ES Flg Lk Inf Al - # [07] .rodata PROGBITS 025e7000 237c000 5ec4f6 00 A 0 0 256 - if '.rodata ' in line: - fields = line[line.index(models.SECTION_RODATA):].split() - return int(fields[2], 16), int(fields[3], 16), int(fields[4], 16) - raise AssertionError('No .rodata for command: ' + repr(args)) - - -def _ReadFileChunks(path, positions): - """Returns a list of strings corresponding to |positions|. - - Args: - positions: List of (offset, size). - """ - ret = [] - if not positions: - return ret - with open(path, 'rb') as f: - for offset, size in positions: - f.seek(offset) - ret.append(f.read(size)) - return ret - - def _ParseOneObjectFileNmOutput(lines): # Constructors are often repeated because they have the same unmangled # name, but multiple mangled names. See: @@ -288,145 +149,8 @@ def _ParseOneObjectFileNmOutput(lines): return string_addresses, symbol_names -def _ReadStringSections(target, output_directory, positions_by_path): - """Returns a dict of object_path -> [string...] of .rodata chunks. - - Args: - target: An archive path string (e.g., "foo.a") or a list of object paths. - positions_by_path: A dict of object_path -> [(offset, size)...] 
- """ - is_archive = isinstance(target, basestring) - string_sections_by_path = {} - if is_archive: - for subpath, chunk in ar.IterArchiveChunks( - os.path.join(output_directory, target)): - path = '{}({})'.format(target, subpath) - positions = positions_by_path.get(path) - # No positions if file has no string literals. - if positions: - string_sections_by_path[path] = ( - [chunk[offset:offset + size] for offset, size in positions]) - else: - for path in target: - positions = positions_by_path.get(path) - # We already log a warning about this in _IterStringLiterals(). - if positions: - string_sections_by_path[path] = _ReadFileChunks( - os.path.join(output_directory, path), positions) - return string_sections_by_path - - -def _ExtractArchivePath(path): - # E.g. foo/bar.a(baz.o) - if path.endswith(')'): - start_idx = path.index('(') - return path[:start_idx] - return None - - -def _IterStringLiterals(path, addresses, obj_sections): - """Yields all string literals (including \0) for the given object path. - - Args: - path: Object file path. - addresses: List of string offsets encoded as hex strings. - obj_sections: List of contents of .rodata.str sections read from the given - object file. - """ - - next_offsets = sorted(int(a, 16) for a in addresses) - if not obj_sections: - # Happens when there is an address for a symbol which is not actually a - # string literal, or when string_sections_by_path is missing an entry. - logging.warning('Object has %d strings but no string sections: %s', - len(addresses), path) - return - for section_data in obj_sections: - cur_offsets = next_offsets - # Always assume first element is 0. I'm not entirely sure why this is - # necessary, but strings get missed without it. - next_offsets = [0] - prev_offset = 0 - # TODO(agrieve): Switch to using nm --print-size in order to capture the - # address+size of each string rather than just the address. 
- for offset in cur_offsets[1:]: - if offset >= len(section_data): - # Remaining offsets are for next section. - next_offsets.append(offset) - continue - # Figure out which offsets apply to this section via heuristic of them - # all ending with a null character. - if offset == prev_offset or section_data[offset - 1] != '\0': - next_offsets.append(offset) - continue - yield section_data[prev_offset:offset] - prev_offset = offset - - if prev_offset < len(section_data): - yield section_data[prev_offset:] - - # This is a target for BulkForkAndCall(). -def _ResolveStringPieces(encoded_string_addresses_by_path, string_data, - tool_prefix, output_directory): - string_addresses_by_path = concurrent.DecodeDictOfLists( - encoded_string_addresses_by_path) - # Assign |target| as archive path, or a list of object paths. - any_path = next(string_addresses_by_path.iterkeys()) - target = _ExtractArchivePath(any_path) - if not target: - target = string_addresses_by_path.keys() - - # Run readelf to find location of .rodata within the .o files. - section_positions_by_path = _LookupStringSectionPositions( - target, tool_prefix, output_directory) - # Load the .rodata sections (from object files) as strings. - string_sections_by_path = _ReadStringSections( - target, output_directory, section_positions_by_path) - - # list of elf_positions_by_path. - ret = [collections.defaultdict(list) for _ in string_data] - # Brute-force search of strings within ** merge strings sections. - # This is by far the slowest part of AnalyzeStringLiterals(). - # TODO(agrieve): Pre-process string_data into a dict of literal->address (at - # least for ascii strings). - for path, object_addresses in string_addresses_by_path.iteritems(): - for value in _IterStringLiterals( - path, object_addresses, string_sections_by_path.get(path)): - first_match = -1 - first_match_dict = None - for target_dict, data in itertools.izip(ret, string_data): - # Set offset so that it will be 0 when len(value) is added to it below. 
- offset = -len(value) - while True: - offset = data.find(value, offset + len(value)) - if offset == -1: - break - # Preferring exact matches (those following \0) over substring matches - # significantly increases accuracy (although shows that linker isn't - # being optimal). - if offset == 0 or data[offset - 1] == '\0': - break - if first_match == -1: - first_match = offset - first_match_dict = target_dict - if offset != -1: - break - if offset == -1: - # Exact match not found, so take suffix match if it exists. - offset = first_match - target_dict = first_match_dict - # Missing strings happen when optimization make them unused. - if offset != -1: - # Encode tuple as a string for easier mashalling. - target_dict[path].append( - str(offset) + ':' + str(len(value))) - - return [concurrent.EncodeDictOfLists(x) for x in ret] - - -# This is a target for BulkForkAndCall(). -def _RunNmOnIntermediates(target, tool_prefix, output_directory): +def RunNmOnIntermediates(target, tool_prefix, output_directory): """Returns encoded_symbol_names_by_path, encoded_string_addresses_by_path. Args: @@ -471,287 +195,3 @@ def _RunNmOnIntermediates(target, tool_prefix, output_directory): # down on marshalling overhead. return (concurrent.EncodeDictOfLists(symbol_names_by_path), concurrent.EncodeDictOfLists(string_addresses_by_path)) - - -class _BulkObjectFileAnalyzerWorker(object): - def __init__(self, tool_prefix, output_directory): - self._tool_prefix = _MakeToolPrefixAbsolute(tool_prefix) - self._output_directory = output_directory - self._paths_by_name = collections.defaultdict(list) - self._encoded_string_addresses_by_path_chunks = [] - self._list_of_encoded_elf_string_positions_by_path = None - - def AnalyzePaths(self, paths): - def iter_job_params(): - object_paths = [] - for path in paths: - # Note: _ResolveStringPieces relies upon .a not being grouped. 
- if path.endswith('.a'): - yield path, self._tool_prefix, self._output_directory - else: - object_paths.append(path) - - BATCH_SIZE = 50 # Chosen arbitrarily. - for i in xrange(0, len(object_paths), BATCH_SIZE): - batch = object_paths[i:i + BATCH_SIZE] - yield batch, self._tool_prefix, self._output_directory - - params = list(iter_job_params()) - # Order of the jobs doesn't matter since each job owns independent paths, - # and our output is a dict where paths are the key. - results = concurrent.BulkForkAndCall(_RunNmOnIntermediates, params) - - # Names are still mangled. - all_paths_by_name = self._paths_by_name - for encoded_syms, encoded_strs in results: - symbol_names_by_path = concurrent.DecodeDictOfLists(encoded_syms) - for path, names in symbol_names_by_path.iteritems(): - for name in names: - all_paths_by_name[name].append(path) - - if encoded_strs != concurrent.EMPTY_ENCODED_DICT: - self._encoded_string_addresses_by_path_chunks.append(encoded_strs) - logging.debug('worker: AnalyzePaths() completed.') - - def SortPaths(self): - # Finally, demangle all names, which can result in some merging of lists. - self._paths_by_name = demangle.DemangleKeysAndMergeLists( - self._paths_by_name, self._tool_prefix) - # Sort and uniquefy. - for key in self._paths_by_name.iterkeys(): - self._paths_by_name[key] = sorted(set(self._paths_by_name[key])) - - def AnalyzeStringLiterals(self, elf_path, elf_string_positions): - logging.debug('worker: AnalyzeStringLiterals() started.') - # Read string_data from elf_path, to be shared by forked processes. 
- address, offset, _ = LookupElfRodataInfo(elf_path, self._tool_prefix) - adjust = address - offset - abs_string_positions = ( - (addr - adjust, s) for addr, s in elf_string_positions) - string_data = _ReadFileChunks(elf_path, abs_string_positions) - - params = ( - (chunk, string_data, self._tool_prefix, self._output_directory) - for chunk in self._encoded_string_addresses_by_path_chunks) - # Order of the jobs doesn't matter since each job owns independent paths, - # and our output is a dict where paths are the key. - results = concurrent.BulkForkAndCall(_ResolveStringPieces, params) - results = list(results) - - final_result = [] - for i in xrange(len(elf_string_positions)): - final_result.append( - concurrent.JoinEncodedDictOfLists([r[i] for r in results])) - self._list_of_encoded_elf_string_positions_by_path = final_result - logging.debug('worker: AnalyzeStringLiterals() completed.') - - def GetSymbolNames(self): - return self._paths_by_name - - def GetStringPositions(self): - return [concurrent.DecodeDictOfLists(x, value_transform=_DecodePosition) - for x in self._list_of_encoded_elf_string_positions_by_path] - - def GetEncodedStringPositions(self): - return self._list_of_encoded_elf_string_positions_by_path - - def Close(self): - pass - - -def _TerminateSubprocesses(): - global _active_pids - if _active_pids: - for pid in _active_pids: - os.kill(pid, signal.SIGKILL) - _active_pids = [] - - -class _BulkObjectFileAnalyzerMaster(object): - """Runs BulkObjectFileAnalyzer in a subprocess.""" - def __init__(self, tool_prefix, output_directory): - self._child_pid = None - self._pipe = None - self._tool_prefix = tool_prefix - self._output_directory = output_directory - - def _Spawn(self): - global _active_pids - parent_conn, child_conn = multiprocessing.Pipe() - self._child_pid = os.fork() - if self._child_pid: - # We are the parent process. 
- if _active_pids is None: - _active_pids = [] - atexit.register(_TerminateSubprocesses) - _active_pids.append(self._child_pid) - self._pipe = parent_conn - else: - # We are the child process. - logging.root.handlers[0].setFormatter(logging.Formatter( - 'nm: %(levelname).1s %(relativeCreated)6d %(message)s')) - worker_analyzer = _BulkObjectFileAnalyzerWorker( - self._tool_prefix, self._output_directory) - slave = _BulkObjectFileAnalyzerSlave(worker_analyzer, child_conn) - slave.Run() - - def AnalyzePaths(self, paths): - if self._child_pid is None: - self._Spawn() - - logging.debug('Sending batch of %d paths to subprocess', len(paths)) - payload = '\x01'.join(paths) - self._pipe.send((_MSG_ANALYZE_PATHS, payload)) - - def SortPaths(self): - self._pipe.send((_MSG_SORT_PATHS,)) - - def AnalyzeStringLiterals(self, elf_path, string_positions): - self._pipe.send((_MSG_ANALYZE_STRINGS, elf_path, string_positions)) - - def GetSymbolNames(self): - self._pipe.send((_MSG_GET_SYMBOL_NAMES,)) - self._pipe.recv() # None - logging.debug('Decoding nm results from forked process') - encoded_paths_by_name = self._pipe.recv() - return concurrent.DecodeDictOfLists(encoded_paths_by_name) - - def GetStringPositions(self): - self._pipe.send((_MSG_GET_STRINGS,)) - self._pipe.recv() # None - logging.debug('Decoding string symbol results from forked process') - result = self._pipe.recv() - return [concurrent.DecodeDictOfLists(x, value_transform=_DecodePosition) - for x in result] - - def Close(self): - self._pipe.close() - # Child process should terminate gracefully at this point, but leave it in - # _active_pids to be killed just in case. - - -class _BulkObjectFileAnalyzerSlave(object): - """The subprocess entry point.""" - def __init__(self, worker_analyzer, pipe): - self._worker_analyzer = worker_analyzer - self._pipe = pipe - # Use a worker thread so that AnalyzeStringLiterals() is non-blocking. 
The - # thread allows the main thread to process a call to GetSymbolNames() while - # AnalyzeStringLiterals() is in progress. - self._job_queue = Queue.Queue() - self._worker_thread = threading.Thread(target=self._WorkerThreadMain) - self._allow_analyze_paths = True - - def _WorkerThreadMain(self): - while True: - # Handle exceptions so test failure will be explicit and not block. - try: - func = self._job_queue.get() - func() - except Exception: - traceback.print_exc() - self._job_queue.task_done() - - def _WaitForAnalyzePathJobs(self): - if self._allow_analyze_paths: - self._job_queue.join() - self._allow_analyze_paths = False - - def Run(self): - try: - self._worker_thread.start() - while True: - message = self._pipe.recv() - if message[0] == _MSG_ANALYZE_PATHS: - assert self._allow_analyze_paths, ( - 'Cannot call AnalyzePaths() after AnalyzeStringLiterals()s.') - paths = message[1].split('\x01') - self._job_queue.put(lambda: self._worker_analyzer.AnalyzePaths(paths)) - elif message[0] == _MSG_SORT_PATHS: - assert self._allow_analyze_paths, ( - 'Cannot call SortPaths() after AnalyzeStringLiterals()s.') - self._job_queue.put(self._worker_analyzer.SortPaths) - elif message[0] == _MSG_ANALYZE_STRINGS: - self._WaitForAnalyzePathJobs() - elf_path, string_positions = message[1:] - self._job_queue.put( - lambda: self._worker_analyzer.AnalyzeStringLiterals( - elf_path, string_positions)) - elif message[0] == _MSG_GET_SYMBOL_NAMES: - self._WaitForAnalyzePathJobs() - self._pipe.send(None) - paths_by_name = self._worker_analyzer.GetSymbolNames() - self._pipe.send(concurrent.EncodeDictOfLists(paths_by_name)) - elif message[0] == _MSG_GET_STRINGS: - self._job_queue.join() - # Send a None packet so that other side can measure IPC transfer time. - self._pipe.send(None) - self._pipe.send(self._worker_analyzer.GetEncodedStringPositions()) - except EOFError: - pass - except EnvironmentError, e: - # Parent process exited so don't log. 
- if e.errno in (errno.EPIPE, errno.ECONNRESET): - sys.exit(1) - - logging.debug('nm bulk subprocess finished.') - sys.exit(0) - - -BulkObjectFileAnalyzer = _BulkObjectFileAnalyzerMaster -if concurrent.DISABLE_ASYNC: - BulkObjectFileAnalyzer = _BulkObjectFileAnalyzerWorker - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument('--multiprocess', action='store_true') - parser.add_argument('--tool-prefix', required=True) - parser.add_argument('--output-directory', required=True) - parser.add_argument('--elf-file', type=os.path.realpath) - parser.add_argument('--show-names', action='store_true') - parser.add_argument('--show-strings', action='store_true') - parser.add_argument('objects', type=os.path.realpath, nargs='+') - - args = parser.parse_args() - logging.basicConfig(level=logging.DEBUG, - format='%(levelname).1s %(relativeCreated)6d %(message)s') - - if args.multiprocess: - bulk_analyzer = _BulkObjectFileAnalyzerMaster( - args.tool_prefix, args.output_directory) - else: - concurrent.DISABLE_ASYNC = True - bulk_analyzer = _BulkObjectFileAnalyzerWorker( - args.tool_prefix, args.output_directory) - - # Pass individually to test multiple calls. 
- for path in args.objects: - bulk_analyzer.AnalyzePaths([path]) - bulk_analyzer.SortPaths() - - names_to_paths = bulk_analyzer.GetSymbolNames() - print('Found {} names'.format(len(names_to_paths))) - if args.show_names: - for name, paths in names_to_paths.iteritems(): - print('{}: {!r}'.format(name, paths)) - - if args.elf_file: - address, offset, size = LookupElfRodataInfo( - args.elf_file, args.tool_prefix) - bulk_analyzer.AnalyzeStringLiterals(args.elf_file, ((address, size),)) - - positions_by_path = bulk_analyzer.GetStringPositions()[0] - print('Found {} string literals'.format(sum( - len(v) for v in positions_by_path.itervalues()))) - if args.show_strings: - logging.debug('.rodata adjust=%d', address - offset) - for path, positions in positions_by_path.iteritems(): - strs = _ReadFileChunks( - args.elf_file, ((offset + addr, size) for addr, size in positions)) - print('{}: {!r}'.format( - path, [s if len(s) < 20 else s[:20] + '...' for s in strs])) - - -if __name__ == '__main__': - main() diff --git a/chromium/tools/binary_size/libsupersize/obj_analyzer.py b/chromium/tools/binary_size/libsupersize/obj_analyzer.py new file mode 100755 index 00000000000..eee679babce --- /dev/null +++ b/chromium/tools/binary_size/libsupersize/obj_analyzer.py @@ -0,0 +1,371 @@ +#!/usr/bin/env python +# Copyright 2018 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Analyzer for Object Files. + +This file works around Python's lack of concurrency. + +_BulkObjectFileAnalyzerMaster: + Creates a subprocess and sends IPCs to it asking it to do work. + +_BulkObjectFileAnalyzerSlave: + Receives IPCs and delegates logic to _BulkObjectFileAnalyzerWorker. + Runs _BulkObjectFileAnalyzerWorker on a background thread in order to stay + responsive to IPCs. + +_BulkObjectFileAnalyzerWorker: + Performs the actual work. 
Uses Process Pools to shard out per-object-file + work and then aggregates results. + +BulkObjectFileAnalyzer: + Alias for _BulkObjectFileAnalyzerMaster, but when SUPERSIZE_DISABLE_ASYNC=1, + alias for _BulkObjectFileAnalyzerWorker. + * AnalyzePaths: Run "nm" on all .o files to collect symbol names that exist + within each. Does not work with thin archives (expand them first). + * SortPaths: Sort results of AnalyzePaths(). + * AnalyzeStringLiterals: Must be run after AnalyzePaths() has completed. + Extracts string literals from .o files, and then locates them within the + "** merge strings" sections within an ELF's .rodata section. + +This file can also be run stand-alone in order to test out the logic on smaller +sample sizes. +""" + +from __future__ import print_function + +import argparse +import atexit +import collections +import errno +import logging +import os +import multiprocessing +import Queue +import signal +import sys +import threading +import traceback + +import concurrent +import demangle +import nm +import string_extract + + +_MSG_ANALYZE_PATHS = 1 +_MSG_SORT_PATHS = 2 +_MSG_ANALYZE_STRINGS = 3 +_MSG_GET_SYMBOL_NAMES = 4 +_MSG_GET_STRINGS = 5 + +_active_pids = None + + +def _DecodePosition(x): + # Encoded as "123:123" + sep_idx = x.index(':') + return (int(x[:sep_idx]), int(x[sep_idx + 1:])) + + +def _MakeToolPrefixAbsolute(tool_prefix): + # Ensure tool_prefix is absolute so that CWD does not affect it + if os.path.sep in tool_prefix: + # Use abspath() on the dirname to avoid it stripping a trailing /. 
+ dirname = os.path.dirname(tool_prefix) + tool_prefix = os.path.abspath(dirname) + tool_prefix[len(dirname):] + return tool_prefix + + +class _BulkObjectFileAnalyzerWorker(object): + def __init__(self, tool_prefix, output_directory): + self._tool_prefix = _MakeToolPrefixAbsolute(tool_prefix) + self._output_directory = output_directory + self._paths_by_name = collections.defaultdict(list) + self._encoded_string_addresses_by_path_chunks = [] + self._list_of_encoded_elf_string_positions_by_path = None + + def AnalyzePaths(self, paths): + def iter_job_params(): + object_paths = [] + for path in paths: + # Note: ResolveStringPieces() relies upon .a not being grouped. + if path.endswith('.a'): + yield (path,) + else: + object_paths.append(path) + + BATCH_SIZE = 50 # Chosen arbitrarily. + for i in xrange(0, len(object_paths), BATCH_SIZE): + batch = object_paths[i:i + BATCH_SIZE] + yield (batch,) + + params = list(iter_job_params()) + # Order of the jobs doesn't matter since each job owns independent paths, + # and our output is a dict where paths are the key. + results = concurrent.BulkForkAndCall( + nm.RunNmOnIntermediates, params, tool_prefix=self._tool_prefix, + output_directory=self._output_directory) + + # Names are still mangled. + all_paths_by_name = self._paths_by_name + for encoded_syms, encoded_strs in results: + symbol_names_by_path = concurrent.DecodeDictOfLists(encoded_syms) + for path, names in symbol_names_by_path.iteritems(): + for name in names: + all_paths_by_name[name].append(path) + + if encoded_strs != concurrent.EMPTY_ENCODED_DICT: + self._encoded_string_addresses_by_path_chunks.append(encoded_strs) + logging.debug('worker: AnalyzePaths() completed.') + + def SortPaths(self): + # Finally, demangle all names, which can result in some merging of lists. + self._paths_by_name = demangle.DemangleKeysAndMergeLists( + self._paths_by_name, self._tool_prefix) + # Sort and uniquefy. 
+ for key in self._paths_by_name.iterkeys(): + self._paths_by_name[key] = sorted(set(self._paths_by_name[key])) + + def AnalyzeStringLiterals(self, elf_path, elf_string_positions): + logging.debug('worker: AnalyzeStringLiterals() started.') + # Read string_data from elf_path, to be shared by forked processes. + address, offset, _ = string_extract.LookupElfRodataInfo( + elf_path, self._tool_prefix) + adjust = address - offset + abs_string_positions = ( + (addr - adjust, s) for addr, s in elf_string_positions) + string_data = string_extract.ReadFileChunks(elf_path, abs_string_positions) + + params = ((chunk,) + for chunk in self._encoded_string_addresses_by_path_chunks) + # Order of the jobs doesn't matter since each job owns independent paths, + # and our output is a dict where paths are the key. + results = concurrent.BulkForkAndCall( + string_extract.ResolveStringPieces, params, string_data=string_data, + tool_prefix=self._tool_prefix, output_directory=self._output_directory) + results = list(results) + + final_result = [] + for i in xrange(len(elf_string_positions)): + final_result.append( + concurrent.JoinEncodedDictOfLists([r[i] for r in results])) + self._list_of_encoded_elf_string_positions_by_path = final_result + logging.debug('worker: AnalyzeStringLiterals() completed.') + + def GetSymbolNames(self): + return self._paths_by_name + + def GetStringPositions(self): + return [concurrent.DecodeDictOfLists(x, value_transform=_DecodePosition) + for x in self._list_of_encoded_elf_string_positions_by_path] + + def GetEncodedStringPositions(self): + return self._list_of_encoded_elf_string_positions_by_path + + def Close(self): + pass + + +def _TerminateSubprocesses(): + global _active_pids + if _active_pids: + for pid in _active_pids: + os.kill(pid, signal.SIGKILL) + _active_pids = [] + + +class _BulkObjectFileAnalyzerMaster(object): + """Runs BulkObjectFileAnalyzer in a subprocess.""" + def __init__(self, tool_prefix, output_directory): + self._child_pid = None + 
self._pipe = None + self._tool_prefix = tool_prefix + self._output_directory = output_directory + + def _Spawn(self): + global _active_pids + parent_conn, child_conn = multiprocessing.Pipe() + self._child_pid = os.fork() + if self._child_pid: + # We are the parent process. + if _active_pids is None: + _active_pids = [] + atexit.register(_TerminateSubprocesses) + _active_pids.append(self._child_pid) + self._pipe = parent_conn + else: + # We are the child process. + logging.root.handlers[0].setFormatter(logging.Formatter( + 'nm: %(levelname).1s %(relativeCreated)6d %(message)s')) + worker_analyzer = _BulkObjectFileAnalyzerWorker( + self._tool_prefix, self._output_directory) + slave = _BulkObjectFileAnalyzerSlave(worker_analyzer, child_conn) + slave.Run() + + def AnalyzePaths(self, paths): + if self._child_pid is None: + self._Spawn() + + logging.debug('Sending batch of %d paths to subprocess', len(paths)) + payload = '\x01'.join(paths) + self._pipe.send((_MSG_ANALYZE_PATHS, payload)) + + def SortPaths(self): + self._pipe.send((_MSG_SORT_PATHS,)) + + def AnalyzeStringLiterals(self, elf_path, string_positions): + self._pipe.send((_MSG_ANALYZE_STRINGS, elf_path, string_positions)) + + def GetSymbolNames(self): + self._pipe.send((_MSG_GET_SYMBOL_NAMES,)) + self._pipe.recv() # None + logging.debug('Decoding nm results from forked process') + encoded_paths_by_name = self._pipe.recv() + return concurrent.DecodeDictOfLists(encoded_paths_by_name) + + def GetStringPositions(self): + self._pipe.send((_MSG_GET_STRINGS,)) + self._pipe.recv() # None + logging.debug('Decoding string symbol results from forked process') + result = self._pipe.recv() + return [concurrent.DecodeDictOfLists(x, value_transform=_DecodePosition) + for x in result] + + def Close(self): + self._pipe.close() + # Child process should terminate gracefully at this point, but leave it in + # _active_pids to be killed just in case. 
+ + +class _BulkObjectFileAnalyzerSlave(object): + """The subprocess entry point.""" + def __init__(self, worker_analyzer, pipe): + self._worker_analyzer = worker_analyzer + self._pipe = pipe + # Use a worker thread so that AnalyzeStringLiterals() is non-blocking. The + # thread allows the main thread to process a call to GetSymbolNames() while + # AnalyzeStringLiterals() is in progress. + self._job_queue = Queue.Queue() + self._worker_thread = threading.Thread(target=self._WorkerThreadMain) + self._allow_analyze_paths = True + + def _WorkerThreadMain(self): + while True: + # Handle exceptions so test failure will be explicit and not block. + try: + func = self._job_queue.get() + func() + except Exception: + traceback.print_exc() + self._job_queue.task_done() + + def _WaitForAnalyzePathJobs(self): + if self._allow_analyze_paths: + self._job_queue.join() + self._allow_analyze_paths = False + + # Handle messages in a function outside the event loop, so local variables are + # independent across messages, and can be bound to jobs by lambdas using + # closures instead of functools.partial(). 
+ def _HandleMessage(self, message): + if message[0] == _MSG_ANALYZE_PATHS: + assert self._allow_analyze_paths, ( + 'Cannot call AnalyzePaths() after AnalyzeStringLiterals()s.') + paths = message[1].split('\x01') + self._job_queue.put(lambda: self._worker_analyzer.AnalyzePaths(paths)) + elif message[0] == _MSG_SORT_PATHS: + assert self._allow_analyze_paths, ( + 'Cannot call SortPaths() after AnalyzeStringLiterals()s.') + self._job_queue.put(self._worker_analyzer.SortPaths) + elif message[0] == _MSG_ANALYZE_STRINGS: + self._WaitForAnalyzePathJobs() + elf_path, string_positions = message[1:] + self._job_queue.put( + lambda: self._worker_analyzer.AnalyzeStringLiterals( + elf_path, string_positions)) + elif message[0] == _MSG_GET_SYMBOL_NAMES: + self._WaitForAnalyzePathJobs() + self._pipe.send(None) + paths_by_name = self._worker_analyzer.GetSymbolNames() + self._pipe.send(concurrent.EncodeDictOfLists(paths_by_name)) + elif message[0] == _MSG_GET_STRINGS: + self._job_queue.join() + # Send a None packet so that other side can measure IPC transfer time. + self._pipe.send(None) + self._pipe.send(self._worker_analyzer.GetEncodedStringPositions()) + + def Run(self): + try: + self._worker_thread.start() + while True: + self._HandleMessage(self._pipe.recv()) + except EOFError: + pass + except EnvironmentError, e: + # Parent process exited so don't log. 
+ if e.errno in (errno.EPIPE, errno.ECONNRESET): + sys.exit(1) + + logging.debug('nm bulk subprocess finished.') + sys.exit(0) + + +BulkObjectFileAnalyzer = _BulkObjectFileAnalyzerMaster +if concurrent.DISABLE_ASYNC: + BulkObjectFileAnalyzer = _BulkObjectFileAnalyzerWorker + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--multiprocess', action='store_true') + parser.add_argument('--tool-prefix', required=True) + parser.add_argument('--output-directory', required=True) + parser.add_argument('--elf-file', type=os.path.realpath) + parser.add_argument('--show-names', action='store_true') + parser.add_argument('--show-strings', action='store_true') + parser.add_argument('objects', type=os.path.realpath, nargs='+') + + args = parser.parse_args() + logging.basicConfig(level=logging.DEBUG, + format='%(levelname).1s %(relativeCreated)6d %(message)s') + + if args.multiprocess: + bulk_analyzer = _BulkObjectFileAnalyzerMaster( + args.tool_prefix, args.output_directory) + else: + concurrent.DISABLE_ASYNC = True + bulk_analyzer = _BulkObjectFileAnalyzerWorker( + args.tool_prefix, args.output_directory) + + # Pass individually to test multiple calls. 
+ for path in args.objects: + bulk_analyzer.AnalyzePaths([path]) + bulk_analyzer.SortPaths() + + names_to_paths = bulk_analyzer.GetSymbolNames() + print('Found {} names'.format(len(names_to_paths))) + if args.show_names: + for name, paths in names_to_paths.iteritems(): + print('{}: {!r}'.format(name, paths)) + + if args.elf_file: + address, offset, size = string_extract.LookupElfRodataInfo( + args.elf_file, args.tool_prefix) + bulk_analyzer.AnalyzeStringLiterals(args.elf_file, ((address, size),)) + + positions_by_path = bulk_analyzer.GetStringPositions()[0] + print('Found {} string literals'.format(sum( + len(v) for v in positions_by_path.itervalues()))) + if args.show_strings: + logging.debug('.rodata adjust=%d', address - offset) + for path, positions in positions_by_path.iteritems(): + strs = string_extract.ReadFileChunks( + args.elf_file, ((offset + addr, size) for addr, size in positions)) + print('{}: {!r}'.format( + path, [s if len(s) < 20 else s[:20] + '...' for s in strs])) + + +if __name__ == '__main__': + main() diff --git a/chromium/tools/binary_size/libsupersize/path_util.py b/chromium/tools/binary_size/libsupersize/path_util.py index 1432fd3a69a..9ef38105a8f 100644 --- a/chromium/tools/binary_size/libsupersize/path_util.py +++ b/chromium/tools/binary_size/libsupersize/path_util.py @@ -85,11 +85,14 @@ class ToolPrefixFinder(_PathFinder): self._output_directory_finder = output_directory_finder self._linker_name = linker_name; + def IsLld(self): + return self._linker_name.startswith('lld') if self._linker_name else True + def Detect(self): output_directory = self._output_directory_finder.Tentative() if output_directory: ret = None - if self._linker_name.startswith('lld'): + if self.IsLld(): ret = os.path.join(SRC_ROOT, 'third_party', 'llvm-build', 'Release+Asserts', 'bin', 'llvm-') else: @@ -182,3 +185,9 @@ def GetReadElfPath(tool_prefix): if tool_prefix[-5:] == 'llvm-': return 'readelf' return tool_prefix + 'readelf' + + +def 
GetBcAnalyzerPath(tool_prefix): + if tool_prefix[-5:] != 'llvm-': + raise ValueError('BC analyzer is only supported in LLVM.') + return tool_prefix + 'bcanalyzer' diff --git a/chromium/tools/binary_size/libsupersize/start_server.py b/chromium/tools/binary_size/libsupersize/start_server.py new file mode 100644 index 00000000000..23ef088f38f --- /dev/null +++ b/chromium/tools/binary_size/libsupersize/start_server.py @@ -0,0 +1,51 @@ +# Copyright 2018 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Runs a server to let the user interact with supersize using a web UI.""" + +import BaseHTTPServer +import logging +import os +import SimpleHTTPServer + + +class SupersizeHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler, + object): + # Directory to serve files from + serve_from = None + # Path to data file + data_file_path = None + + #override + def translate_path(self, path): + f = super(SupersizeHTTPRequestHandler, self).translate_path(path) + relative_path = os.path.relpath(f, os.getcwd()) + if relative_path == 'data.ndjson': + return SupersizeHTTPRequestHandler.data_file_path + else: + return os.path.join(SupersizeHTTPRequestHandler.serve_from, relative_path) + + +def AddArguments(parser): + parser.add_argument('report_file', + help='Path to a custom html_report data file to load.') + parser.add_argument('-p', '--port', type=int, default=8000, + help='Port for the HTTP server') + parser.add_argument('-a', '--address', default='localhost', + help='Address for the HTTP server') + + +def Run(args, _parser): + logging.info('Starting server') + server_addr = ('', args.port) + + static_files = os.path.join(os.path.dirname(__file__), 'static') + + SupersizeHTTPRequestHandler.serve_from = static_files + SupersizeHTTPRequestHandler.data_file_path = args.report_file + httpd = BaseHTTPServer.HTTPServer(server_addr, SupersizeHTTPRequestHandler) + + sa = 
httpd.socket.getsockname() + logging.warning('Server ready at http://%s:%d', sa[0], sa[1]) + httpd.serve_forever() diff --git a/chromium/tools/binary_size/libsupersize/static/favicon.ico b/chromium/tools/binary_size/libsupersize/static/favicon.ico new file mode 100644 index 00000000000..ff2c6a9af95 Binary files /dev/null and b/chromium/tools/binary_size/libsupersize/static/favicon.ico differ diff --git a/chromium/tools/binary_size/libsupersize/static/index.html b/chromium/tools/binary_size/libsupersize/static/index.html new file mode 100644 index 00000000000..3a29bf074d6 --- /dev/null +++ b/chromium/tools/binary_size/libsupersize/static/index.html @@ -0,0 +1,603 @@ + + + + + + Binary Size Analysis + + + + + + + + + + + + + + +
+ + +
+ +
+
+ +
+ +

Size options

+

+ + +

+

+ + +

+ +
+ Group symbols by +
+ + +
+
+ + +
+
+ +
+ Symbol types to show +
+ + +
+
+ + +
+
+ + +
+
+ + +
+
+ + +
+
+ + +
+
+ + +
+
+ + +
+
+ + +
+
+ + +
+
+ + +
+ + +
+ +
+ Advanced filters +

+ + +

+
+ Regular expressions +
+ + +
+
+ + +
+
+
+
+
+ + + +
+
+ Name + Size +
+
    +
    + +
    + + +
    +
    + + + \ No newline at end of file diff --git a/chromium/tools/binary_size/libsupersize/static/infocard-ui.js b/chromium/tools/binary_size/libsupersize/static/infocard-ui.js new file mode 100644 index 00000000000..7d04641f871 --- /dev/null +++ b/chromium/tools/binary_size/libsupersize/static/infocard-ui.js @@ -0,0 +1,328 @@ +// Copyright 2018 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// @ts-check +'use strict'; + +/** + * @fileoverview + * UI classes and methods for the info cards that display informations about + * symbols as the user hovers or focuses on them. + */ + +const displayInfocard = (() => { + const _CANVAS_RADIUS = 40; + + class Infocard { + /** + * @param {string} id + */ + constructor(id) { + this._infocard = document.getElementById(id); + /** @type {HTMLHeadingElement} */ + this._sizeInfo = this._infocard.querySelector('.size-info'); + /** @type {HTMLParagraphElement} */ + this._pathInfo = this._infocard.querySelector('.path-info'); + /** @type {HTMLDivElement} */ + this._iconInfo = this._infocard.querySelector('.icon-info'); + /** @type {HTMLParagraphElement} */ + this._typeInfo = this._infocard.querySelector('.type-info'); + + /** + * Last symbol type displayed. + * Tracked to avoid re-cloning the same icon. + * @type {string} + */ + this._lastType = ''; + } + + /** + * Updates the size header, which normally displayed the byte size of the + * node followed by an abbreviated version. 
+ * + * Example: "1,234 bytes (1.23 KiB)" + * @param {TreeNode} node + */ + _updateSize(node) { + const {description, element, value} = getSizeContents(node); + const sizeFragment = dom.createFragment([ + document.createTextNode(`${description} (`), + element, + document.createTextNode(')'), + ]); + + // Update DOM + setSizeClasses(this._sizeInfo, value); + + dom.replace(this._sizeInfo, sizeFragment); + } + + /** + * Updates the path text, which shows the idPath of the node but highlights + * the symbol name portion using bold text. + * @param {TreeNode} node + */ + _updatePath(node) { + const path = node.idPath.slice(0, node.shortNameIndex); + const boldShortName = dom.textElement( + 'span', + shortName(node), + 'symbol-name-info' + ); + const pathFragment = dom.createFragment([ + document.createTextNode(path), + boldShortName, + ]); + + // Update DOM + dom.replace(this._pathInfo, pathFragment); + } + + /** + * Updates the icon and type text. The type label is pulled from the + * title of the icon supplied. + * @param {SVGSVGElement} icon Icon to display + */ + _setTypeContent(icon) { + const typeDescription = icon.querySelector('title').textContent; + icon.setAttribute('fill', '#fff'); + + this._typeInfo.textContent = typeDescription; + this._iconInfo.removeChild(this._iconInfo.lastElementChild); + this._iconInfo.appendChild(icon); + } + + /** + * Toggle wheter or not the card is visible. + * @param {boolean} isHidden + */ + setHidden(isHidden) { + if (isHidden) { + this._infocard.setAttribute('hidden', ''); + } else { + this._infocard.removeAttribute('hidden'); + } + } + + /** + * Updates the DOM for the info card. + * @param {TreeNode} node + */ + _updateInfocard(node) { + const type = node.type[0]; + + // Update DOM + this._updateSize(node); + this._updatePath(node); + if (type !== this._lastType) { + // No need to create a new icon if it is identical. 
+ const icon = getIconTemplate(type); + this._setTypeContent(icon); + this._lastType = type; + } + } + + /** + * Updates the card on the next animation frame. + * @param {TreeNode} node + */ + updateInfocard(node) { + cancelAnimationFrame(Infocard._pendingFrame); + Infocard._pendingFrame = requestAnimationFrame(() => + this._updateInfocard(node) + ); + } + } + + class SymbolInfocard extends Infocard { + /** + * @param {SVGSVGElement} icon Icon to display + */ + _setTypeContent(icon) { + const color = icon.getAttribute('fill'); + super._setTypeContent(icon); + this._iconInfo.style.backgroundColor = color; + } + } + + class ContainerInfocard extends Infocard { + constructor(id) { + super(id); + this._tableBody = this._infocard.querySelector('tbody'); + this._ctx = this._infocard.querySelector('canvas').getContext('2d'); + + /** + * @type {{[type:string]: HTMLTableRowElement}} Rows in the container + * infocard that represent a particular symbol type. + */ + this._infoRows = { + b: this._tableBody.querySelector('.bss-info'), + d: this._tableBody.querySelector('.data-info'), + r: this._tableBody.querySelector('.rodata-info'), + t: this._tableBody.querySelector('.text-info'), + v: this._tableBody.querySelector('.vtable-info'), + '*': this._tableBody.querySelector('.gen-info'), + x: this._tableBody.querySelector('.dexnon-info'), + m: this._tableBody.querySelector('.dex-info'), + p: this._tableBody.querySelector('.pak-info'), + P: this._tableBody.querySelector('.paknon-info'), + o: this._tableBody.querySelector('.other-info'), + }; + + /** + * Update the DPI of the canvas for zoomed in and high density screens. 
+ */ + const _updateCanvasDpi = () => { + this._ctx.canvas.height = _CANVAS_RADIUS * 2 * devicePixelRatio; + this._ctx.canvas.width = _CANVAS_RADIUS * 2 * devicePixelRatio; + this._ctx.scale(devicePixelRatio, devicePixelRatio); + }; + + _updateCanvasDpi(); + window.addEventListener('resize', _updateCanvasDpi); + } + + /** + * @param {SVGSVGElement} icon Icon to display + */ + _setTypeContent(icon) { + super._setTypeContent(icon); + icon.classList.add('canvas-overlay'); + } + + /** + * Draw a slice of a pie chart. + * @param {number} angleStart Starting angle, in radians. + * @param {number} percentage Percentage of circle to draw. + * @param {string} fillColor Color of the pie slice. + * @param {string} strokeColor Color of the pie slice border. + * @returns {number} Ending angle, in radians. + */ + _drawSlice(angleStart, percentage, fillColor, strokeColor) { + const arcLength = Math.abs(percentage) * 2 * Math.PI; + const angleEnd = angleStart + arcLength; + if (arcLength === 0) return angleEnd; + + // Update DOM + this._ctx.fillStyle = fillColor; + // Move cursor to center, where line will start + this._ctx.beginPath(); + this._ctx.moveTo(40, 40); + // Move cursor to start of arc then draw arc + this._ctx.arc(40, 40, _CANVAS_RADIUS, angleStart, angleEnd); + // Move cursor back to center + this._ctx.closePath(); + this._ctx.fill(); + + if (strokeColor) { + this._ctx.strokeStyle = strokeColor; + this._ctx.lineWidth = 16; + this._ctx.beginPath(); + this._ctx.arc(40, 40, _CANVAS_RADIUS, angleStart, angleEnd); + this._ctx.stroke(); + } + + return angleEnd; + } + + /** + * Update a row in the breakdown table with the given values. + * @param {HTMLTableRowElement} row + * @param {{size:number,count:number} | null} stats Total size of the + * symbols of a given type in a container. + * @param {number} percentage How much the size represents in relation to + * the total size of the symbols in the container. 
+ */ + _updateBreakdownRow(row, stats, percentage) { + if (stats == null || stats.size === 0) { + if (row.parentElement != null) { + this._tableBody.removeChild(row); + } + return; + } + + const countColumn = row.querySelector('.count'); + const sizeColumn = row.querySelector('.size'); + const percentColumn = row.querySelector('.percent'); + + const countString = stats.count.toLocaleString(_LOCALE, { + useGrouping: true, + }); + const sizeString = stats.size.toLocaleString(_LOCALE, { + minimumFractionDigits: 2, + maximumFractionDigits: 2, + useGrouping: true, + }); + const percentString = percentage.toLocaleString(_LOCALE, { + style: 'percent', + minimumFractionDigits: 2, + maximumFractionDigits: 2, + }); + + // Update DOM + countColumn.textContent = countString; + sizeColumn.textContent = sizeString; + percentColumn.textContent = percentString; + this._tableBody.appendChild(row); + } + + /** + * Update DOM for the container infocard + * @param {TreeNode} containerNode + */ + _updateInfocard(containerNode) { + const extraRows = {...this._infoRows}; + const statsEntries = Object.entries(containerNode.childStats).sort( + (a, b) => b[1].size - a[1].size + ); + const diffMode = state.has('diff_mode'); + let totalSize = 0; + for (const [, stats] of statsEntries) { + totalSize += Math.abs(stats.size); + } + + // Update DOM + super._updateInfocard(containerNode); + let angleStart = 0; + for (const [type, stats] of statsEntries) { + delete extraRows[type]; + const {color} = getIconStyle(type); + const percentage = stats.size / totalSize; + let stroke = ''; + if (diffMode) { + stroke = stats.size > 0 ? 
'#ea4335' : '#34a853'; + } + + angleStart = this._drawSlice(angleStart, percentage, color, stroke); + this._updateBreakdownRow(this._infoRows[type], stats, percentage); + } + + // Hide unused types + for (const row of Object.values(extraRows)) { + this._updateBreakdownRow(row, null, 0); + } + } + } + + const _containerInfo = new ContainerInfocard('infocard-container'); + const _symbolInfo = new SymbolInfocard('infocard-symbol'); + + /** + * Displays an infocard for the given symbol on the next frame. + * @param {TreeNode} node + */ + function displayInfocard(node) { + if (_CONTAINER_TYPE_SET.has(node.type[0])) { + _containerInfo.updateInfocard(node); + _containerInfo.setHidden(false); + _symbolInfo.setHidden(true); + } else { + _symbolInfo.updateInfocard(node); + _symbolInfo.setHidden(false); + _containerInfo.setHidden(true); + } + } + + return displayInfocard; +})(); diff --git a/chromium/tools/binary_size/libsupersize/static/infocard.css b/chromium/tools/binary_size/libsupersize/static/infocard.css new file mode 100644 index 00000000000..b8105844067 --- /dev/null +++ b/chromium/tools/binary_size/libsupersize/static/infocard.css @@ -0,0 +1,113 @@ +/* Copyright 2018 The Chromium Authors. All rights reserved. + * Use of this source code is governed by a BSD-style license that can be + * found in the LICENSE file. 
*/ + +.infocards { + visibility: hidden; + position: fixed; + bottom: 8px; + left: 8px; + right: 8px; + margin: 0 auto; + max-width: 512px; + max-height: 50vh; + overflow-y: auto; + background: white; + border-radius: 8px; + box-shadow: 0 1px 2px #3c40434d, 0 1px 3px 1px #3c404326; + transform: translateY(16px); + opacity: 0; + transition: 0.3s ease transform, 0.3s ease opacity, 0.3s ease visibility; +} +.tree-container:hover ~ .infocards, +.tree-container.focused ~ .infocards { + visibility: visible; + opacity: 1; + transform: none; +} + +.infocard { + display: grid; + padding: 16px; +} +.infocard-container { + grid-template-areas: 'header icon' 'type type'; + grid-template-columns: auto 80px; + grid-column-gap: 16px; + grid-row-gap: 8px; +} +.infocard-symbol { + grid-template-areas: 'icon header' 'type type'; + grid-template-columns: 40px auto; + grid-column-gap: 16px; +} + +.infocard[hidden] { + display: none; +} +@media (min-width: 700px) { + .show-options .infocards { + right: 256px; + } +} + +.icon-info { + grid-area: icon; + align-self: center; + padding: 8px 2px 8px 8px; + border-radius: 50%; +} +.container-icon-info { + position: relative; + padding: 0; + height: 80px; +} +.header-info { + grid-area: header; + color: #202124; +} +.size-info { + margin: 0 0 2px; +} +.path-info { + margin: 0 0 8px; + word-break: break-word; + color: #3c4043; +} +.symbol-name-info { + font-weight: 500; +} +.type-info { + grid-area: type; + margin-bottom: 0; +} + +.type-pie-info { + height: 80px; + width: 80px; + border-radius: 50%; +} +.type-breakdown-info { + grid-area: type; + border-top: 1px solid #dadce0; + padding-top: 8px; + clear: right; + height: 0; /* Fixes bug with table height in Firefox */ +} +.canvas-overlay { + position: absolute; + top: 0; + left: 0; + right: 0; + bottom: 0; + margin: auto; +} + +th { + text-align: left; +} +th[scope='row'], +td { + font-weight: normal; + font-size: 14px; +} diff --git 
a/chromium/tools/binary_size/libsupersize/static/options.css b/chromium/tools/binary_size/libsupersize/static/options.css new file mode 100644 index 00000000000..20b15235cab --- /dev/null +++ b/chromium/tools/binary_size/libsupersize/static/options.css @@ -0,0 +1,291 @@ +/* Copyright 2018 The Chromium Authors. All rights reserved. + * Use of this source code is governed by a BSD-style license that can be + * found in the LICENSE file. */ + +/** Body modifier class, indicates when options should be visible. */ +.show-options { + grid-template-columns: auto 256px; +} + +.show-options .options { + visibility: visible; +} +.show-options .settings { + fill: #1a73e8; +} + +/** Black overlay shown on smaller screens when options is visible. */ +.scrim { + z-index: 5; /* Side panel layer */ + position: fixed; + top: 0; + left: 0; + bottom: 0; + right: 256px; + background: #00000050; +} + +/** Options side panel */ +.options { + z-index: 5; /* Side panel layer */ + grid-area: options; + padding: 0 16px; + overflow-y: auto; + position: fixed; + right: 0; + top: 0; + height: 100vh; + width: 224px; + background: #fffffff5; + box-shadow: 0 1px 2px #3c40434d, 0 2px 6px 2px #3c404326; +} + +fieldset { + border: 0; + padding: 0; + margin: 2em 0 1em; +} +.options fieldset:first-of-type { + margin-top: 1em; +} +legend { + margin: 1em 0; + padding: 0; +} + +/** Toolbar */ +.form-bar { + display: flex; + justify-content: flex-end; + height: 64px; + align-items: center; +} + +/** Buttons */ +.icon-button, +.text-button { + display: inline-flex; + align-items: center; + justify-content: center; + cursor: pointer; + background: transparent; + border: 0; +} + +.icon-button { + height: 40px; + width: 40px; + border-radius: 50%; +} +.icon-button:hover { + background: #0000001f; +} + +.text-button { + padding: 0 8px; + line-height: 36px; + border-radius: 4px; + color: #1a73e8; + font-family: 'Google Sans', Arial, sans-serif; + font-weight: 500; + font-size: 14px; +} +.text-button:hover { + 
background: #d2e3fc80; +} +.text-button:hover:focus { + background: #d2e3fc; +} +.text-button:focus, +.text-button:active { + background: #d2e3fce6; +} + +.text-button.with-icon { + display: flex; + align-items: center; + padding: 0 16px 0 12px; + margin: 0 8px; +} + +.filled-button { + background: #1a73e8; + color: white; +} +.filled-button:hover { + background: #287ae6; + box-shadow: 0 1px 2px #3c40434d, 0 1px 3px 1px #3c404326; +} +input:focus + label.filled-button:hover { + background: #5d9cee; +} +input:focus + label.filled-button { + background: #5094ed; + outline: #2e3436 dotted 1px; + outline: -webkit-focus-ring-color auto 5px; +} +.filled-button:active, +input:focus + label.filled-button:active { + background: #1a73e8; + box-shadow: 0 1px 2px #3c40434d, 0 2px 6px 2px #3c404326; +} + +/** or elements */ +input[type='checkbox'], +input[type='radio'] { + position: absolute; + margin: 0; + height: 18px; + width: 18px; + opacity: 0; +} +.checkbox-wrapper, +.radio-wrapper { + position: relative; +} +.checkbox-label, +.radio-label { + display: block; + position: relative; + padding-left: 34px; + margin: 4px 0; + cursor: pointer; + font-size: 14px; +} +.checkbox-label::before, +.checkbox-label::after, +.radio-label::before, +.radio-label::after { + position: absolute; + content: ''; + border: 2px solid currentColor; +} +.checkbox-label::before, +.radio-label::before { + width: 14px; + height: 14px; + border-radius: 2px; + left: 0; +} +.checkbox-label::after, +.radio-label::after { + width: 4px; + opacity: 0; + transition: opacity 0.2s ease; +} +.checkbox-label::after { + height: 9px; + left: 6px; + top: 2px; + border-top-width: 0; + border-left-width: 0; + transform: rotate(45deg); +} +.radio-label::before { + border-radius: 50%; +} +.radio-label::after { + height: 4px; + left: 5px; + top: 5px; + background: currentColor; + border-radius: 50%; +} +input[type='checkbox']:checked + .checkbox-label, +input[type='radio']:checked + .radio-label { + color: #1a73e8; +} 
+input[type='checkbox']:checked + .checkbox-label::after, +input[type='radio']:checked + .radio-label::after { + opacity: 1; +} +input[type='checkbox']:disabled + .checkbox-label, +input[type='radio']:disabled + .radio-label { + color: #80868b; +} +input[type='checkbox']:focus + .checkbox-label, +input[type='radio']:focus + .radio-label { + outline: #2e3436 dotted 1px; + outline: -webkit-focus-ring-color auto 5px; +} + +input[type='file'] { + opacity: 0; +} + +/** Tweaks for smaller screen sizes */ +@media (max-width: 700px) { + .show-options { + grid-template-columns: auto 0; + } + .show-options .scrim { + display: block; + } + .appbar, + .symbols { + padding: 0 16px; + } + .appbar-progress { + margin: 0 -16px; + width: calc(100% + 32px); + } +} diff --git a/chromium/tools/binary_size/libsupersize/static/shared.js b/chromium/tools/binary_size/libsupersize/static/shared.js new file mode 100644 index 00000000000..6892a57138a --- /dev/null +++ b/chromium/tools/binary_size/libsupersize/static/shared.js @@ -0,0 +1,141 @@ +// Copyright 2018 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// @ts-check +'use strict'; + +/** + * @fileoverview + * Constants used by both the UI and Web Worker scripts. + */ + +/** + * @typedef {object} TreeNode Node object used to represent the file tree. Can + * represent either a container or a symbol. + * @prop {TreeNode[] | null} children Child tree nodes. Null values indicate + * that there are children, but that they haven't been loaded in yet. Empty + * arrays indicate this is a leaf node. + * @prop {TreeNode | null} parent Parent tree node. null if this is a root node. + * @prop {string} idPath Full path to this node. + * @prop {number} shortNameIndex The name of the node is include in the idPath. + * This index indicates where to start to slice the idPath to read the name. 
+ * @prop {number} size Byte size of this node and its children. + * @prop {string} type Type of this node. If this node has children, the string + * may have a second character to denote the most common child. + * @prop {{[type: string]: {size:number,count:number}}} childStats Stats about + * this node's descendants, organized by symbol type. + */ + +/** + * @typedef {object} TreeProgress + * @prop {TreeNode} root Root node and its direct children. + * @prop {number} percent Number from (0-1] to represent percentage. + * @prop {boolean} diffMode True if we are currently showing the diff of two + * different size files. + * @prop {string} [error] Error message, if an error occured in the worker. + * If unset, then there was no error. + */ + +/** + * @typedef {object} GetSizeResult + * @prop {string} description Description of the size, shown as hover text + * @prop {Node} element Abbreviated representation of the size, which can + * include DOM elements for styling. + * @prop {number} value The size number used to create the other strings. + */ +/** + * @typedef {(node: TreeNode, unit: string) => GetSizeResult} GetSize + */ + +/** Abberivated keys used by FileEntrys in the JSON data file. */ +const _KEYS = Object.freeze({ + SOURCE_PATH: /** @type {'p'} */ ('p'), + COMPONENT_INDEX: /** @type {'c'} */ ('c'), + FILE_SYMBOLS: /** @type {'s'} */ ('s'), + SYMBOL_NAME: /** @type {'n'} */ ('n'), + SIZE: /** @type {'b'} */ ('b'), + TYPE: /** @type {'t'} */ ('t'), + COUNT: /** @type {'u'} */ ('u'), +}); + +/** + * @enum {number} Various byte units and the corresponding amount of bytes + * that one unit represents. + */ +const _BYTE_UNITS = Object.freeze({ + GiB: 1024 ** 3, + MiB: 1024 ** 2, + KiB: 1024 ** 1, + B: 1024 ** 0, +}); +/** Set of all byte units */ +const _BYTE_UNITS_SET = new Set(Object.keys(_BYTE_UNITS)); + +/** + * Special types used by containers, such as folders and files. 
+ */ +const _CONTAINER_TYPES = { + DIRECTORY: 'D', + COMPONENT: 'C', + FILE: 'F', + JAVA_CLASS: 'J', +}; +const _CONTAINER_TYPE_SET = new Set(Object.values(_CONTAINER_TYPES)); + +/** Type for a dex method symbol */ +const _DEX_METHOD_SYMBOL_TYPE = 'm'; +/** Type for an 'other' symbol */ +const _OTHER_SYMBOL_TYPE = 'o'; + +/** Set of all known symbol types. Container types are not included. */ +const _SYMBOL_TYPE_SET = new Set('bdrtv*xmpP' + _OTHER_SYMBOL_TYPE); + +/** Name used by a directory created to hold symbols with no name. */ +const _NO_NAME = '(No path)'; + +/** Key where type is stored in the query string state. */ +const _TYPE_STATE_KEY = 'type'; + +/** @type {string | string[]} */ +const _LOCALE = navigator.languages || navigator.language; + +/** + * Returns shortName for a tree node. + * @param {TreeNode} node + */ +function shortName(node) { + return node.idPath.slice(node.shortNameIndex); +} + +/** + * Iterate through each type in the query string. Types can be expressed as + * repeats of the same key in the query string ("type=b&type=p") or as a long + * string with multiple characters ("type=bp"). + * @param {string[]} typesList All values associated with the "type" key in the + * query string. + */ +function* types(typesList) { + for (const typeOrTypes of typesList) { + for (const typeChar of typeOrTypes) { + yield typeChar; + } + } +} + +/** + * Limit how frequently `func` is called. + * @template {T} + * @param {T & Function} func + * @param {number} wait Time to wait before func can be called again (ms). 
+ * @returns {T} + */ +function debounce(func, wait) { + /** @type {number} */ + let timeoutId; + function debounced (...args) { + clearTimeout(timeoutId); + timeoutId = setTimeout(() => func(...args), wait); + }; + return /** @type {any} */ (debounced); +} diff --git a/chromium/tools/binary_size/libsupersize/static/start-worker.js b/chromium/tools/binary_size/libsupersize/static/start-worker.js new file mode 100644 index 00000000000..fb983b29cb1 --- /dev/null +++ b/chromium/tools/binary_size/libsupersize/static/start-worker.js @@ -0,0 +1,95 @@ +// Copyright 2018 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// @ts-check +'use strict'; + +const _innerWorker = new Worker('tree-worker.js'); + +/** + * We use a worker to keep large tree creation logic off the UI thread. + * This class is used to interact with the worker. + */ +class TreeWorker { + /** + * @param {Worker} worker Web worker to wrap + */ + constructor(worker) { + this._worker = worker; + /** ID counter used by `waitForResponse` */ + this._requestId = 1; + + /** @type {(data: TreeProgress) => void | null} callback for `loadTree` */ + this._loadTreeCallback = null; + + this._worker.addEventListener('message', event => { + if (this._loadTreeCallback && event.data.id === 0) { + this._loadTreeCallback(event.data); + } + }); + } + + /** + * + * @param {string} action + * @param {any} data + */ + _waitForResponse(action, data) { + const id = ++this._requestId; + return new Promise((resolve, reject) => { + const handleResponse = event => { + if (event.data.id === id) { + this._worker.removeEventListener('message', handleResponse); + if (event.data.error) { + reject(event.data.error); + } else { + resolve(event.data.result); + } + } + }; + + this._worker.addEventListener('message', handleResponse); + this._worker.postMessage({id, action, data}); + }); + } + + /** + * Get data for a node with `idPath`. 
Loads information about the node and its + * direct children. Deeper children can be loaded by calling this function + * again. + * @param {string} idPath Path of the node to find + * @returns {Promise} + */ + openNode(idPath) { + return this._waitForResponse('open', idPath); + } + + /** + * Set callback used after `loadTree` is first called. + * @param {(data: TreeProgress) => void} callback Called when the worker + * has some data to display. Complete when `progress` is 1. + */ + setOnProgressHandler(callback) { + this._loadTreeCallback = callback; + } + + /** + * Loads the tree data given on a worker thread and replaces the tree view in + * the UI once complete. Uses query string as state for the options. + * Use `onProgress` before calling `loadTree`. + * @param {string} input + * @returns {Promise} + */ + loadTree(input = null) { + return this._waitForResponse('load', { + input, + options: location.search.slice(1), + }); + } +} + +const worker = new TreeWorker(_innerWorker); +// Kick off the worker ASAP so it can start parsing data faster. +// Subsequent calls will just use a worker locally. +const treeReady = worker.loadTree('data.ndjson'); diff --git a/chromium/tools/binary_size/libsupersize/static/state.js b/chromium/tools/binary_size/libsupersize/static/state.js new file mode 100644 index 00000000000..0b34bdce6c8 --- /dev/null +++ b/chromium/tools/binary_size/libsupersize/static/state.js @@ -0,0 +1,402 @@ +// Copyright 2018 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +// @ts-check +'use strict'; + +/** + * @fileoverview + * Methods for manipulating the state and the DOM of the page + */ + +/** @type {HTMLFormElement} Form containing options and filters */ +const form = document.getElementById('options'); + +/** Utilities for working with the DOM */ +const dom = { + /** + * Create a document fragment from the given nodes + * @param {Iterable} nodes + * @returns {DocumentFragment} + */ + createFragment(nodes) { + const fragment = document.createDocumentFragment(); + for (const node of nodes) fragment.appendChild(node); + return fragment; + }, + /** + * Removes all the existing children of `parent` and inserts + * `newChild` in their place + * @param {Node} parent + * @param {Node | null} newChild + */ + replace(parent, newChild) { + while (parent.firstChild) parent.removeChild(parent.firstChild); + if (newChild != null) parent.appendChild(newChild); + }, + /** + * Builds a text element in a single statement. + * @param {string} tagName Type of the element, such as "span". + * @param {string} text Text content for the element. + * @param {string} [className] Class to apply to the element. + */ + textElement(tagName, text, className) { + const element = document.createElement(tagName); + element.textContent = text; + if (className) element.className = className; + return element; + }, +}; + +/** Build utilities for working with the state. */ +function _initState() { + const _DEFAULT_FORM = new FormData(form); + + /** + * State is represented in the query string and + * can be manipulated by this object. Keys in the query match with + * input names. + */ + let _filterParams = new URLSearchParams(location.search.slice(1)); + const typeList = _filterParams.getAll(_TYPE_STATE_KEY); + _filterParams.delete(_TYPE_STATE_KEY); + for (const type of types(typeList)) { + _filterParams.append(_TYPE_STATE_KEY, type); + } + + const state = Object.freeze({ + /** + * Returns a string from the current query string state. 
+ * Can optionally restrict valid values for the query. + * Values not present in the query will return null, or the default + * value if supplied. + * @param {string} key + * @param {object} [options] + * @param {string} [options.default] Default to use if key is not present + * in the state + * @param {Set} [options.valid] If provided, values must be in this + * set to be returned. Invalid values will return null or `defaultValue`. + * @returns {string | null} + */ + get(key, options = {}) { + const [val = null] = state.getAll(key, { + default: options.default ? [options.default] : null, + valid: options.valid, + }); + return val; + }, + /** + * Returns all string values for a key from the current query string state. + * Can optionally provide default values used if there are no values. + * @param {string} key + * @param {object} [options] + * @param {string[]} [options.default] Default to use if key is not present + * in the state. + * @param {Set} [options.valid] If provided, values must be in this + * set to be returned. Invalid values will be omitted. + * @returns {string[]} + */ + getAll(key, options = {}) { + let vals = _filterParams.getAll(key); + if (options.valid != null) { + vals = vals.filter(val => options.valid.has(val)); + } + if (options.default != null && vals.length === 0) { + vals = options.default; + } + return vals; + }, + /** + * Checks if a key is present in the query string state. + * @param {string} key + * @returns {boolean} + */ + has(key) { + return _filterParams.has(key); + }, + /** + * Formats the filter state as a string. + */ + toString() { + const copy = new URLSearchParams(_filterParams); + const types = [...new Set(copy.getAll(_TYPE_STATE_KEY))]; + if (types.length > 0) copy.set(_TYPE_STATE_KEY, types.join('')); + return `?${copy.toString()}`; + }, + /** + * Saves a key and value into a temporary state not displayed in the URL. 
+ * @param {string} key + * @param {string | null} value + */ + set(key, value) { + if (value == null) { + _filterParams.delete(key); + } else { + _filterParams.set(key, value); + } + history.replaceState(null, null, state.toString()); + }, + }); + + // Update form inputs to reflect the state from URL. + for (const element of form.elements) { + if (element.name) { + const input = /** @type {HTMLInputElement} */ (element); + const values = _filterParams.getAll(input.name); + const [value] = values; + if (value) { + switch (input.type) { + case 'checkbox': + input.checked = values.includes(input.value); + break; + case 'radio': + input.checked = value === input.value; + break; + default: + input.value = value; + break; + } + } + } + } + + /** + * Yields only entries that have been modified in + * comparison to `_DEFAULT_FORM`. + * @param {FormData} modifiedForm + */ + function* onlyChangedEntries(modifiedForm) { + // Remove default values + for (const key of modifiedForm.keys()) { + const modifiedValues = modifiedForm.getAll(key); + const defaultValues = _DEFAULT_FORM.getAll(key); + + const valuesChanged = + modifiedValues.length !== defaultValues.length || + modifiedValues.some((v, i) => v !== defaultValues[i]); + if (valuesChanged) { + for (const value of modifiedValues) { + yield [key, value]; + } + } + } + } + + // Update the state when the form changes. 
+ function _updateStateFromForm() { + const modifiedForm = new FormData(form); + _filterParams = new URLSearchParams(onlyChangedEntries(modifiedForm)); + history.replaceState(null, null, state.toString()); + } + + form.addEventListener('change', _updateStateFromForm); + + return state; +} + +function _startListeners() { + const _SHOW_OPTIONS_STORAGE_KEY = 'show-options'; + + /** @type {HTMLFieldSetElement} */ + const typesFilterContainer = document.getElementById('types-filter'); + /** @type {HTMLInputElement} */ + const methodCountInput = form.elements.namedItem('method_count'); + /** @type {HTMLFieldSetElement} */ + const byteunit = form.elements.namedItem('byteunit'); + /** @type {HTMLCollectionOf} */ + const typeCheckboxes = form.elements.namedItem(_TYPE_STATE_KEY); + /** @type {HTMLSpanElement} */ + const sizeHeader = document.getElementById('size-header'); + + /** + * The settings dialog on the side can be toggled on and off by elements with + * a 'toggle-options' class. + */ + function _toggleOptions() { + const openedOptions = document.body.classList.toggle('show-options'); + localStorage.setItem(_SHOW_OPTIONS_STORAGE_KEY, openedOptions.toString()); + } + for (const button of document.getElementsByClassName('toggle-options')) { + button.addEventListener('click', _toggleOptions); + } + // Default to open if getItem returns null + if (localStorage.getItem(_SHOW_OPTIONS_STORAGE_KEY) !== 'false') { + document.body.classList.add('show-options'); + } + + /** + * Disable some fields when method_count is set + */ + function setMethodCountModeUI() { + if (methodCountInput.checked) { + byteunit.setAttribute('disabled', ''); + typesFilterContainer.setAttribute('disabled', ''); + sizeHeader.textContent = 'Methods'; + } else { + byteunit.removeAttribute('disabled'); + typesFilterContainer.removeAttribute('disabled'); + sizeHeader.textContent = 'Size'; + } + } + setMethodCountModeUI(); + methodCountInput.addEventListener('change', setMethodCountModeUI); + + 
document.getElementById('type-all').addEventListener('click', () => { + for (const checkbox of typeCheckboxes) { + checkbox.checked = true; + } + form.dispatchEvent(new Event('change')); + }); + document.getElementById('type-none').addEventListener('click', () => { + for (const checkbox of typeCheckboxes) { + checkbox.checked = false; + } + form.dispatchEvent(new Event('change')); + }); +} + +function _makeIconTemplateGetter() { + const _icons = document.getElementById('icons'); + + /** + * @type {{[type:string]: SVGSVGElement}} Icon elements + * that correspond to each symbol type. + */ + const symbolIcons = { + D: _icons.querySelector('.foldericon'), + C: _icons.querySelector('.componenticon'), + J: _icons.querySelector('.javaclassicon'), + F: _icons.querySelector('.fileicon'), + b: _icons.querySelector('.bssicon'), + d: _icons.querySelector('.dataicon'), + r: _icons.querySelector('.readonlyicon'), + t: _icons.querySelector('.codeicon'), + v: _icons.querySelector('.vtableicon'), + '*': _icons.querySelector('.generatedicon'), + x: _icons.querySelector('.dexicon'), + m: _icons.querySelector('.dexmethodicon'), + p: _icons.querySelector('.localpakicon'), + P: _icons.querySelector('.nonlocalpakicon'), + o: _icons.querySelector('.othericon'), // used as default icon + }; + + /** @type {Map} */ + const iconInfoCache = new Map(); + + /** + * Returns the SVG icon template element corresponding to the given type. + * @param {string} type Symbol type character. + * @param {boolean} readonly If true, the original template is returned. + * If false, a copy is returned that can be modified. + * @returns {SVGSVGElement} + */ + function getIconTemplate(type, readonly = false) { + const iconTemplate = symbolIcons[type] || symbolIcons[_OTHER_SYMBOL_TYPE]; + return readonly ? iconTemplate : iconTemplate.cloneNode(true); + } + + /** + * Returns style info about SVG icon template element corresponding to the + * given type. + * @param {string} type Symbol type character. 
+ */ + function getIconStyle(type) { + let info = iconInfoCache.get(type); + if (info == null) { + const icon = getIconTemplate(type, true); + info = { + color: icon.getAttribute('fill'), + description: icon.querySelector('title').textContent, + }; + iconInfoCache.set(type, info); + } + return info; + } + + return {getIconTemplate, getIconStyle}; +} + +function _makeSizeTextGetter() { + const _SIZE_CHANGE_CUTOFF = 50000; + + /** + * Create the contents for the size element of a tree node. + * The unit to use is selected from the current state. + * + * If in method count mode, size instead represents the amount of methods in + * the node. Otherwise, the original number of bytes will be displayed. + * + * @param {TreeNode} node Node whose size is the number of bytes to use for + * the size text + * @returns {GetSizeResult} Object with hover text title and + * size element body. Can be consumed by `_applySizeFunc()` + */ + function getSizeContents(node) { + if (state.has('method_count')) { + const {count: methodCount = 0} = + node.childStats[_DEX_METHOD_SYMBOL_TYPE] || {}; + const methodStr = methodCount.toLocaleString(_LOCALE, { + useGrouping: true, + }); + + return { + element: document.createTextNode(methodStr), + description: `${methodStr} method${methodCount === 1 ? 
'' : 's'}`, + value: methodCount, + }; + } else { + const bytes = node.size; + const unit = state.get('byteunit', { + default: 'MiB', + valid: _BYTE_UNITS_SET, + }); + // Format the bytes as a number with 2 digits after the decimal point + const text = (bytes / _BYTE_UNITS[unit]).toLocaleString(_LOCALE, { + minimumFractionDigits: 2, + maximumFractionDigits: 2, + }); + const textNode = document.createTextNode(`${text} `); + + // Display the suffix with a smaller font + const suffixElement = dom.textElement('small', unit); + + const bytesGrouped = bytes.toLocaleString(_LOCALE, {useGrouping: true}); + + return { + element: dom.createFragment([textNode, suffixElement]), + description: `${bytesGrouped} bytes`, + value: bytes, + }; + } + } + + /** + * Set classes on an element based on the size it represents. + * @param {HTMLElement} sizeElement + * @param {number} value + */ + function setSizeClasses(sizeElement, value) { + const shouldHaveStyle = + state.has('diff_mode') && Math.abs(value) > _SIZE_CHANGE_CUTOFF; + if (shouldHaveStyle) { + if (value < 0) { + sizeElement.classList.add('shrunk'); + sizeElement.classList.remove('grew'); + } else { + sizeElement.classList.remove('shrunk'); + sizeElement.classList.add('grew'); + } + } else { + sizeElement.classList.remove('shrunk', 'grew'); + } + } + + return {getSizeContents, setSizeClasses}; +} + +/** Utilities for working with the state */ +const state = _initState(); +const {getIconTemplate, getIconStyle} = _makeIconTemplateGetter(); +const {getSizeContents, setSizeClasses} = _makeSizeTextGetter(); +_startListeners(); diff --git a/chromium/tools/binary_size/libsupersize/static/tree-ui.js b/chromium/tools/binary_size/libsupersize/static/tree-ui.js new file mode 100644 index 00000000000..5e8d6143b7f --- /dev/null +++ b/chromium/tools/binary_size/libsupersize/static/tree-ui.js @@ -0,0 +1,411 @@ +// Copyright 2018 The Chromium Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// @ts-check +'use strict'; + +/** + * @fileoverview + * UI classes and methods for the Tree View in the + * Binary Size Analysis HTML report. + */ + +{ + /** Capture one of: "::", "../", "./", "/", "#" */ + const _SPECIAL_CHAR_REGEX = /(::|(?:\.*\/)+|#)/g; + /** Insert zero-width space after capture group */ + const _ZERO_WIDTH_SPACE = '$&\u200b'; + + // Templates for tree nodes in the UI. + /** @type {HTMLTemplateElement} Template for leaves in the tree */ + const _leafTemplate = document.getElementById('treenode-symbol'); + /** @type {HTMLTemplateElement} Template for trees */ + const _treeTemplate = document.getElementById('treenode-container'); + + const _symbolTree = document.getElementById('symboltree'); + + /** + * @type {HTMLCollectionOf} + * HTMLCollection of all nodes. Updates itself automatically. + */ + const _liveNodeList = document.getElementsByClassName('node'); + /** + * @type {HTMLCollectionOf} + * HTMLCollection of all size elements. Updates itself automatically. + */ + const _liveSizeSpanList = document.getElementsByClassName('size'); + + /** + * @type {WeakMap>} + * Associates UI nodes with the corresponding tree data object + * so that event listeners and other methods can + * query the original data. + */ + const _uiNodeData = new WeakMap(); + + /** + * Sets focus to a new tree element while updating the element that last had + * focus. The tabindex property is used to avoid needing to tab through every + * single tree item in the page to reach other areas. + * @param {number | HTMLElement} el Index of tree node in `_liveNodeList` + */ + function _focusTreeElement(el) { + const lastFocused = document.activeElement; + if (_uiNodeData.has(lastFocused)) { + // Update DOM + lastFocused.tabIndex = -1; + } + const element = typeof el === 'number' ? 
_liveNodeList[el] : el; + if (element != null) { + // Update DOM + element.tabIndex = 0; + element.focus(); + } + } + + /** + * Click event handler to expand or close the child group of a tree. + * @param {Event} event + */ + async function _toggleTreeElement(event) { + event.preventDefault(); + + /** @type {HTMLAnchorElement | HTMLSpanElement} */ + const link = event.currentTarget; + const element = link.parentElement; + const group = link.nextElementSibling; + + const isExpanded = element.getAttribute('aria-expanded') === 'true'; + if (isExpanded) { + // Update DOM + element.setAttribute('aria-expanded', 'false'); + dom.replace(group, null); + } else { + element.setAttribute('aria-expanded', 'true'); + + let data = _uiNodeData.get(link); + if (data == null || data.children == null) { + const idPath = link.querySelector('.symbol-name').title; + data = await worker.openNode(idPath); + _uiNodeData.set(link, data); + } + + const newElements = data.children.map(child => newTreeElement(child)); + if (newElements.length === 1) { + // Open the inner element if it only has a single child. + // Ensures nodes like "java"->"com"->"google" are opened all at once. 
+ newElements[0].querySelector('.node').click(); + } + const newElementsFragment = dom.createFragment(newElements); + + // Update DOM + requestAnimationFrame(() => { + group.appendChild(newElementsFragment); + }); + } + } + + /** + * Keydown event handler to move focus for the given element + * @param {KeyboardEvent} event + */ + function _handleKeyNavigation(event) { + /** @type {HTMLAnchorElement | HTMLSpanElement} */ + const link = event.target; + const focusIndex = Array.prototype.indexOf.call(_liveNodeList, link); + + /** Focus the tree element immediately following this one */ + function _focusNext() { + if (focusIndex > -1 && focusIndex < _liveNodeList.length - 1) { + event.preventDefault(); + _focusTreeElement(focusIndex + 1); + } + } + + /** Open or close the tree element */ + function _toggle() { + event.preventDefault(); + link.click(); + } + + /** + * Focus the tree element at `index` if it starts with `char` + * @param {string} char + * @param {number} index + */ + function _focusIfStartsWith(char, index) { + const data = _uiNodeData.get(_liveNodeList[index]); + if (shortName(data).startsWith(char)) { + event.preventDefault(); + _focusTreeElement(index); + return true; + } else { + return false; + } + } + + switch (event.key) { + // Space should act like clicking or pressing enter & toggle the tree. + case ' ': + _toggle(); + break; + // Move to previous focusable node + case 'ArrowUp': + if (focusIndex > 0) { + event.preventDefault(); + _focusTreeElement(focusIndex - 1); + } + break; + // Move to next focusable node + case 'ArrowDown': + _focusNext(); + break; + // If closed tree, open tree. Otherwise, move to first child. + case 'ArrowRight': { + const data = _uiNodeData.get(link); + if (!data.children || data.children.length !== 0) { + const isExpanded = + link.parentElement.getAttribute('aria-expanded') === 'true'; + if (isExpanded) { + _focusNext(); + } else { + _toggle(); + } + } + break; + } + // If opened tree, close tree. 
Otherwise, move to parent. + case 'ArrowLeft': + { + const isExpanded = + link.parentElement.getAttribute('aria-expanded') === 'true'; + if (isExpanded) { + _toggle(); + } else { + const groupList = link.parentElement.parentElement; + if (groupList.getAttribute('role') === 'group') { + event.preventDefault(); + _focusTreeElement(groupList.previousElementSibling); + } + } + } + break; + // Focus first node + case 'Home': + event.preventDefault(); + _focusTreeElement(0); + break; + // Focus last node on screen + case 'End': + event.preventDefault(); + _focusTreeElement(_liveNodeList.length - 1); + break; + // Expand all sibling nodes + case '*': + const groupList = link.parentElement.parentElement; + if (groupList.getAttribute('role') === 'group') { + event.preventDefault(); + for (const li of groupList.children) { + if (li.getAttribute('aria-expanded') !== 'true') { + li.querySelector('.node').click(); + } + } + } + break; + // If a letter was pressed, find a node starting with that character. + default: + if (event.key.length === 1 && event.key.match(/\S/)) { + for (let i = focusIndex + 1; i < _liveNodeList.length; i++) { + if (_focusIfStartsWith(event.key, i)) return; + } + for (let i = 0; i < focusIndex; i++) { + if (_focusIfStartsWith(event.key, i)) return; + } + } + break; + } + } + + /** + * Replace the contents of the size element for a tree node. + * @param {HTMLElement} sizeElement Element that should display the size + * @param {TreeNode} node + */ + function _setSize(sizeElement, node) { + const {description, element, value} = getSizeContents(node); + + // Replace the contents of '.size' and change its title + dom.replace(sizeElement, element); + sizeElement.title = description; + setSizeClasses(sizeElement, value); + } + + /** + * Inflate a template to create an element that represents one tree node. + * The element will represent a tree or a leaf, depending on if the tree + * node object has any children. 
Trees use a slightly different template + * and have click event listeners attached. + * @param {TreeNode} data Data to use for the UI. + * @returns {DocumentFragment} + */ + function newTreeElement(data) { + const isLeaf = data.children && data.children.length === 0; + const template = isLeaf ? _leafTemplate : _treeTemplate; + const element = document.importNode(template.content, true); + + // Associate clickable node & tree data + /** @type {HTMLAnchorElement | HTMLSpanElement} */ + const link = element.querySelector('.node'); + _uiNodeData.set(link, Object.freeze(data)); + + // Icons are predefined in the HTML through hidden SVG elements + const type = data.type[0]; + const icon = getIconTemplate(type); + if (!isLeaf) { + const symbolStyle = getIconStyle(data.type[1]); + icon.setAttribute('fill', symbolStyle.color); + } + // Insert an SVG icon at the start of the link to represent type + link.insertBefore(icon, link.firstElementChild); + + // Set the symbol name and hover text + /** @type {HTMLSpanElement} */ + const symbolName = element.querySelector('.symbol-name'); + symbolName.textContent = shortName(data).replace( + _SPECIAL_CHAR_REGEX, + _ZERO_WIDTH_SPACE + ); + symbolName.title = data.idPath; + + if (state.has('method_count') && type === _DEX_METHOD_SYMBOL_TYPE) { + const {count = 0} = data.childStats[type] || {}; + if (count < 0) { + symbolName.classList.add('removed'); + } + } + + // Set the byte size and hover text + _setSize(element.querySelector('.size'), data); + + link.addEventListener('mouseover', event => + displayInfocard(_uiNodeData.get(event.currentTarget)) + ); + if (!isLeaf) { + link.addEventListener('click', _toggleTreeElement); + } + + return element; + } + + // When the `byteunit` state changes, update all .size elements in the page + form.elements.namedItem('byteunit').addEventListener('change', event => { + event.stopPropagation(); + state.set(event.currentTarget.name, event.currentTarget.value); + // Update existing size elements with 
the new unit + for (const sizeElement of _liveSizeSpanList) { + const data = _uiNodeData.get(sizeElement.parentElement); + if (data) _setSize(sizeElement, data); + } + }); + + _symbolTree.addEventListener('keydown', _handleKeyNavigation); + _symbolTree.addEventListener('focusin', event => { + displayInfocard(_uiNodeData.get(event.target)); + event.currentTarget.parentElement.classList.add('focused'); + }); + _symbolTree.addEventListener('focusout', event => + event.currentTarget.parentElement.classList.remove('focused') + ); + + self.newTreeElement = newTreeElement; +} + +{ + class ProgressBar { + /** @param {string} id */ + constructor(id) { + /** @type {HTMLProgressElement} */ + this._element = document.getElementById(id); + this.lastValue = this._element.value; + } + + setValue(val) { + if (val === 0 || val >= this.lastValue) { + this._element.value = val; + this.lastValue = val; + } else { + // Reset to 0 so the progress bar doesn't animate backwards. + this.setValue(0); + requestAnimationFrame(() => this.setValue(val)); + } + } + } + + /** @type {HTMLUListElement} */ + const _symbolTree = document.getElementById('symboltree'); + /** @type {HTMLInputElement} */ + const _fileUpload = document.getElementById('upload'); + const _progress = new ProgressBar('progress'); + + /** + * Displays the given data as a tree view + * @param {TreeProgress} message + */ + function displayTree(message) { + const {root, percent, diffMode, error} = message; + /** @type {DocumentFragment | null} */ + let rootElement = null; + if (root) { + rootElement = newTreeElement(root); + /** @type {HTMLAnchorElement} */ + const link = rootElement.querySelector('.node'); + // Expand the root UI node + link.click(); + link.tabIndex = 0; + } + state.set('diff_mode', diffMode ? 'on' : null); + + // Double requestAnimationFrame ensures that the code inside executes in a + // different frame than the above tree element creation. 
+ requestAnimationFrame(() => + requestAnimationFrame(() => { + _progress.setValue(percent); + if (error) { + document.body.classList.add('error'); + } else { + document.body.classList.remove('error'); + } + if (diffMode) { + document.body.classList.add('diff'); + } else { + document.body.classList.remove('diff'); + } + + dom.replace(_symbolTree, rootElement); + }) + ); + } + + treeReady.then(displayTree); + worker.setOnProgressHandler(displayTree); + + _fileUpload.addEventListener('change', event => { + const input = /** @type {HTMLInputElement} */ (event.currentTarget); + const file = input.files.item(0); + const fileUrl = URL.createObjectURL(file); + worker.loadTree(fileUrl).then(displayTree); + // Clean up afterwards so new files trigger event + input.value = ''; + }); + + form.addEventListener('change', () => { + _progress.setValue(0); + worker.loadTree().then(displayTree); + }); + form.addEventListener('submit', event => { + event.preventDefault(); + _progress.setValue(0); + worker.loadTree().then(displayTree); + }); +} diff --git a/chromium/tools/binary_size/libsupersize/static/tree-worker.js b/chromium/tools/binary_size/libsupersize/static/tree-worker.js new file mode 100644 index 00000000000..46aa5323d80 --- /dev/null +++ b/chromium/tools/binary_size/libsupersize/static/tree-worker.js @@ -0,0 +1,759 @@ +// Copyright 2018 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// @ts-check +'use strict'; + +/** + * @fileoverview + * Web worker code to parse JSON data from binary_size into data for the UI to + * display. + */ + +/** + * @typedef {object} Meta + * @prop {string[]} components + * @prop {number} total + * @prop {boolean} diff_mode + */ +/** + * @typedef {object} SymbolEntry JSON object representing a single symbol. + * @prop {string} n Name of the symbol. + * @prop {number} b Byte size of the symbol, divided by num_aliases. 
+ * @prop {string} t Single character string to indicate the symbol type. + * @prop {number} [u] Count value indicating how many symbols this entry + * represents. Negative value when removed in a diff. + */ +/** + * @typedef {object} FileEntry JSON object representing a single file and its + * symbols. + * @prop {string} p Path to the file (source_path). + * @prop {number} c Index of the file's component in meta (component_index). + * @prop {SymbolEntry[]} s - Symbols belonging to this node. Array of objects. + */ + +importScripts('./shared.js'); + +const _PATH_SEP = '/'; + +/** @param {FileEntry} fileEntry */ +function getSourcePath(fileEntry) { + return fileEntry[_KEYS.SOURCE_PATH]; +} + +/** + * Find the last index of either '/' or `sep` in the given path. + * @param {string} path + * @param {string} sep + */ +function lastIndexOf(path, sep) { + if (sep === _PATH_SEP) { + return path.lastIndexOf(_PATH_SEP); + } else { + return Math.max(path.lastIndexOf(sep), path.lastIndexOf(_PATH_SEP)); + } +} + +/** + * Return the dirname of the pathname 'path'. In a file path, this is the + * full path of its folder. + * @param {string} path Path to find dirname of. + * @param {string} sep Path seperator, such as '/'. + */ +function dirname(path, sep) { + return path.substring(0, lastIndexOf(path, sep)); +} + +/** + * Compare two nodes for sorting. Used in sortTree. + * @param {TreeNode} a + * @param {TreeNode} b + */ +function _compareFunc(a, b) { + return Math.abs(b.size) - Math.abs(a.size); +} + +/** + * Make a node with some default arguments + * @param {Partial} options + * Values to use for the node. If a value is + * omitted, a default will be used instead. + * @returns {TreeNode} + */ +function createNode(options) { + const {idPath, type, shortNameIndex, size = 0, childStats = {}} = options; + return { + children: [], + parent: null, + childStats, + idPath, + shortNameIndex, + size, + type, + }; +} + +/** + * Class used to build a tree from a list of symbol objects. 
+ * Add each file node using `addFileEntry()`, then call `build()` to finalize + * the tree and return the root node. The in-progress tree can be obtained from + * the `rootNode` property. + */ +class TreeBuilder { + /** + * @param {object} options + * @param {(fileEntry: FileEntry) => string} options.getPath Called to get the + * id path of a symbol's file entry. + * @param {(symbolNode: TreeNode) => boolean} options.filterTest Called to see + * if a symbol should be included. If a symbol fails the test, it will not be + * attached to the tree. + * @param {string} options.sep Path seperator used to find parent names. + */ + constructor(options) { + this._getPath = options.getPath; + this._filterTest = options.filterTest; + this._sep = options.sep || _PATH_SEP; + + this.rootNode = createNode({ + idPath: this._sep, + shortNameIndex: 0, + type: this._containerType(this._sep), + }); + /** @type {Map} Cache for directory nodes */ + this._parents = new Map(); + + /** + * Regex used to split the `idPath` when finding nodes. Equivalent to + * one of: "/" or |sep| + */ + this._splitter = new RegExp(`[/${this._sep}]`); + } + + /** + * Link a node to a new parent. Will go up the tree to update parent sizes to + * include the new child. + * @param {TreeNode} node Child node. + * @param {TreeNode} directParent New parent node. 
+ */ + static _attachToParent(node, directParent) { + // Link the nodes together + directParent.children.push(node); + node.parent = directParent; + + const additionalSize = node.size; + const additionalStats = Object.entries(node.childStats); + + // Update the size and childStats of all ancestors + while (node.parent != null) { + const {parent} = node; + const [containerType, lastBiggestType] = parent.type; + let {size: lastBiggestSize = 0} = + parent.childStats[lastBiggestType] || {}; + for (const [type, stat] of additionalStats) { + const parentStat = parent.childStats[type] || {size: 0, count: 0}; + + parentStat.size += stat.size; + parentStat.count += stat.count; + parent.childStats[type] = parentStat; + + const absSize = Math.abs(parentStat.size); + if (absSize > lastBiggestSize) { + parent.type = `${containerType}${type}`; + lastBiggestSize = absSize; + } + } + + parent.size += additionalSize; + node = parent; + } + } + + /** + * Merges dex method symbols such as "Controller#get" and "Controller#set" + * into containers, based on the class of the dex methods. 
+ * @param {TreeNode} node + */ + static _joinDexMethodClasses(node) { + const hasDexMethods = node.childStats[_DEX_METHOD_SYMBOL_TYPE] != null; + if (!hasDexMethods || node.children == null) return node; + + if (node.type[0] === _CONTAINER_TYPES.FILE) { + /** @type {Map} */ + const javaClassContainers = new Map(); + /** @type {TreeNode[]} */ + const otherSymbols = []; + + // Place all dex methods into buckets + for (const childNode of node.children) { + // Java classes are denoted with a "#", such as "LogoView#onDraw" + const splitIndex = childNode.idPath.lastIndexOf('#'); + + const isDexMethodWithClass = + childNode.type === _DEX_METHOD_SYMBOL_TYPE && + splitIndex > childNode.shortNameIndex; + + if (isDexMethodWithClass) { + // Get the idPath of the class + const classIdPath = childNode.idPath.slice(0, splitIndex); + + let classNode = javaClassContainers.get(classIdPath); + if (classNode == null) { + classNode = createNode({ + idPath: classIdPath, + shortNameIndex: childNode.shortNameIndex, + type: _CONTAINER_TYPES.JAVA_CLASS, + }); + javaClassContainers.set(classIdPath, classNode); + } + + // Adjust the dex method's short name so it starts after the "#" + childNode.shortNameIndex = splitIndex + 1; + TreeBuilder._attachToParent(childNode, classNode); + } else { + otherSymbols.push(childNode); + } + } + + node.children = otherSymbols; + for (const containerNode of javaClassContainers.values()) { + // Delay setting the parent until here so that `_attachToParent` + // doesn't add method stats twice + containerNode.parent = node; + node.children.push(containerNode); + } + } else { + node.children.forEach(TreeBuilder._joinDexMethodClasses); + } + return node; + } + + /** + * Formats a tree node by removing references to its desendants and ancestors. + * This reduces how much data is sent to the UI thread at once. For large + * trees, serialization and deserialization of the entire tree can take ~7s. 
+ * + * Only children up to `depth` will be kept, and deeper children will be + * replaced with `null` to indicate that there were children by they were + * removed. + * + * Leaves with no children will always have an empty children array. + * If a tree has only 1 child, it is kept as the UI will expand chains of + * single children in the tree. + * + * Additionally sorts the formatted portion of the tree. + * @param {TreeNode} node Node to format + * @param {number} depth How many levels of children to keep. + * @returns {TreeNode} + */ + static formatNode(node, depth = 1) { + const childDepth = depth - 1; + // `null` represents that the children have not been loaded yet + let children = null; + if (depth > 0 || node.children.length <= 1) { + // If depth is larger than 0, include the children. + // If there are 0 children, include the empty array to indicate the node + // is a leaf. + // If there is 1 child, include it so the UI doesn't need to make a + // roundtrip in order to expand the chain. + children = node.children + .map(n => TreeBuilder.formatNode(n, childDepth)) + .sort(_compareFunc); + } + + return TreeBuilder._joinDexMethodClasses({ + ...node, + children, + parent: null, + }); + } + + /** + * Returns the container type for a parent node. + * @param {string} childIdPath + * @private + */ + _containerType(childIdPath) { + const useAlternateType = + childIdPath.lastIndexOf(this._sep) > childIdPath.lastIndexOf(_PATH_SEP); + if (useAlternateType) { + return _CONTAINER_TYPES.COMPONENT; + } else { + return _CONTAINER_TYPES.DIRECTORY; + } + } + + /** + * Helper to return the parent of the given node. The parent is determined + * based in the idPath and the path seperator. If the parent doesn't yet + * exist, one is created and stored in the parents map. + * @param {TreeNode} childNode + * @private + */ + _getOrMakeParentNode(childNode) { + // Get idPath of this node's parent. 
+ let parentPath; + if (childNode.idPath === '') parentPath = _NO_NAME; + else parentPath = dirname(childNode.idPath, this._sep); + + // check if parent exists + let parentNode; + if (parentPath === '') { + // parent is root node if dirname is '' + parentNode = this.rootNode; + } else { + // get parent from cache if it exists, otherwise create it + parentNode = this._parents.get(parentPath); + if (parentNode == null) { + parentNode = createNode({ + idPath: parentPath, + shortNameIndex: lastIndexOf(parentPath, this._sep) + 1, + type: this._containerType(childNode.idPath), + }); + this._parents.set(parentPath, parentNode); + } + } + + // attach node to the newly found parent + TreeBuilder._attachToParent(childNode, parentNode); + return parentNode; + } + + /** + * Iterate through every file node generated by supersize. Each node includes + * symbols that belong to that file. Create a tree node for each file with + * tree nodes for that file's symbols attached. Afterwards attach that node to + * its parent directory node, or create it if missing. 
+ * @param {FileEntry} fileEntry File entry from data file + */ + addFileEntry(fileEntry) { + const idPath = this._getPath(fileEntry); + // make node for this + const fileNode = createNode({ + idPath, + shortNameIndex: lastIndexOf(idPath, this._sep) + 1, + type: _CONTAINER_TYPES.FILE, + }); + // build child nodes for this file's symbols and attach to self + for (const symbol of fileEntry[_KEYS.FILE_SYMBOLS]) { + const size = symbol[_KEYS.SIZE]; + const type = symbol[_KEYS.TYPE]; + const count = symbol[_KEYS.COUNT] || 1; + const symbolNode = createNode({ + // Join file path to symbol name with a ":" + idPath: `${idPath}:${symbol[_KEYS.SYMBOL_NAME]}`, + shortNameIndex: idPath.length + 1, + size, + type: symbol[_KEYS.TYPE], + childStats: {[type]: {size, count}}, + }); + + if (this._filterTest(symbolNode)) { + TreeBuilder._attachToParent(symbolNode, fileNode); + } + } + // unless we filtered out every symbol belonging to this file, + if (fileNode.children.length > 0) { + // build all ancestor nodes for this file + let orphanNode = fileNode; + while (orphanNode.parent == null && orphanNode !== this.rootNode) { + orphanNode = this._getOrMakeParentNode(orphanNode); + } + } + } + + /** + * Finalize the creation of the tree and return the root node. + */ + build() { + this._getPath = () => ''; + this._filterTest = () => false; + this._parents.clear(); + return this.rootNode; + } + + /** + * Internal handler for `find` to search for a node. + * @private + * @param {string[]} idPathList + * @param {TreeNode} node + * @returns {TreeNode | null} + */ + _find(idPathList, node) { + if (node == null) { + return null; + } else if (idPathList.length === 0) { + // Found desired node + return node; + } + + const [shortNameToFind] = idPathList; + const child = node.children.find(n => shortName(n) === shortNameToFind); + + return this._find(idPathList.slice(1), child); + } + + /** + * Find a node with a given `idPath` by traversing the tree. 
+ * @param {string} idPath + */ + find(idPath) { + // If `idPath` is the root's ID, return the root + if (idPath === this.rootNode.idPath) { + return this.rootNode; + } + + const symbolIndex = idPath.indexOf(':'); + let path; + if (symbolIndex > -1) { + const filePath = idPath.slice(0, symbolIndex); + const symbolName = idPath.slice(symbolIndex + 1); + + path = filePath.split(this._splitter); + path.push(symbolName); + } else { + path = idPath.split(this._splitter); + } + + // If the path is empty, it refers to the _NO_NAME container. + if (path[0] === '') { + path.unshift(_NO_NAME); + } + + return this._find(path, this.rootNode); + } +} + +/** + * Wrapper around fetch for requesting the same resource multiple times. + */ +class DataFetcher { + constructor(input) { + /** @type {AbortController | null} */ + this._controller = null; + this.setInput(input); + } + + /** + * Sets the input that describes what will be fetched. Also clears the cache. + * @param {string | Request} input URL to the resource you want to fetch. + */ + setInput(input) { + if (typeof this._input === 'string' && this._input.startsWith('blob:')) { + // Revoke the previous Blob url to prevent memory leaks + URL.revokeObjectURL(this._input); + } + + /** @type {Uint8Array | null} */ + this._cache = null; + this._input = input; + } + + /** + * Starts a new request and aborts the previous one. + * @param {string | Request} url + */ + async fetch(url) { + if (this._controller) this._controller.abort(); + this._controller = new AbortController(); + return fetch(url, { + credentials: 'same-origin', + signal: this._controller.signal, + }); + } + + /** + * Yields binary chunks as Uint8Arrays. After a complete run, the bytes are + * cached and future calls will yield the cached Uint8Array instead. 
+ */ + async *arrayBufferStream() { + if (this._cache) { + yield this._cache; + return; + } + + const response = await this.fetch(this._input); + let result; + // Use streams, if supported, so that we can show in-progress data instead + // of waiting for the entire data file to download. The file can be >100 MB, + // so streams ensure slow connections still see some data. + if (response.body) { + const reader = response.body.getReader(); + + /** @type {Uint8Array[]} Store received bytes to merge later */ + let buffer = []; + /** Total size of received bytes */ + let byteSize = 0; + while (true) { + // Read values from the stream + const {done, value} = await reader.read(); + if (done) break; + + const chunk = new Uint8Array(value, 0, value.length); + yield chunk; + buffer.push(chunk); + byteSize += chunk.length; + } + + // We just cache a single typed array to save some memory and make future + // runs consistent with the no streams mode. + result = new Uint8Array(byteSize); + let i = 0; + for (const chunk of buffer) { + result.set(chunk, i); + i += chunk.length; + } + } else { + // In-memory version for browsers without stream support + result = new Uint8Array(await response.arrayBuffer()); + yield result; + } + + this._cache = result; + } + + /** + * Transforms a binary stream into a newline delimited JSON (.ndjson) stream. + * Each yielded value corresponds to one line in the stream. 
+ * @returns {AsyncIterable} + */ + async *newlineDelimtedJsonStream() { + const decoder = new TextDecoder(); + const decoderArgs = {stream: true}; + let textBuffer = ''; + + for await (const bytes of this.arrayBufferStream()) { + if (this._controller.signal.aborted) { + throw new DOMException('Request was aborted', 'AbortError'); + } + + textBuffer += decoder.decode(bytes, decoderArgs); + const lines = textBuffer.split('\n'); + [textBuffer] = lines.splice(lines.length - 1, 1); + + for (const line of lines) { + yield JSON.parse(line); + } + } + } +} + +/** + * Parse the options represented as a query string, into an object. + * Includes checks for valid values. + * @param {string} options Query string + */ +function parseOptions(options) { + const params = new URLSearchParams(options); + + const groupBy = params.get('group_by') || 'source_path'; + const methodCountMode = params.has('method_count'); + + let minSymbolSize = Number(params.get('min_size')); + if (Number.isNaN(minSymbolSize)) { + minSymbolSize = 0; + } + + const includeRegex = params.get('include'); + const excludeRegex = params.get('exclude'); + + /** @type {Set} */ + let typeFilter; + if (methodCountMode) { + typeFilter = new Set(_DEX_METHOD_SYMBOL_TYPE); + } else { + typeFilter = new Set(types(params.getAll(_TYPE_STATE_KEY))); + if (typeFilter.size === 0) { + typeFilter = new Set(_SYMBOL_TYPE_SET); + typeFilter.delete('b'); + } + } + + /** @type {Array<(symbolNode: TreeNode) => boolean>} */ + const filters = []; + + /** Ensure symbol size is past the minimum */ + if (minSymbolSize > 0) { + filters.push(s => Math.abs(s.size) >= minSymbolSize); + } + + /** Ensure the symbol size wasn't filtered out */ + if (typeFilter.size < _SYMBOL_TYPE_SET.size) { + filters.push(s => typeFilter.has(s.type)); + } + + if (includeRegex) { + const regex = new RegExp(includeRegex); + filters.push(s => regex.test(s.idPath)); + } + if (excludeRegex) { + const regex = new RegExp(excludeRegex); + filters.push(s => 
!regex.test(s.idPath)); + } + + /** + * Check that a symbol node passes all the filters in the filters array. + * @param {TreeNode} symbolNode + */ + function filterTest(symbolNode) { + return filters.every(fn => fn(symbolNode)); + } + + return {groupBy, filterTest}; +} + +/** @type {TreeBuilder | null} */ +let builder = null; +const fetcher = new DataFetcher('data.ndjson'); + +/** + * Assemble a tree when this worker receives a message. + * @param {string} options Query string containing options for the builder. + * @param {(msg: TreeProgress) => void} onProgress + * @returns {Promise} + */ +async function buildTree(options, onProgress) { + const {groupBy, filterTest} = parseOptions(options); + + /** @type {Meta | null} Object from the first line of the data file */ + let meta = null; + + /** @type {{ [gropyBy: string]: (fileEntry: FileEntry) => string }} */ + const getPathMap = { + component(fileEntry) { + const component = meta.components[fileEntry[_KEYS.COMPONENT_INDEX]]; + const path = getSourcePath(fileEntry); + return `${component || '(No component)'}>${path}`; + }, + source_path: getSourcePath, + }; + + builder = new TreeBuilder({ + sep: groupBy === 'component' ? '>' : _PATH_SEP, + getPath: getPathMap[groupBy], + filterTest, + }); + + /** + * Creates data to post to the UI thread. Defaults will be used for the root + * and percent values if not specified. + * @param {{root?:TreeNode,percent?:number,error?:Error}} data Default data + * values to post. + */ + function createProgressMessage(data = {}) { + let {percent} = data; + if (percent == null) { + if (meta == null) { + percent = 0; + } else { + percent = Math.max(builder.rootNode.size / meta.total, 0.1); + } + } + + const message = { + root: TreeBuilder.formatNode(data.root || builder.rootNode), + percent, + diffMode: meta && meta.diff_mode, + }; + if (data.error) { + message.error = data.error.message; + } + return message; + } + + /** + * Post data to the UI thread. 
Defaults will be used for the root and percent + * values if not specified. + */ + function postToUi() { + const message = createProgressMessage(); + message.id = 0; + onProgress(message); + } + + try { + // Post partial state every second + let lastBatchSent = Date.now(); + for await (const dataObj of fetcher.newlineDelimtedJsonStream()) { + if (meta == null) { + // First line of data is used to store meta information. + meta = /** @type {Meta} */ (dataObj); + postToUi(); + } else { + builder.addFileEntry(/** @type {FileEntry} */ (dataObj)); + const currentTime = Date.now(); + if (currentTime - lastBatchSent > 500) { + postToUi(); + await Promise.resolve(); // Pause loop to check for worker messages + lastBatchSent = currentTime; + } + } + } + + return createProgressMessage({ + root: builder.build(), + percent: 1, + }); + } catch (error) { + if (error.name === 'AbortError') { + console.info(error.message); + } else { + console.error(error); + } + return createProgressMessage({error}); + } +} + +const actions = { + /** @param {{input:string,options:string}} data */ + load(data) { + if (data.input) fetcher.setInput(data.input); + return buildTree(data.options, progress => { + // @ts-ignore + self.postMessage(progress); + }); + }, + /** @param {string} path */ + async open(path) { + if (!builder) throw new Error('Called open before load'); + const node = builder.find(path); + return TreeBuilder.formatNode(node); + }, +}; + +/** + * Call the requested action function with the given data. If an error is thrown + * or rejected, post the error message to the UI thread. + * @param {number} id Unique message ID. + * @param {string} action Action type, corresponding to a key in `actions.` + * @param {any} data Data to supply to the action function. 
+ */ +async function runAction(id, action, data) { + try { + const result = await actions[action](data); + // @ts-ignore + self.postMessage({id, result}); + } catch (err) { + // @ts-ignore + self.postMessage({id, error: err.message}); + throw err; + } +} + +const runActionDebounced = debounce(runAction, 0); + +/** + * @param {MessageEvent} event Event for when this worker receives a message. + */ +self.onmessage = async event => { + const {id, action, data} = event.data; + if (action === 'load') { + // Loading large files will block the worker thread until complete or when + // an await statement is reached. During this time, multiple load messages + // can pile up due to filters being adjusted. We debounce the load call + // so that only the last message is read (the current set of filters). + runActionDebounced(id, action, data); + } else { + runAction(id, action, data); + } +}; diff --git a/chromium/tools/binary_size/libsupersize/string_extract.py b/chromium/tools/binary_size/libsupersize/string_extract.py new file mode 100644 index 00000000000..216052f0eb4 --- /dev/null +++ b/chromium/tools/binary_size/libsupersize/string_extract.py @@ -0,0 +1,249 @@ +# Copyright 2018 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Utilities to extract string literals from object files. + +LookupElfRodataInfo(): + Runs readelf to extract and return .rodata section spec of an ELF file. + +ReadFileChunks(): + Reads raw data from a file, given a list of ranges in the file. + +ResolveStringPieces(): + BulkForkAndCall() target: Given {path: [string addresses]} and + [raw_string_data for each string_section]: + - Reads {path: [src_strings]}. + - For each path, searches for src_strings in at most 1 raw_string_data over + each string_section. If found, translates to string_range and annotates it + to the string_section. + - Returns [{path: [string_ranges]} for each string_section]. 
+""" + +import collections +import itertools +import logging +import os +import subprocess + +import ar +import concurrent +import models +import path_util + + +def LookupElfRodataInfo(elf_path, tool_prefix): + """Returns (address, offset, size) for the .rodata section.""" + args = [path_util.GetReadElfPath(tool_prefix), '-S', '--wide', elf_path] + output = subprocess.check_output(args) + lines = output.splitlines() + for line in lines: + # [Nr] Name Type Addr Off Size ES Flg Lk Inf Al + # [07] .rodata PROGBITS 025e7000 237c000 5ec4f6 00 A 0 0 256 + if '.rodata ' in line: + fields = line[line.index(models.SECTION_RODATA):].split() + return int(fields[2], 16), int(fields[3], 16), int(fields[4], 16) + raise AssertionError('No .rodata for command: ' + repr(args)) + + +def ReadFileChunks(path, positions): + """Returns a list of strings corresponding to |positions|. + + Args: + positions: List of (offset, size). + """ + ret = [] + if not positions: + return ret + with open(path, 'rb') as f: + for offset, size in positions: + f.seek(offset) + ret.append(f.read(size)) + return ret + + +def _ExtractArchivePath(path): + # E.g. foo/bar.a(baz.o) + if path.endswith(')'): + start_idx = path.index('(') + return path[:start_idx] + return None + + +def _LookupStringSectionPositions(target, tool_prefix, output_directory): + """Returns a dict of object_path -> [(offset, size)...] of .rodata sections. + + Args: + target: An archive path string (e.g., "foo.a") or a list of object paths. + """ + is_archive = isinstance(target, basestring) + args = [path_util.GetReadElfPath(tool_prefix), '-S', '--wide'] + if is_archive: + args.append(target) + else: + # Assign path for when len(target) == 1, (no File: line exists). 
+ path = target[0] + args.extend(target) + + output = subprocess.check_output(args, cwd=output_directory) + lines = output.splitlines() + section_positions_by_path = {} + cur_offsets = [] + for line in lines: + # File: base/third_party/libevent/libevent.a(buffer.o) + # [Nr] Name Type Addr Off Size ES Flg Lk Inf Al + # [11] .rodata.str1.1 PROGBITS 00000000 0000b4 000004 01 AMS 0 0 1 + # [11] .rodata.str4.4 PROGBITS 00000000 0000b4 000004 01 AMS 0 0 4 + # [11] .rodata.str8.8 PROGBITS 00000000 0000b4 000004 01 AMS 0 0 8 + # [80] .rodata..L.str PROGBITS 00000000 000530 000002 00 A 0 0 1 + # The various string sections differ by alignment. + # The presence of a wchar_t literal (L"asdf") seems to make a str4 section. + # When multiple sections exist, nm gives us no indication as to which + # section each string corresponds to. + if line.startswith('File: '): + if cur_offsets: + section_positions_by_path[path] = cur_offsets + cur_offsets = [] + path = line[6:] + elif '.rodata.' in line: + progbits_idx = line.find('PROGBITS ') + if progbits_idx != -1: + fields = line[progbits_idx:].split() + position = (int(fields[2], 16), int(fields[3], 16)) + # The heuristics in _IterStringLiterals rely on str1 coming first. + if fields[-1] == '1': + cur_offsets.insert(0, position) + else: + cur_offsets.append(position) + if cur_offsets: + section_positions_by_path[path] = cur_offsets + return section_positions_by_path + + +def _ReadStringSections(target, output_directory, positions_by_path): + """Returns a dict of object_path -> [string...] of .rodata chunks. + + Args: + target: An archive path string (e.g., "foo.a") or a list of object paths. + positions_by_path: A dict of object_path -> [(offset, size)...] 
+ """ + is_archive = isinstance(target, basestring) + string_sections_by_path = {} + if is_archive: + for subpath, chunk in ar.IterArchiveChunks( + os.path.join(output_directory, target)): + path = '{}({})'.format(target, subpath) + positions = positions_by_path.get(path) + # No positions if file has no string literals. + if positions: + string_sections_by_path[path] = ( + [chunk[offset:offset + size] for offset, size in positions]) + else: + for path in target: + positions = positions_by_path.get(path) + # We already log a warning about this in _IterStringLiterals(). + if positions: + string_sections_by_path[path] = ReadFileChunks( + os.path.join(output_directory, path), positions) + return string_sections_by_path + + +def _IterStringLiterals(path, addresses, obj_sections): + """Yields all string literals (including \0) for the given object path. + + Args: + path: Object file path. + addresses: List of string offsets encoded as hex strings. + obj_sections: List of contents of .rodata.str sections read from the given + object file. + """ + + next_offsets = sorted(int(a, 16) for a in addresses) + if not obj_sections: + # Happens when there is an address for a symbol which is not actually a + # string literal, or when string_sections_by_path is missing an entry. + logging.warning('Object has %d strings but no string sections: %s', + len(addresses), path) + return + for section_data in obj_sections: + cur_offsets = next_offsets + # Always assume first element is 0. I'm not entirely sure why this is + # necessary, but strings get missed without it. + next_offsets = [0] + prev_offset = 0 + # TODO(agrieve): Switch to using nm --print-size in order to capture the + # address+size of each string rather than just the address. + for offset in cur_offsets[1:]: + if offset >= len(section_data): + # Remaining offsets are for next section. 
+ next_offsets.append(offset) + continue + # Figure out which offsets apply to this section via heuristic of them + # all ending with a null character. + if offset == prev_offset or section_data[offset - 1] != '\0': + next_offsets.append(offset) + continue + yield section_data[prev_offset:offset] + prev_offset = offset + + if prev_offset < len(section_data): + yield section_data[prev_offset:] + + +# This is a target for BulkForkAndCall(). +def ResolveStringPieces(encoded_string_addresses_by_path, string_data, + tool_prefix, output_directory): + string_addresses_by_path = concurrent.DecodeDictOfLists( + encoded_string_addresses_by_path) + # Assign |target| as archive path, or a list of object paths. + any_path = next(string_addresses_by_path.iterkeys()) + target = _ExtractArchivePath(any_path) + if not target: + target = string_addresses_by_path.keys() + + # Run readelf to find location of .rodata within the .o files. + section_positions_by_path = _LookupStringSectionPositions( + target, tool_prefix, output_directory) + # Load the .rodata sections (from object files) as strings. + string_sections_by_path = _ReadStringSections( + target, output_directory, section_positions_by_path) + + # list of elf_positions_by_path. + ret = [collections.defaultdict(list) for _ in string_data] + # Brute-force search of strings within ** merge strings sections. + # This is by far the slowest part of AnalyzeStringLiterals(). + # TODO(agrieve): Pre-process string_data into a dict of literal->address (at + # least for ascii strings). + for path, object_addresses in string_addresses_by_path.iteritems(): + for value in _IterStringLiterals( + path, object_addresses, string_sections_by_path.get(path)): + first_match = -1 + first_match_dict = None + for target_dict, data in itertools.izip(ret, string_data): + # Set offset so that it will be 0 when len(value) is added to it below. 
+ offset = -len(value) + while True: + offset = data.find(value, offset + len(value)) + if offset == -1: + break + # Preferring exact matches (those following \0) over substring matches + # significantly increases accuracy (although shows that linker isn't + # being optimal). + if offset == 0 or data[offset - 1] == '\0': + break + if first_match == -1: + first_match = offset + first_match_dict = target_dict + if offset != -1: + break + if offset == -1: + # Exact match not found, so take suffix match if it exists. + offset = first_match + target_dict = first_match_dict + # Missing strings happen when optimizations make them unused. + if offset != -1: + # Encode tuple as a string for easier marshalling. + target_dict[path].append( + str(offset) + ':' + str(len(value))) + + return [concurrent.EncodeDictOfLists(x) for x in ret] diff --git a/chromium/tools/binary_size/libsupersize/template/D3SymbolTreeMap.js b/chromium/tools/binary_size/libsupersize/template/D3SymbolTreeMap.js deleted file mode 100644 index 3f001bb999d..00000000000 --- a/chromium/tools/binary_size/libsupersize/template/D3SymbolTreeMap.js +++ /dev/null @@ -1,930 +0,0 @@ -// Copyright 2014 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -// TODO: -// 1. Visibility functions: base on boxPadding.t, not 15 -// 2. Track a maxDisplayDepth that is user-settable: -// maxDepth == currentRoot.depth + maxDisplayDepth -function D3SymbolTreeMap(mapWidth, mapHeight, levelsToShow) { - this._mapContainer = undefined; - this._mapWidth = mapWidth; - this._mapHeight = mapHeight; - this.boxPadding = {'l': 5, 'r': 5, 't': 20, 'b': 5}; - this.infobox = undefined; - this.methodCountMode = false; - this._maskContainer = undefined; - this._highlightContainer = undefined; - // Transition in this order: - // 1. Exiting items go away. - // 2. Updated items move. - // 3. New items enter.
- this._exitDuration=500; - this._updateDuration=500; - this._enterDuration=500; - this._firstTransition=true; - this._layout = undefined; - this._currentRoot = undefined; - this._currentNodes = undefined; - this._treeData = undefined; - this._maxLevelsToShow = levelsToShow; - this._currentMaxDepth = this._maxLevelsToShow; -} - -/** - * Make a number pretty, with comma separators. - */ -D3SymbolTreeMap._pretty = function(num) { - num = Math.round(num) - var asString = String(num); - var result = ''; - var counter = 0; - for (var x = asString.length - 1; x >= 0; x--) { - counter++; - if (counter === 4) { - result = ',' + result; - counter = 1; - } - result = asString.charAt(x) + result; - } - return result; -} - -/** - * Express a number in terms of KiB, MiB, GiB, etc. - * Note that these are powers of 2, not of 10. - */ -D3SymbolTreeMap.prototype._byteify = function(num) { - if (this.methodCountMode) { - return num + ' methods'; - } - var suffix; - if (num >= 1024) { - if (num >= 1024 * 1024 * 1024) { - suffix = 'GiB'; - num = num / (1024 * 1024 * 1024); - } else if (num >= 1024 * 1024) { - suffix = 'MiB'; - num = num / (1024 * 1024); - } else if (num >= 1024) { - suffix = 'KiB' - num = num / 1024; - } - return num.toFixed(2) + ' ' + suffix; - } - return num + ' B'; -} - -D3SymbolTreeMap._NM_SYMBOL_TYPE_DESCRIPTIONS = { - 'b': '.bss', - 'd': '.data and .data.*', - 'r': '.rodata', - 't': '.text', - 'v': 'Vtable Entry', - '!': 'Generated Symbols (typeinfo, thunks, etc)', - 'x': 'Dex Non-Method Entries', - 'm': 'Dex Methods', - 'p': 'Locale Pak Entries', - 'P': 'Non-Locale Pak Entries', - 'o': 'Other Entries', -}; -D3SymbolTreeMap._NM_SYMBOL_TYPES = ''; -for (var symbol_type in D3SymbolTreeMap._NM_SYMBOL_TYPE_DESCRIPTIONS) { - D3SymbolTreeMap._NM_SYMBOL_TYPES += symbol_type; -} - -/** - * Given a symbol type code, look up and return a human-readable description - * of that symbol type. 
If the symbol type does not match one of the known - * types, the unrecognized description (corresponding to symbol type '?') is - * returned instead of null or undefined. - */ -D3SymbolTreeMap._getSymbolDescription = function(type) { - var result = D3SymbolTreeMap._NM_SYMBOL_TYPE_DESCRIPTIONS[type]; - if (result === undefined) { - result = D3SymbolTreeMap._NM_SYMBOL_TYPE_DESCRIPTIONS['?']; - } - return result; -} - -D3SymbolTreeMap._colorArray = [ - 'rgb(190,186,218)', - 'rgb(253,180,98)', - 'rgb(141,211,199)', - 'rgb(128,177,211)', - 'rgb(255,237,111)', - 'rgb(204,235,197)', - 'rgb(255,151,151)', - 'rgb(255,95,95)', - 'rgb(93,156,110)', - 'rgb(61,109,55)', - 'rgb(150,100,111)', -] - -D3SymbolTreeMap._initColorMap = function() { - var map = {}; - var numColors = D3SymbolTreeMap._colorArray.length; - var count = 0; - for (var key in D3SymbolTreeMap._NM_SYMBOL_TYPE_DESCRIPTIONS) { - var index = count++ % numColors; - map[key] = d3.rgb(D3SymbolTreeMap._colorArray[index]); - } - D3SymbolTreeMap._colorMap = map; -} -D3SymbolTreeMap._initColorMap(); - -D3SymbolTreeMap.getColorForType = function(type) { - var result = D3SymbolTreeMap._colorMap[type]; - if (result === undefined) return d3.rgb('rgb(255,255,255)'); - return result; -} - -D3SymbolTreeMap.prototype.init = function() { - this.infobox = this._createInfoBox(); - this._mapContainer = d3.select('body').append('div') - .style('position', 'relative') - .style('width', this._mapWidth) - .style('height', this._mapHeight) - .style('padding', 0) - .style('margin', 0) - .style('box-shadow', '5px 5px 5px #888'); - this._layout = this._createTreeMapLayout(); - this.methodCountMode = tree_data['methodCountMode']; - this._setData(tree_data); // TODO: Don't use global 'tree_data' -} - -/** - * Sets the data displayed by the treemap and layint out the map. 
- */ -D3SymbolTreeMap.prototype._setData = function(data) { - this._treeData = data; - console.time('_crunchStats'); - this._crunchStats(data); - console.timeEnd('_crunchStats'); - this._currentRoot = this._treeData; - this._currentNodes = this._layout.nodes(this._currentRoot); - this._currentMaxDepth = this._maxLevelsToShow; - this._doLayout(); -} - -/** - * Recursively traverses the entire tree starting from the specified node, - * computing statistics and recording metadata as it goes. Call this method - * only once per imported tree. - */ -D3SymbolTreeMap.prototype._crunchStats = function(node) { - var stack = []; - stack.idCounter = 0; - this._crunchStatsHelper(stack, node); -} - -/** - * Invoke the specified visitor function on all data elements currently shown - * in the treemap including any and all of their children, starting at the - * currently-displayed root and descening recursively. The function will be - * passed the datum element representing each node. No traversal guarantees - * are made. - */ -D3SymbolTreeMap.prototype.visitFromDisplayedRoot = function(visitor) { - this._visit(this._currentRoot, visitor); -} - -/** - * Helper function for visit functions. - */ -D3SymbolTreeMap.prototype._visit = function(datum, visitor) { - visitor.call(this, datum); - if (datum.children) for (var i = 0; i < datum.children.length; i++) { - this._visit(datum.children[i], visitor); - } -} - -D3SymbolTreeMap.prototype._crunchStatsHelper = function(stack, node) { - // Only overwrite the node ID if it isn't already set. - // This allows stats to be crunched multiple times on subsets of data - // without breaking the data-to-ID bindings. New nodes get new IDs. - if (node.id === undefined) node.id = stack.idCounter++; - if (node.children === undefined) { - // Leaf node (symbol); accumulate stats. 
- for (var i = 0; i < stack.length; i++) { - var ancestor = stack[i]; - if (!ancestor.symbol_stats) ancestor.symbol_stats = {}; - if (ancestor.symbol_stats[node.t] === undefined) { - // New symbol type we haven't seen before, just record. - ancestor.symbol_stats[node.t] = {'count': 1, - 'size': node.value}; - } else { - // Existing symbol type, increment. - ancestor.symbol_stats[node.t].count++; - ancestor.symbol_stats[node.t].size += node.value; - } - } - } else for (var i = 0; i < node.children.length; i++) { - stack.push(node); - this._crunchStatsHelper(stack, node.children[i]); - stack.pop(); - } -} - -D3SymbolTreeMap.prototype._createTreeMapLayout = function() { - var result = d3.layout.treemap() - .padding([this.boxPadding.t, this.boxPadding.r, - this.boxPadding.b, this.boxPadding.l]) - .size([this._mapWidth, this._mapHeight]); - return result; -} - -D3SymbolTreeMap.prototype.resize = function(width, height) { - this._mapWidth = width; - this._mapHeight = height; - this._mapContainer.style('width', width).style('height', height); - this._layout.size([this._mapWidth, this._mapHeight]); - this._currentNodes = this._layout.nodes(this._currentRoot); - this._doLayout(); -} - -D3SymbolTreeMap.prototype._zoomDatum = function(datum) { - if (this._currentRoot === datum) return; // already here - this._hideHighlight(datum); - this._hideInfoBox(datum); - this._currentRoot = datum; - this._currentNodes = this._layout.nodes(this._currentRoot); - this._currentMaxDepth = this._currentRoot.depth + this._maxLevelsToShow; - console.log('zooming into datum ' + this._currentRoot.n); - this._doLayout(); -} - -D3SymbolTreeMap.prototype.setMaxLevels = function(levelsToShow) { - this._maxLevelsToShow = levelsToShow; - this._currentNodes = this._layout.nodes(this._currentRoot); - this._currentMaxDepth = this._currentRoot.depth + this._maxLevelsToShow; - console.log('setting max levels to show: ' + this._maxLevelsToShow); - this._doLayout(); -} - -/** - * Clone the specified tree, 
returning an independent copy of the data. - * Only the original attributes expected to exist prior to invoking - * _crunchStatsHelper are retained, with the exception of the 'id' attribute - * (which must be retained for proper transitions). - * If the optional filter parameter is provided, it will be called with 'this' - * set to this treemap instance and passed the 'datum' object as an argument. - * When specified, the copy will retain only the data for which the filter - * function returns true. - */ -D3SymbolTreeMap.prototype._clone = function(datum, filter) { - var trackingStats = false; - if (this.__cloneState === undefined) { - console.time('_clone'); - trackingStats = true; - this.__cloneState = {'accepted': 0, 'rejected': 0, - 'forced': 0, 'pruned': 0}; - } - - // Must go depth-first. All parents of children that are accepted by the - // filter must be preserved! - var copy = {'n': datum.n, 'k': datum.k}; - var childAccepted = false; - if (datum.children !== undefined) { - for (var i = 0; i < datum.children.length; i++) { - var copiedChild = this._clone(datum.children[i], filter); - if (copiedChild !== undefined) { - childAccepted = true; // parent must also be accepted. - if (copy.children === undefined) copy.children = []; - copy.children.push(copiedChild); - } - } - } - - // Ignore nodes that don't match the filter, when present. - var accept = false; - if (childAccepted) { - // Parent of an accepted child must also be accepted. - this.__cloneState.forced++; - accept = true; - } else if (filter !== undefined && filter.call(this, datum) !== true) { - this.__cloneState.rejected++; - } else if (datum.children === undefined) { - // Accept leaf nodes that passed the filter - this.__cloneState.accepted++; - accept = true; - } else { - // Non-leaf node. If no children are accepted, prune it. 
- this.__cloneState.pruned++; - } - - if (accept) { - if (datum.id !== undefined) copy.id = datum.id; - if (datum.lastPathElement !== undefined) { - copy.lastPathElement = datum.lastPathElement; - } - if (datum.t !== undefined) copy.t = datum.t; - if (datum.value !== undefined && datum.children === undefined) { - copy.value = datum.value; - } - } else { - // Discard the copy we were going to return - copy = undefined; - } - - if (trackingStats === true) { - // We are the fist call in the recursive chain. - console.timeEnd('_clone'); - var totalAccepted = this.__cloneState.accepted + - this.__cloneState.forced; - console.log( - totalAccepted + ' nodes retained (' + - this.__cloneState.forced + ' forced by accepted children, ' + - this.__cloneState.accepted + ' accepted on their own merits), ' + - this.__cloneState.rejected + ' nodes (and their children) ' + - 'filtered out,' + - this.__cloneState.pruned + ' nodes pruned because because no ' + - 'children remained.'); - delete this.__cloneState; - } - return copy; -} - -D3SymbolTreeMap.prototype.filter = function(filter) { - // Ensure we have a copy of the original root. 
- if (this._backupTree === undefined) this._backupTree = this._treeData; - this._mapContainer.selectAll('div').remove(); - this._setData(this._clone(this._backupTree, filter)); -} - -D3SymbolTreeMap.prototype._doLayout = function() { - console.time('_doLayout'); - this._handleInodes(); - this._handleLeaves(); - this._firstTransition = false; - console.timeEnd('_doLayout'); -} - -D3SymbolTreeMap.prototype._highlightElement = function(datum, selection) { - this._showHighlight(datum, selection); -} - -D3SymbolTreeMap.prototype._unhighlightElement = function(datum, selection) { - this._hideHighlight(datum, selection); -} - -D3SymbolTreeMap.prototype._handleInodes = function() { - console.time('_handleInodes'); - var thisTreeMap = this; - var inodes = this._currentNodes.filter(function(datum){ - return (datum.depth <= thisTreeMap._currentMaxDepth) && - datum.children !== undefined; - }); - var cellsEnter = this._mapContainer.selectAll('div.inode') - .data(inodes, function(datum) { return datum.id; }) - .enter() - .append('div').attr('class', 'inode').attr('id', function(datum){ - return 'node-' + datum.id;}); - - - // Define enter/update/exit for inodes - cellsEnter - .append('div') - .attr('class', 'rect inode_rect_entering') - .style('z-index', function(datum) { return datum.id * 2; }) - .style('position', 'absolute') - .style('left', function(datum) { return datum.x; }) - .style('top', function(datum){ return datum.y; }) - .style('width', function(datum){ return datum.dx; }) - .style('height', function(datum){ return datum.dy; }) - .style('opacity', '0') - .style('border', '1px solid black') - .style('background-image', function(datum) { - return thisTreeMap._makeSymbolBucketBackgroundImage.call( - thisTreeMap, datum); - }) - .style('background-color', function(datum) { - if (datum.t === undefined) return 'rgb(220,220,220)'; - return D3SymbolTreeMap.getColorForType(datum.t).toString(); - }) - .on('mouseover', function(datum){ - thisTreeMap._highlightElement.call( - 
thisTreeMap, datum, d3.select(this)); - thisTreeMap._showInfoBox.call(thisTreeMap, datum); - }) - .on('mouseout', function(datum){ - thisTreeMap._unhighlightElement.call( - thisTreeMap, datum, d3.select(this)); - thisTreeMap._hideInfoBox.call(thisTreeMap, datum); - }) - .on('mousemove', function(){ - thisTreeMap._moveInfoBox.call(thisTreeMap, event); - }) - .on('dblclick', function(datum){ - if (datum !== thisTreeMap._currentRoot) { - // Zoom into the selection - thisTreeMap._zoomDatum(datum); - } else if (datum.parent) { - console.log('event.shiftKey=' + event.shiftKey); - if (event.shiftKey === true) { - // Back to root - thisTreeMap._zoomDatum(thisTreeMap._treeData); - } else { - // Zoom out of the selection - thisTreeMap._zoomDatum(datum.parent); - } - } - }); - cellsEnter - .append('div') - .attr('class', 'label inode_label_entering') - .style('z-index', function(datum) { return (datum.id * 2) + 1; }) - .style('position', 'absolute') - .style('left', function(datum){ return datum.x; }) - .style('top', function(datum){ return datum.y; }) - .style('width', function(datum) { return datum.dx; }) - .style('height', function(datum) { return thisTreeMap.boxPadding.t; }) - .style('opacity', '0') - .style('pointer-events', 'none') - .style('-webkit-user-select', 'none') - .style('overflow', 'hidden') // required for ellipsis - .style('white-space', 'nowrap') // required for ellipsis - .style('text-overflow', 'ellipsis') - .style('text-align', 'center') - .style('vertical-align', 'top') - .style('visibility', function(datum) { - return (datum.dx < 15 || datum.dy < 15) ? 
'hidden' : 'visible'; - }) - .text(function(datum) { - var sizeish = ' [' + thisTreeMap._byteify(datum.value) + ']' - var text; - if (datum.k === 'b') { // bucket - if (datum === thisTreeMap._currentRoot) { - text = thisTreeMap.pathFor(datum) + ': ' - + D3SymbolTreeMap._getSymbolDescription(datum.t) - } else { - text = D3SymbolTreeMap._getSymbolDescription(datum.t); - } - } else if (datum === thisTreeMap._currentRoot) { - // The top-most level should always show the complete path - text = thisTreeMap.pathFor(datum); - } else { - // Anything that isn't a bucket or a leaf (symbol) or the - // current root should just show its name. - text = datum.n; - } - return text + sizeish; - } - ); - - // Complicated transition logic: - // For nodes that are entering, we want to fade them in in-place AFTER - // any adjusting nodes have resized and moved around. That way, new nodes - // seamlessly appear in the right spot after their containers have resized - // and moved around. - // To do this we do some trickery: - // 1. Define a '_entering' class on the entering elements - // 2. Use this to select only the entering elements and apply the opacity - // transition. - // 3. Use the same transition to drop the '_entering' suffix, so that they - // will correctly update in later zoom/resize/whatever operations. - // 4. The update transition is achieved by selecting the elements without - // the '_entering_' suffix and applying movement and resizing transition - // effects. - this._mapContainer.selectAll('div.inode_rect_entering').transition() - .duration(thisTreeMap._enterDuration).delay( - this._firstTransition ? 0 : thisTreeMap._exitDuration + - thisTreeMap._updateDuration) - .attr('class', 'rect inode_rect') - .style('opacity', '1') - this._mapContainer.selectAll('div.inode_label_entering').transition() - .duration(thisTreeMap._enterDuration).delay( - this._firstTransition ? 
0 : thisTreeMap._exitDuration + - thisTreeMap._updateDuration) - .attr('class', 'label inode_label') - .style('opacity', '1') - this._mapContainer.selectAll('div.inode_rect').transition() - .duration(thisTreeMap._updateDuration).delay(thisTreeMap._exitDuration) - .style('opacity', '1') - .style('background-image', function(datum) { - return thisTreeMap._makeSymbolBucketBackgroundImage.call( - thisTreeMap, datum); - }) - .style('left', function(datum) { return datum.x; }) - .style('top', function(datum){ return datum.y; }) - .style('width', function(datum){ return datum.dx; }) - .style('height', function(datum){ return datum.dy; }); - this._mapContainer.selectAll('div.inode_label').transition() - .duration(thisTreeMap._updateDuration).delay(thisTreeMap._exitDuration) - .style('opacity', '1') - .style('visibility', function(datum) { - return (datum.dx < 15 || datum.dy < 15) ? 'hidden' : 'visible'; - }) - .style('left', function(datum){ return datum.x; }) - .style('top', function(datum){ return datum.y; }) - .style('width', function(datum) { return datum.dx; }) - .style('height', function(datum) { return thisTreeMap.boxPadding.t; }) - .text(function(datum) { - var sizeish = ' [' + thisTreeMap._byteify(datum.value) + ']' - var text; - if (datum.k === 'b') { - if (datum === thisTreeMap._currentRoot) { - text = thisTreeMap.pathFor(datum) + ': ' + - D3SymbolTreeMap._getSymbolDescription(datum.t) - } else { - text = D3SymbolTreeMap._getSymbolDescription(datum.t); - } - } else if (datum === thisTreeMap._currentRoot) { - // The top-most level should always show the complete path - text = thisTreeMap.pathFor(datum); - } else { - // Anything that isn't a bucket or a leaf (symbol) or the - // current root should just show its name. 
- text = datum.n; - } - return text + sizeish; - }); - var exit = this._mapContainer.selectAll('div.inode') - .data(inodes, function(datum) { return 'inode-' + datum.id; }) - .exit(); - exit.selectAll('div.inode_rect').transition().duration( - thisTreeMap._exitDuration).style('opacity', 0); - exit.selectAll('div.inode_label').transition().duration( - thisTreeMap._exitDuration).style('opacity', 0); - exit.transition().delay(thisTreeMap._exitDuration + 1).remove(); - - console.log(inodes.length + ' inodes layed out.'); - console.timeEnd('_handleInodes'); -} - -D3SymbolTreeMap.prototype._handleLeaves = function() { - console.time('_handleLeaves'); - var color_fn = d3.scale.category10(); - var thisTreeMap = this; - var leaves = this._currentNodes.filter(function(datum){ - return (datum.depth <= thisTreeMap._currentMaxDepth) && - datum.children === undefined; }); - var cellsEnter = this._mapContainer.selectAll('div.leaf') - .data(leaves, function(datum) { return datum.id; }) - .enter() - .append('div').attr('class', 'leaf').attr('id', function(datum){ - return 'node-' + datum.id; - }); - - // Define enter/update/exit for leaves - cellsEnter - .append('div') - .attr('class', 'rect leaf_rect_entering') - .style('z-index', function(datum) { return datum.id * 2; }) - .style('position', 'absolute') - .style('left', function(datum){ return datum.x; }) - .style('top', function(datum){ return datum.y; }) - .style('width', function(datum){ return datum.dx; }) - .style('height', function(datum){ return datum.dy; }) - .style('opacity', '0') - .style('background-color', function(datum) { - if (datum.t === undefined) return 'rgb(220,220,220)'; - return D3SymbolTreeMap.getColorForType(datum.t) - .darker(0.3).toString(); - }) - .style('border', '1px solid black') - .on('mouseover', function(datum){ - thisTreeMap._highlightElement.call( - thisTreeMap, datum, d3.select(this)); - thisTreeMap._showInfoBox.call(thisTreeMap, datum); - }) - .on('mouseout', function(datum){ - 
thisTreeMap._unhighlightElement.call( - thisTreeMap, datum, d3.select(this)); - thisTreeMap._hideInfoBox.call(thisTreeMap, datum); - }) - .on('mousemove', function(){ thisTreeMap._moveInfoBox.call( - thisTreeMap, event); - }); - cellsEnter - .append('div') - .attr('class', 'label leaf_label_entering') - .style('z-index', function(datum) { return (datum.id * 2) + 1; }) - .style('position', 'absolute') - .style('left', function(datum){ return datum.x; }) - .style('top', function(datum){ return datum.y; }) - .style('width', function(datum) { return datum.dx; }) - .style('height', function(datum) { return datum.dy; }) - .style('opacity', '0') - .style('pointer-events', 'none') - .style('-webkit-user-select', 'none') - .style('overflow', 'hidden') // required for ellipsis - .style('white-space', 'nowrap') // required for ellipsis - .style('text-overflow', 'ellipsis') - .style('text-align', 'center') - .style('vertical-align', 'middle') - .style('visibility', function(datum) { - return (datum.dx < 15 || datum.dy < 15) ? 'hidden' : 'visible'; - }) - .text(function(datum) { return datum.n; }); - - // Complicated transition logic: See note in _handleInodes() - this._mapContainer.selectAll('div.leaf_rect_entering').transition() - .duration(thisTreeMap._enterDuration).delay( - this._firstTransition ? 0 : thisTreeMap._exitDuration + - thisTreeMap._updateDuration) - .attr('class', 'rect leaf_rect') - .style('opacity', '1') - this._mapContainer.selectAll('div.leaf_label_entering').transition() - .duration(thisTreeMap._enterDuration).delay( - this._firstTransition ? 
0 : thisTreeMap._exitDuration + - thisTreeMap._updateDuration) - .attr('class', 'label leaf_label') - .style('opacity', '1') - this._mapContainer.selectAll('div.leaf_rect').transition() - .duration(thisTreeMap._updateDuration).delay(thisTreeMap._exitDuration) - .style('opacity', '1') - .style('left', function(datum){ return datum.x; }) - .style('top', function(datum){ return datum.y; }) - .style('width', function(datum){ return datum.dx; }) - .style('height', function(datum){ return datum.dy; }); - this._mapContainer.selectAll('div.leaf_label').transition() - .duration(thisTreeMap._updateDuration).delay(thisTreeMap._exitDuration) - .style('opacity', '1') - .style('visibility', function(datum) { - return (datum.dx < 15 || datum.dy < 15) ? 'hidden' : 'visible'; - }) - .style('left', function(datum){ return datum.x; }) - .style('top', function(datum){ return datum.y; }) - .style('width', function(datum) { return datum.dx; }) - .style('height', function(datum) { return datum.dy; }); - var exit = this._mapContainer.selectAll('div.leaf') - .data(leaves, function(datum) { return 'leaf-' + datum.id; }) - .exit(); - exit.selectAll('div.leaf_rect').transition() - .duration(thisTreeMap._exitDuration) - .style('opacity', 0); - exit.selectAll('div.leaf_label').transition() - .duration(thisTreeMap._exitDuration) - .style('opacity', 0); - exit.transition().delay(thisTreeMap._exitDuration + 1).remove(); - - console.log(leaves.length + ' leaves layed out.'); - console.timeEnd('_handleLeaves'); -} - -D3SymbolTreeMap.prototype._makeSymbolBucketBackgroundImage = function(datum) { - if (!(datum.t === undefined && datum.depth == this._currentMaxDepth)) { - return 'none'; - } - var text = ''; - var lastStop = 0; - for (var x = 0; x < D3SymbolTreeMap._NM_SYMBOL_TYPES.length; x++) { - symbol_type = D3SymbolTreeMap._NM_SYMBOL_TYPES.charAt(x); - var stats = datum.symbol_stats[symbol_type]; - if (stats !== undefined) { - if (text.length !== 0) { - text += ', '; - } - var percent = 100 * 
(stats.size / datum.value); - var nowStop = lastStop + percent; - var tempcolor = D3SymbolTreeMap.getColorForType(symbol_type); - var color = d3.rgb(tempcolor).toString(); - text += color + ' ' + lastStop + '%, ' + color + ' ' + - nowStop + '%'; - lastStop = nowStop; - } - } - return 'linear-gradient(' + (datum.dx > datum.dy ? 'to right' : - 'to bottom') + ', ' + text + ')'; -} - -D3SymbolTreeMap.prototype.pathFor = function(datum) { - if (datum.__path) return datum.__path; - parts=[]; - node = datum; - while (node) { - if (node.k === 'p') { // path node - if(node.n !== '/') parts.unshift(node.n); - } - node = node.parent; - } - datum.__path = '/' + parts.join('/'); - return datum.__path; -} - -D3SymbolTreeMap.prototype._createHighlight = function(datum, selection) { - var x = parseInt(selection.style('left')); - var y = parseInt(selection.style('top')); - var w = parseInt(selection.style('width')); - var h = parseInt(selection.style('height')); - datum.highlight = this._mapContainer.append('div') - .attr('id', 'h-' + datum.id) - .attr('class', 'highlight') - .style('pointer-events', 'none') - .style('-webkit-user-select', 'none') - .style('z-index', '999999') - .style('position', 'absolute') - .style('top', y-2) - .style('left', x-2) - .style('width', w+4) - .style('height', h+4) - .style('margin', 0) - .style('padding', 0) - .style('border', '4px outset rgba(250,40,200,0.9)') - .style('box-sizing', 'border-box') - .style('opacity', 0.0); -} - -D3SymbolTreeMap.prototype._showHighlight = function(datum, selection) { - if (datum === this._currentRoot) return; - if (datum.highlight === undefined) { - this._createHighlight(datum, selection); - } - datum.highlight.transition().duration(200).style('opacity', 1.0); -} - -D3SymbolTreeMap.prototype._hideHighlight = function(datum, selection) { - if (datum.highlight === undefined) return; - datum.highlight.transition().duration(750) - .style('opacity', 0) - .each('end', function(){ - if (datum.highlight) 
datum.highlight.remove(); - delete datum.highlight; - }); -} - -D3SymbolTreeMap.prototype._createInfoBox = function() { - return d3.select('body') - .append('div') - .attr('id', 'infobox') - .style('z-index', '2147483647') // (2^31) - 1: Hopefully safe :) - .style('position', 'absolute') - .style('visibility', 'hidden') - .style('background-color', 'rgba(255,255,255, 0.9)') - .style('border', '1px solid black') - .style('padding', '10px') - .style('-webkit-user-select', 'none') - .style('box-shadow', '3px 3px rgba(70,70,70,0.5)') - .style('border-radius', '10px') - .style('white-space', 'nowrap'); -} - -D3SymbolTreeMap.prototype._showInfoBox = function(datum) { - this.infobox.text(''); - var numSymbols = 0; - var sizeish = this._byteify(datum.value); - if (!this.methodCountMode) { - sizeish = D3SymbolTreeMap._pretty(datum.value) + ' bytes (' + sizeish + ')' - } - if (datum.k === 'p' || datum.k === 'b') { // path or bucket - if (datum.symbol_stats) { // can be empty if filters are applied - for (var x = 0; x < D3SymbolTreeMap._NM_SYMBOL_TYPES.length; x++) { - symbol_type = D3SymbolTreeMap._NM_SYMBOL_TYPES.charAt(x); - var stats = datum.symbol_stats[symbol_type]; - if (stats !== undefined) numSymbols += stats.count; - } - } - } else if (datum.k === 's') { // symbol - numSymbols = 1; - } - - if (datum.k === 'p' && !datum.lastPathElement) { - this.infobox.append('div').text('Directory: ' + this.pathFor(datum)) - this.infobox.append('div').text('Size: ' + sizeish); - } else { - if (datum.k === 'p') { // path - this.infobox.append('div').text('File: ' + this.pathFor(datum)) - this.infobox.append('div').text('Size: ' + sizeish); - } else if (datum.k === 'b') { // bucket - this.infobox.append('div').text('Symbol Bucket: ' + - D3SymbolTreeMap._getSymbolDescription(datum.t)); - this.infobox.append('div').text('Count: ' + numSymbols); - this.infobox.append('div').text('Size: ' + sizeish); - this.infobox.append('div').text('Location: ' + this.pathFor(datum)) - } else if 
(datum.k === 's') { // symbol - this.infobox.append('div').text('Symbol: ' + datum.n); - this.infobox.append('div').text('Type: ' + - D3SymbolTreeMap._getSymbolDescription(datum.t)); - this.infobox.append('div').text('Size: ' + sizeish); - this.infobox.append('div').text('Location: ' + this.pathFor(datum)) - } - } - if (datum.k === 'p' && !this.methodCountMode) { - this.infobox.append('div') - .text('Number of symbols: ' + D3SymbolTreeMap._pretty(numSymbols)); - // can be empty if filters are applied - if (datum.symbol_stats) { - var table = this.infobox.append('table') - .attr('border', 1).append('tbody'); - var header = table.append('tr'); - header.append('th').text('Type'); - header.append('th').text('Count'); - header.append('th') - .style('white-space', 'nowrap') - .text('Total Size (Bytes)'); - for (var x = 0; x < D3SymbolTreeMap._NM_SYMBOL_TYPES.length; x++) { - symbol_type = D3SymbolTreeMap._NM_SYMBOL_TYPES.charAt(x); - var stats = datum.symbol_stats[symbol_type]; - if (stats !== undefined) { - var tr = table.append('tr'); - tr.append('td') - .style('white-space', 'nowrap') - .text(D3SymbolTreeMap._getSymbolDescription( - symbol_type)); - tr.append('td').text(D3SymbolTreeMap._pretty(stats.count)); - tr.append('td').text(D3SymbolTreeMap._pretty(stats.size)); - } - } - } - } - this.infobox.style('visibility', 'visible'); -} - -D3SymbolTreeMap.prototype._hideInfoBox = function(datum) { - this.infobox.style('visibility', 'hidden'); -} - -D3SymbolTreeMap.prototype._moveInfoBox = function(event) { - var element = document.getElementById('infobox'); - var w = element.offsetWidth; - var h = element.offsetHeight; - var offsetLeft = 10; - var offsetTop = 10; - - var rightLimit = window.innerWidth; - var rightEdge = event.pageX + offsetLeft + w; - if (rightEdge > rightLimit) { - // Too close to screen edge, reflect around the cursor - offsetLeft = -1 * (w + offsetLeft); - } - - var bottomLimit = window.innerHeight; - var bottomEdge = event.pageY + offsetTop + h; - if 
(bottomEdge > bottomLimit) { - // Too close ot screen edge, reflect around the cursor - offsetTop = -1 * (h + offsetTop); - } - - this.infobox.style('top', (event.pageY + offsetTop) + 'px') - .style('left', (event.pageX + offsetLeft) + 'px'); -} - -D3SymbolTreeMap.prototype.biggestSymbols = function(maxRecords) { - var result = undefined; - var smallest = undefined; - var sortFunction = function(a,b) { - var result = b.value - a.value; - if (result !== 0) return result; // sort by size - var pathA = treemap.pathFor(a); // sort by path - var pathB = treemap.pathFor(b); - if (pathA > pathB) return 1; - if (pathB > pathA) return -1; - return a.n - b.n; // sort by symbol name - }; - this.visitFromDisplayedRoot(function(datum) { - if (datum.children) return; // ignore non-leaves - if (!result) { // first element - result = [datum]; - smallest = datum.value; - return; - } - if (result.length < maxRecords) { // filling the array - result.push(datum); - return; - } - if (datum.value > smallest) { // array is already full - result.push(datum); - result.sort(sortFunction); - result.pop(); // get rid of smallest element - smallest = result[maxRecords - 1].value; // new threshold for entry - } - }); - result.sort(sortFunction); - return result; -} - -D3SymbolTreeMap.prototype.biggestPaths = function(maxRecords) { - var result = undefined; - var smallest = undefined; - var sortFunction = function(a,b) { - var result = b.value - a.value; - if (result !== 0) return result; // sort by size - var pathA = treemap.pathFor(a); // sort by path - var pathB = treemap.pathFor(b); - if (pathA > pathB) return 1; - if (pathB > pathA) return -1; - console.log('warning, multiple entries for the same path: ' + pathA); - return 0; // should be impossible - }; - this.visitFromDisplayedRoot(function(datum) { - if (!datum.lastPathElement) return; // ignore non-files - if (!result) { // first element - result = [datum]; - smallest = datum.value; - return; - } - if (result.length < maxRecords) { // 
filling the array - result.push(datum); - return; - } - if (datum.value > smallest) { // array is already full - result.push(datum); - result.sort(sortFunction); - result.pop(); // get rid of smallest element - smallest = result[maxRecords - 1].value; // new threshold for entry - } - }); - result.sort(sortFunction); - return result; -} diff --git a/chromium/tools/binary_size/libsupersize/template/index.html b/chromium/tools/binary_size/libsupersize/template/index.html deleted file mode 100644 index 88df14d85af..00000000000 --- a/chromium/tools/binary_size/libsupersize/template/index.html +++ /dev/null @@ -1,517 +0,0 @@ - - - -Binary Size Analysis - - - - - - - -
    - - [?] -
    -
    - Reports: - - -
    -
    - Binary Size Analysis -
    Double-click a box to zoom in, double-click outermost title to zoom out. -
    - - - - diff --git a/chromium/tools/binary_size/libsupersize/template/test-data-generator.html b/chromium/tools/binary_size/libsupersize/template/test-data-generator.html deleted file mode 100644 index 9c6790a8f9e..00000000000 --- a/chromium/tools/binary_size/libsupersize/template/test-data-generator.html +++ /dev/null @@ -1,157 +0,0 @@ - - - - - - - -This script generates sample data for use in D3SymbolTreeMap, and can be used -for testing. - - - - diff --git a/chromium/tools/binary_size/libsupersize/testdata/mock_output_directory/args.gn b/chromium/tools/binary_size/libsupersize/testdata/mock_output_directory/args.gn deleted file mode 100644 index b8526773c3d..00000000000 --- a/chromium/tools/binary_size/libsupersize/testdata/mock_output_directory/args.gn +++ /dev/null @@ -1,2 +0,0 @@ -var1 = true -var2 = "foo" diff --git a/chromium/tools/binary_size/libsupersize/testdata/mock_source_directory/out/Release/args.gn b/chromium/tools/binary_size/libsupersize/testdata/mock_source_directory/out/Release/args.gn new file mode 100644 index 00000000000..b8526773c3d --- /dev/null +++ b/chromium/tools/binary_size/libsupersize/testdata/mock_source_directory/out/Release/args.gn @@ -0,0 +1,2 @@ +var1 = true +var2 = "foo" diff --git a/chromium/tools/binary_size/libsupersize/third_party/gvr-android-sdk/libgvr_shim_static_arm.a b/chromium/tools/binary_size/libsupersize/third_party/gvr-android-sdk/libgvr_shim_static_arm.a deleted file mode 100644 index 52881b6f2a6..00000000000 --- a/chromium/tools/binary_size/libsupersize/third_party/gvr-android-sdk/libgvr_shim_static_arm.a +++ /dev/null @@ -1,3 +0,0 @@ -This file has to exist so that it can be checked for whether or not it is a -thin archive. Thin archives start with "!\n". As you can see, this file is -not a thin archive. 
diff --git a/chromium/tools/binary_size/supersize.pydeps b/chromium/tools/binary_size/supersize.pydeps index c9ded25ad4d..5b4d56aca49 100644 --- a/chromium/tools/binary_size/supersize.pydeps +++ b/chromium/tools/binary_size/supersize.pydeps @@ -57,4 +57,7 @@ libsupersize/match_util.py libsupersize/models.py libsupersize/ninja_parser.py libsupersize/nm.py +libsupersize/obj_analyzer.py libsupersize/path_util.py +libsupersize/start_server.py +libsupersize/string_extract.py diff --git a/chromium/tools/binary_size/trybot_commit_size_checker.py b/chromium/tools/binary_size/trybot_commit_size_checker.py new file mode 100755 index 00000000000..664be053c99 --- /dev/null +++ b/chromium/tools/binary_size/trybot_commit_size_checker.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python +# Copyright 2018 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Fails if a try job increases binary size unexpectedly.""" + +import argparse +import sys + + +_MAX_UNNOTICED_INCREASE = 16 * 1024 + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--author', help='CL author') + parser.add_argument('--resource-sizes-diff', + help='Path to resource sizes diff produced by ' + '"diagnose_bloat.py diff sizes".') + args = parser.parse_args() + + # Last line looks like: + # MonochromePublic.apk_Specifics normalized apk size=1234 + with open(args.resource_sizes_diff) as f: + last_line = f.readlines()[-1] + size_delta = int(last_line.partition('=')[2]) + + is_roller = '-autoroll' in args.author + + # Useful for bot debugging to have these printed out: + print 'Is Roller:', is_roller + print 'Increase:', size_delta + + if size_delta > _MAX_UNNOTICED_INCREASE and not is_roller: + # Failure message printed to stderr, so flush first. 
+ sys.stdout.flush() + failure_message = """ + +Binary size increase is non-trivial (where "non-trivial" means the normalized \ +size increased by more than {} bytes). + +Please look at the symbol diffs from the "Show Resource Sizes Diff" and the \ +"Show Supersize Diff" bot steps. Try and understand the growth and see if it \ +can be mitigated. There is guidance at: + +https://chromium.googlesource.com/chromium/src/+/master/docs/speed/apk_size_regressions.md#Debugging-Apk-Size-Increase + +If the growth is expected / justified, then you can bypass this bot failure by \ +adding "Binary-Size: $JUSTIFICATION" to your commit description. Here are some \ +examples: + +Binary-Size: Increase is due to translations and so cannot be avoided. +Binary-Size: Increase is due to new images, which are already optimally encoded. +Binary-Size: Increase is temporary due to a "new way" / "old way" refactoring. + It should go away once the "old way" is removed. +Binary-Size: Increase is temporary and will be reverted before next branch cut. +Binary-Size: Increase needed to reduce RAM of a common user flow. +Binary-Size: Increase needed to reduce runtime of a common user flow. +Binary-Size: Increase needed to implement a feature, and I've already spent a + non-trivial amount of time trying to reduce its size. +""".format(_MAX_UNNOTICED_INCREASE) + # Make blank lines not blank prevent them from being stripped. 
+ # https://crbug.com/855671 + failure_message.replace('\n\n', '\n.\n') + sys.exit(failure_message) + + +if __name__ == '__main__': + main() diff --git a/chromium/tools/cfi/blacklist.txt b/chromium/tools/cfi/blacklist.txt index 2301ebdb4fd..41e5b4d1148 100644 --- a/chromium/tools/cfi/blacklist.txt +++ b/chromium/tools/cfi/blacklist.txt @@ -151,7 +151,6 @@ src:*audio/pulse/pulse_stubs.cc src:*chrome/browser/speech/tts_linux.cc src:*device/udev_linux/udev0_loader.cc src:*device/udev_linux/udev1_loader.cc -src:*net/proxy_resolution/proxy_config_service_linux.cc # Calls to auto-generated stubs by ui/gl/generate_bindings.py src:*ui/gl/gl_bindings_autogen_* diff --git a/chromium/tools/checkperms/checkperms.py b/chromium/tools/checkperms/checkperms.py index 7388a7ef2fc..23c58cd12a0 100755 --- a/chromium/tools/checkperms/checkperms.py +++ b/chromium/tools/checkperms/checkperms.py @@ -321,7 +321,8 @@ def check_file(root_path, rel_path): def check_files(root, files): - gen = (check_file(root, f) for f in files if not is_ignored(f)) + gen = (check_file(root, f) for f in files + if not is_ignored(f) and not os.path.isdir(f)) return filter(None, gen) diff --git a/chromium/tools/chrome_proxy/webdriver/bypass.py b/chromium/tools/chrome_proxy/webdriver/bypass.py index 315e2311dba..474ec720c68 100644 --- a/chromium/tools/chrome_proxy/webdriver/bypass.py +++ b/chromium/tools/chrome_proxy/webdriver/bypass.py @@ -188,7 +188,7 @@ class Bypass(IntegrationTest): self.skipTest('This test cannot be run with other experiments.') with TestDriver() as test_driver: test_driver.AddChromeArg('--enable-spdy-proxy-auth') - test_driver.AddChromeArg('--data-reduction-proxy-experiment=test') + test_driver.AddChromeArg('--data-reduction-proxy-experiment=client_test_bypass') # Verify that loading a page other than the specific exp directive test # page loads through the proxy without being bypassed. 
@@ -198,7 +198,7 @@ class Bypass(IntegrationTest): for response in responses: self.assertHasChromeProxyViaHeader(response) - # Verify that loading the exp directive test page with "exp=test" triggers + # Verify that loading the exp directive test page with "exp=client_test_bypass" triggers # a bypass. test_driver.LoadURL('http://check.googlezip.net/exp/') responses = test_driver.GetHTTPResponses() @@ -206,7 +206,7 @@ class Bypass(IntegrationTest): for response in responses: self.assertNotHasChromeProxyViaHeader(response) - # Verify that loading the same test page without setting "exp=test" loads + # Verify that loading the same test page without setting "exp=client_test_bypass" loads # through the proxy without being bypassed. with TestDriver() as test_driver: test_driver.AddChromeArg('--enable-spdy-proxy-auth') diff --git a/chromium/tools/chrome_proxy/webdriver/lite_page.py b/chromium/tools/chrome_proxy/webdriver/lite_page.py index 398cefc3ed1..655abe7c236 100644 --- a/chromium/tools/chrome_proxy/webdriver/lite_page.py +++ b/chromium/tools/chrome_proxy/webdriver/lite_page.py @@ -143,110 +143,6 @@ class LitePage(IntegrationTest): # Verify that a main frame without Lite Page was seen. self.assertEqual(1, non_lite_page_responses) - # Checks that a Lite Page does not have an error when scrolling to the bottom - # of the page and is able to load all resources. This test is only run on - # Android because it depends on window size of the browser. - @AndroidOnly - @ChromeVersionBeforeM(65) - def testLitePageBTFOldFlags(self): - # If it was attempted to run with another experiment, skip this test. 
- if common.ParseFlags().browser_args and ('--data-reduction-proxy-experiment' - in common.ParseFlags().browser_args): - self.skipTest('This test cannot be run with other experiments.') - with TestDriver() as test_driver: - test_driver.AddChromeArg('--enable-spdy-proxy-auth') - # Need to force lite page so target page doesn't fallback to Lo-Fi - test_driver.AddChromeArg('--data-reduction-proxy-lo-fi=always-on') - test_driver.AddChromeArg('--enable-data-reduction-proxy-lite-page') - test_driver.AddChromeArg('--data-reduction-proxy-experiment=alt6') - - # This page is long and has many media resources. - test_driver.LoadURL('http://check.googlezip.net/metrics/index.html') - - # Verify that a Lite Page response for the main frame was seen. - lite_page_responses = 0 - for response in test_driver.GetHTTPResponses(): - # Skip CSI requests when validating Lite Page headers. CSI requests - # aren't expected to have LoFi headers. - if '/csi?' in response.url: - continue - if response.url.startswith('data:'): - continue - if (self.checkLitePageResponse(response)): - lite_page_responses = lite_page_responses + 1 - self.assertEqual(1, lite_page_responses) - - # Scroll to the bottom of the window and ensure scrollHeight increases. - original_scroll_height = test_driver.ExecuteJavascriptStatement( - 'document.body.scrollHeight') - test_driver.ExecuteJavascriptStatement( - 'window.scrollTo(0,Math.max(document.body.scrollHeight));') - # Give some time for loading after scrolling. - time.sleep(2) - new_scroll_height = test_driver.ExecuteJavascriptStatement( - 'document.body.scrollHeight') - self.assertGreater(new_scroll_height, original_scroll_height) - - # Make sure there were more requests that were proxied. 
- responses = test_driver.GetHTTPResponses(override_has_logs=True) - self.assertNotEqual(0, len(responses)) - for response in responses: - self.assertHasChromeProxyViaHeader(response) - self.assertIn(response.status, [200, 204]) - - # Checks that a Lite Page does not have an error when scrolling to the bottom - # of the page and is able to load all resources. This test is only run on - # Android because it depends on window size of the browser. - @AndroidOnly - @ChromeVersionEqualOrAfterM(65) - def testLitePageBTFWithoutFallback(self): - # If it was attempted to run with another experiment, skip this test. - if common.ParseFlags().browser_args and ('--data-reduction-proxy-experiment' - in common.ParseFlags().browser_args): - self.skipTest('This test cannot be run with other experiments.') - with TestDriver() as test_driver: - test_driver.AddChromeArg('--enable-spdy-proxy-auth') - # Need to force 2G speed to get lite-page response. - test_driver.AddChromeArg('--force-effective-connection-type=2G') - - # Need to force lite page so target page doesn't fallback to Lo-Fi - # Set exp=alt6 to force Lite-page response. - test_driver.AddChromeArg('--data-reduction-proxy-experiment=alt6') - - # This page is long and has many media resources. - test_driver.LoadURL('http://check.googlezip.net/metrics/index.html') - - # Verify that a Lite Page response for the main frame was seen. - lite_page_responses = 0 - for response in test_driver.GetHTTPResponses(): - # Skip CSI requests when validating Lite Page headers. CSI requests - # aren't expected to have LoFi headers. - if '/csi?' in response.url: - continue - if response.url.startswith('data:'): - continue - if (self.checkLitePageResponse(response)): - lite_page_responses = lite_page_responses + 1 - self.assertEqual(1, lite_page_responses) - - # Scroll to the bottom of the window and ensure scrollHeight increases. 
- original_scroll_height = test_driver.ExecuteJavascriptStatement( - 'document.body.scrollHeight') - test_driver.ExecuteJavascriptStatement( - 'window.scrollTo(0,Math.max(document.body.scrollHeight));') - # Give some time for loading after scrolling. - time.sleep(2) - new_scroll_height = test_driver.ExecuteJavascriptStatement( - 'document.body.scrollHeight') - self.assertGreater(new_scroll_height, original_scroll_height) - - # Make sure there were more requests that were proxied. - responses = test_driver.GetHTTPResponses(override_has_logs=True) - self.assertNotEqual(0, len(responses)) - for response in responses: - self.assertHasChromeProxyViaHeader(response) - self.assertIn(response.status, [200, 204]) - # Checks that a Nano Lite Page does not have an error when scrolling to the # bottom of the page and is able to load all resources. Nano pages don't # request additional resources when scrolling. This test is only run on @@ -260,10 +156,12 @@ class LitePage(IntegrationTest): self.skipTest('This test cannot be run with other experiments.') with TestDriver() as test_driver: test_driver.AddChromeArg('--enable-spdy-proxy-auth') + test_driver.AddChromeArg('--enable-features=' + 'Previews,DataReductionProxyDecidesTransform') # Need to force 2G speed to get lite-page response. test_driver.AddChromeArg('--force-effective-connection-type=2G') - # Set exp=alt2 to force Nano response. - test_driver.AddChromeArg('--data-reduction-proxy-experiment=alt2') + # Set exp=client_test_nano to force Nano response. + test_driver.AddChromeArg('--data-reduction-proxy-experiment=client_test_nano') # This page is long and has many media resources. test_driver.LoadURL('http://check.googlezip.net/metrics/index.html') @@ -497,7 +395,7 @@ class LitePage(IntegrationTest): 'Previews,DataReductionProxyDecidesTransform') # Need to force 2G speed to get a preview. test_driver.AddChromeArg('--force-effective-connection-type=2G') - # Set exp=ihdp_integration to force iCASPR response. 
+ # Set exp=client_test_icaspr to force iCASPR response. test_driver.AddChromeArg( '--data-reduction-proxy-experiment=ihdp_integration') diff --git a/chromium/tools/chrome_proxy/webdriver/lofi.py b/chromium/tools/chrome_proxy/webdriver/lofi.py index 8c707ee6ed6..050be5b1f01 100644 --- a/chromium/tools/chrome_proxy/webdriver/lofi.py +++ b/chromium/tools/chrome_proxy/webdriver/lofi.py @@ -468,5 +468,48 @@ class LoFi(IntegrationTest): self.assertNotEqual(0, intervention_headers) + # Checks that Client LoFi range requests that go through the Data Reduction + # Proxy are returned correctly. + @ChromeVersionEqualOrAfterM(62) + def testClientLoFiRangeRequestThroughDataReductionProxy(self): + with TestDriver() as test_driver: + test_driver.AddChromeArg('--enable-spdy-proxy-auth') + # Enable Previews and Client-side LoFi, but disable server previews in + # order to force Chrome to use Client-side LoFi for the images on the + # page. + test_driver.AddChromeArg('--enable-features=Previews,PreviewsClientLoFi') + test_driver.AddChromeArg( + '--disable-features=DataReductionProxyDecidesTransform') + + test_driver.AddChromeArg( + '--force-fieldtrial-params=NetworkQualityEstimator.Enabled:' + 'force_effective_connection_type/2G,' + 'PreviewsClientLoFi.Enabled:' + 'max_allowed_effective_connection_type/4G') + + test_driver.AddChromeArg( + '--force-fieldtrials=NetworkQualityEstimator/Enabled/' + 'PreviewsClientLoFi/Enabled') + + # Fetch a non-SSL page with multiple images on it, such that the images + # are fetched through the Data Reduction Proxy. 
+ test_driver.LoadURL('http://check.googlezip.net/static/index.html') + + image_response_count = 0 + for response in test_driver.GetHTTPResponses(): + if response.url.endswith('.png'): + self.assertHasChromeProxyViaHeader(response) + self.assertIn('range', response.request_headers) + self.assertIn('content-range', response.response_headers) + self.assertTrue(response.response_headers['content-range'].startswith( + 'bytes 0-2047/')) + image_response_count = image_response_count + 1 + + self.assertNotEqual(0, image_response_count) + + # Verify Lo-Fi previews info bar recorded. + histogram = test_driver.GetHistogram('Previews.InfoBarAction.LoFi', 5) + self.assertEqual(1, histogram['count']) + if __name__ == '__main__': IntegrationTest.RunAllTests() diff --git a/chromium/tools/chrome_proxy/webdriver/variations_combinations.py b/chromium/tools/chrome_proxy/webdriver/variations_combinations.py index 20c565ac2f8..fc6d36d5408 100644 --- a/chromium/tools/chrome_proxy/webdriver/variations_combinations.py +++ b/chromium/tools/chrome_proxy/webdriver/variations_combinations.py @@ -42,12 +42,12 @@ def GetExperimentArgs(): elif platform.system().lower() == 'linux': my_platform = 'linux' elif platform.system().lower() == 'windows': - my_platform = 'win' + my_platform = 'windows' elif platform.system().lower() == 'darwin': my_platform = 'mac' else: raise Exception('unknown platform!') - return fieldtrial_util.GenerateArgs(config_path, my_platform) + return fieldtrial_util.GenerateArgs(config_path, [my_platform]) def GenerateTestSuites(): """A generator function that yields non-blacklisted tests to run. 
diff --git a/chromium/tools/clang/base_bind_rewriters/BaseBindRewriters.cpp b/chromium/tools/clang/base_bind_rewriters/BaseBindRewriters.cpp index b906f7ad7dd..4f6c068e54b 100644 --- a/chromium/tools/clang/base_bind_rewriters/BaseBindRewriters.cpp +++ b/chromium/tools/clang/base_bind_rewriters/BaseBindRewriters.cpp @@ -161,9 +161,8 @@ class BindOnceRewriter : public MatchFinder::MatchCallback, public Rewriter { auto constructor_conversion = cxxConstructExpr( is_once_callback, argumentCountIs(1), hasArgument(0, ignoringImplicit(parameter_construction))); - auto implicit_conversion = implicitCastExpr( - is_once_callback, hasSourceExpression(constructor_conversion)); - return implicit_conversion; + return implicitCastExpr(is_once_callback, + hasSourceExpression(constructor_conversion)); } void run(const MatchFinder::MatchResult& result) override { @@ -577,6 +576,66 @@ class AddStdMoveRewriter : public MatchFinder::MatchCallback, public Rewriter { Replacements* replacements_; }; +// Remove base::AdaptCallbackForRepeating() where resulting +// base::RepeatingCallback is implicitly converted into base::OnceCallback. 
+// Example: +// // Before +// base::PostTask( +// FROM_HERE, +// base::AdaptCallbackForRepeating(base::BindOnce(&Foo))); +// base::OnceCallback cb = base::AdaptCallbackForRepeating( +// base::OnceBind(&Foo)); +// +// // After +// base::PostTask(FROM_HERE, base::BindOnce(&Foo)); +// base::OnceCallback cb = base::BindOnce(&Foo); +class AdaptCallbackForRepeatingRewriter : public MatchFinder::MatchCallback, + public Rewriter { + public: + explicit AdaptCallbackForRepeatingRewriter(Replacements* replacements) + : replacements_(replacements) {} + + StatementMatcher GetMatcher() { + auto is_once_callback = hasType(hasCanonicalType(hasDeclaration( + classTemplateSpecializationDecl(hasName("::base::OnceCallback"))))); + auto is_repeating_callback = + hasType(hasCanonicalType(hasDeclaration(classTemplateSpecializationDecl( + hasName("::base::RepeatingCallback"))))); + + auto adapt_callback_call = + callExpr( + callee(namedDecl(hasName("::base::AdaptCallbackForRepeating")))) + .bind("target"); + auto parameter_construction = + cxxConstructExpr(is_repeating_callback, argumentCountIs(1), + hasArgument(0, ignoringImplicit(adapt_callback_call))); + auto constructor_conversion = cxxConstructExpr( + is_once_callback, argumentCountIs(1), + hasArgument(0, ignoringImplicit(parameter_construction))); + return implicitCastExpr(is_once_callback, + hasSourceExpression(constructor_conversion)); + } + + void run(const MatchFinder::MatchResult& result) override { + auto* target = result.Nodes.getNodeAs("target"); + + auto left = clang::CharSourceRange::getTokenRange( + result.SourceManager->getSpellingLoc(target->getLocStart()), + result.SourceManager->getSpellingLoc(target->getArg(0)->getExprLoc()) + .getLocWithOffset(-1)); + + // We use " " as replacement to work around https://crbug.com/861886. 
+ replacements_->emplace_back(*result.SourceManager, left, " "); + auto r_paren = clang::CharSourceRange::getTokenRange( + result.SourceManager->getSpellingLoc(target->getRParenLoc()), + result.SourceManager->getSpellingLoc(target->getRParenLoc())); + replacements_->emplace_back(*result.SourceManager, r_paren, " "); + } + + private: + Replacements* replacements_; +}; + llvm::cl::extrahelp common_help(CommonOptionsParser::HelpMessage); llvm::cl::OptionCategory rewriter_category("Rewriter Options"); @@ -588,6 +647,7 @@ Available rewriters are: bind_to_bind_once pass_by_value add_std_move + remove_unneeded_adapt_callback The default is remove_unneeded_passed. )"), llvm::cl::init("remove_unneeded_passed"), @@ -623,6 +683,12 @@ int main(int argc, const char* argv[]) { auto add_std_move = llvm::make_unique(&replacements); match_finder.addMatcher(add_std_move->GetMatcher(), add_std_move.get()); rewriter = std::move(add_std_move); + } else if (rewriter_option == "remove_unneeded_adapt_callback") { + auto remove_unneeded_adapt_callback = + llvm::make_unique(&replacements); + match_finder.addMatcher(remove_unneeded_adapt_callback->GetMatcher(), + remove_unneeded_adapt_callback.get()); + rewriter = std::move(remove_unneeded_adapt_callback); } else { abort(); } diff --git a/chromium/tools/clang/blink_gc_plugin/BlinkGCPlugin.cpp b/chromium/tools/clang/blink_gc_plugin/BlinkGCPlugin.cpp index ff6af461330..36db12aba53 100644 --- a/chromium/tools/clang/blink_gc_plugin/BlinkGCPlugin.cpp +++ b/chromium/tools/clang/blink_gc_plugin/BlinkGCPlugin.cpp @@ -37,8 +37,6 @@ class BlinkGCPluginAction : public PluginASTAction { options_.warn_unneeded_finalizer = true; } else if (arg == "enable-weak-members-in-unmanaged-classes") { options_.enable_weak_members_in_unmanaged_classes = true; - } else if (arg == "warn-trace-wrappers-missing-base-dispatch") { - options_.warn_trace_wrappers_missing_base_dispatch = true; } else { llvm::errs() << "Unknown blink-gc-plugin argument: " << arg << "\n"; 
return false; diff --git a/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.cpp b/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.cpp index 4deee9cb369..7768fe355f3 100644 --- a/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.cpp +++ b/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.cpp @@ -13,7 +13,6 @@ #include "CheckFinalizerVisitor.h" #include "CheckGCRootsVisitor.h" #include "CheckTraceVisitor.h" -#include "CheckTraceWrappersVisitor.h" #include "CollectVisitor.h" #include "JsonWriter.h" #include "RecordInfo.h" @@ -119,11 +118,6 @@ void BlinkGCPluginConsumer::HandleTranslationUnit(ASTContext& context) { for (const auto& method : visitor.trace_decls()) CheckTracingMethod(method); - if (options_.warn_trace_wrappers_missing_base_dispatch) { - for (const auto& method : visitor.trace_wrapper_decls()) - CheckWrapperTracingMethod(method); - } - if (json_) { json_->CloseList(); delete json_; @@ -531,16 +525,6 @@ void BlinkGCPluginConsumer::CheckTracingMethod(CXXMethodDecl* method) { CheckTraceOrDispatchMethod(parent, method); } -void BlinkGCPluginConsumer::CheckWrapperTracingMethod(CXXMethodDecl* method) { - RecordInfo* parent = cache_.Lookup(method->getParent()); - if (IsIgnored(parent)) - return; - - Config::TraceWrappersMethodType trace_wrappers_type = - Config::GetTraceWrappersMethodType(method); - CheckTraceWrappersMethod(parent, method, trace_wrappers_type); -} - void BlinkGCPluginConsumer::CheckTraceOrDispatchMethod( RecordInfo* parent, CXXMethodDecl* method) { @@ -580,18 +564,6 @@ void BlinkGCPluginConsumer::CheckTraceMethod( } } -void BlinkGCPluginConsumer::CheckTraceWrappersMethod( - RecordInfo* parent, - clang::CXXMethodDecl* trace_wrappers, - Config::TraceWrappersMethodType trace_wrappers_type) { - CheckTraceWrappersVisitor visitor(trace_wrappers, parent, &cache_); - visitor.TraverseCXXMethodDecl(trace_wrappers); - - for (auto& base : parent->GetBases()) - if (!base.second.IsProperlyWrapperTraced()) - 
reporter_.BaseRequiresWrapperTracing(parent, trace_wrappers, base.first); -} - void BlinkGCPluginConsumer::DumpClass(RecordInfo* info) { if (!json_) return; diff --git a/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.h b/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.h index d76ccf236e0..bcfb3afbd59 100644 --- a/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.h +++ b/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.h @@ -57,8 +57,6 @@ class BlinkGCPluginConsumer : public clang::ASTConsumer { // This is the main entry for tracing method definitions. void CheckTracingMethod(clang::CXXMethodDecl* method); - void CheckWrapperTracingMethod(clang::CXXMethodDecl* method); - // Determine what type of tracing method this is (dispatch or trace). void CheckTraceOrDispatchMethod(RecordInfo* parent, clang::CXXMethodDecl* method); @@ -68,11 +66,6 @@ class BlinkGCPluginConsumer : public clang::ASTConsumer { clang::CXXMethodDecl* trace, Config::TraceMethodType trace_type); - void CheckTraceWrappersMethod( - RecordInfo* parent, - clang::CXXMethodDecl* trace_wrappers, - Config::TraceWrappersMethodType trace_wrappers_type); - void DumpClass(RecordInfo* info); // Adds either a warning or error, based on the current handling of -Werror. diff --git a/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginOptions.h b/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginOptions.h index f69976d9824..4af8950f11f 100644 --- a/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginOptions.h +++ b/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginOptions.h @@ -30,9 +30,6 @@ struct BlinkGCPluginOptions { // TODO(sof): remove this option once safely rolled out. bool enable_weak_members_in_unmanaged_classes = false; - // Warn on missing dispatches to base class TraceWrappers. 
- bool warn_trace_wrappers_missing_base_dispatch = false; - std::set ignored_classes; std::set checked_namespaces; std::vector ignored_directories; diff --git a/chromium/tools/clang/blink_gc_plugin/CMakeLists.txt b/chromium/tools/clang/blink_gc_plugin/CMakeLists.txt index 5188def93de..1429f0b9ee1 100644 --- a/chromium/tools/clang/blink_gc_plugin/CMakeLists.txt +++ b/chromium/tools/clang/blink_gc_plugin/CMakeLists.txt @@ -9,7 +9,6 @@ set(plugin_sources CheckFinalizerVisitor.cpp CheckGCRootsVisitor.cpp CheckTraceVisitor.cpp - CheckTraceWrappersVisitor.cpp CollectVisitor.cpp Config.cpp DiagnosticsReporter.cpp diff --git a/chromium/tools/clang/blink_gc_plugin/CheckTraceWrappersVisitor.cpp b/chromium/tools/clang/blink_gc_plugin/CheckTraceWrappersVisitor.cpp deleted file mode 100644 index 106f9833551..00000000000 --- a/chromium/tools/clang/blink_gc_plugin/CheckTraceWrappersVisitor.cpp +++ /dev/null @@ -1,103 +0,0 @@ -// Copyright 2018 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#include "CheckTraceWrappersVisitor.h" - -#include - -#include "Config.h" - -using namespace clang; - -CheckTraceWrappersVisitor::CheckTraceWrappersVisitor(CXXMethodDecl* trace, - RecordInfo* info, - RecordCache* cache) - : trace_wrappers_(trace), info_(info), cache_(cache) {} - -bool CheckTraceWrappersVisitor::VisitCallExpr(CallExpr* call) { - CheckTraceBaseCall(call); - return true; -} - -bool CheckTraceWrappersVisitor::IsTraceWrappersCallName( - const std::string& name) { - // See CheckTraceVisitor::IsTraceCallName. - return name == trace_wrappers_->getName(); -} - -bool CheckTraceWrappersVisitor::CheckTraceBaseCall(CallExpr* call) { - // Checks for "Base::TraceWrappers(visitor)"-like calls. - - // For example, if we've got "Base::TraceWrappers(visitor)" as |call|, - // callee_record will be "Base", and func_name will be "TraceWrappers". 
- CXXRecordDecl* callee_record = nullptr; - std::string func_name; - - if (MemberExpr* callee = dyn_cast(call->getCallee())) { - if (!callee->hasQualifier()) - return false; - - FunctionDecl* trace_decl = dyn_cast(callee->getMemberDecl()); - if (!trace_decl || !Config::IsTraceWrappersMethod(trace_decl)) - return false; - - const Type* type = callee->getQualifier()->getAsType(); - if (!type) - return false; - - callee_record = type->getAsCXXRecordDecl(); - func_name = trace_decl->getName(); - } - - if (!callee_record) - return false; - - if (!IsTraceWrappersCallName(func_name)) - return false; - - for (auto& base : info_->GetBases()) { - // We want to deal with omitted TraceWrappers() function in an intermediary - // class in the class hierarchy, e.g.: - // class A : public TraceWrapperBase { TraceWrappers() { ... } }; - // class B : public A { - // /* No TraceWrappers(); have nothing to trace. */ - // }; - // class C : public B { TraceWrappers() { B::TraceWrappers(visitor); } } - // where, B::TraceWrappers() is actually A::TraceWrappers(), and in some - // cases we get A as |callee_record| instead of B. We somehow need to mark B - // as wrapper traced if we find A::TraceWrappers() call. - // - // To solve this, here we keep going up the class hierarchy as long as - // they are not required to have a trace method. The implementation is - // a simple DFS, where |base_records| represents the set of base classes - // we need to visit. - - std::vector base_records; - base_records.push_back(base.first); - - while (!base_records.empty()) { - CXXRecordDecl* base_record = base_records.back(); - base_records.pop_back(); - - if (base_record == callee_record) { - // If we find a matching trace method, pretend the user has written - // a correct trace() method of the base; in the example above, we - // find A::trace() here and mark B as correctly traced. 
- base.second.MarkWrapperTraced(); - return true; - } - - if (RecordInfo* base_info = cache_->Lookup(base_record)) { - if (!base_info->RequiresTraceWrappersMethod()) { - // If this base class is not required to have a trace method, then - // the actual trace method may be defined in an ancestor. - for (auto& inner_base : base_info->GetBases()) - base_records.push_back(inner_base.first); - } - } - } - } - - return false; -} diff --git a/chromium/tools/clang/blink_gc_plugin/CheckTraceWrappersVisitor.h b/chromium/tools/clang/blink_gc_plugin/CheckTraceWrappersVisitor.h deleted file mode 100644 index 619046583cc..00000000000 --- a/chromium/tools/clang/blink_gc_plugin/CheckTraceWrappersVisitor.h +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2018 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#ifndef TOOLS_BLINK_GC_PLUGIN_CHECK_TRACE_WRAPPERS_VISITOR_H_ -#define TOOLS_BLINK_GC_PLUGIN_CHECK_TRACE_WRAPPERS_VISITOR_H_ - -#include - -#include "RecordInfo.h" -#include "clang/AST/AST.h" -#include "clang/AST/RecursiveASTVisitor.h" - -class RecordCache; -class RecordInfo; - -// This visitor checks a wrapper tracing method by traversing its body. -// - A base is wrapper traced if a base-qualified call to a trace method is -// found. 
-class CheckTraceWrappersVisitor - : public clang::RecursiveASTVisitor { - public: - CheckTraceWrappersVisitor(clang::CXXMethodDecl* trace, - RecordInfo* info, - RecordCache* cache); - - bool VisitCallExpr(clang::CallExpr* call); - - private: - bool IsTraceWrappersCallName(const std::string& name); - - bool CheckTraceBaseCall(clang::CallExpr* call); - - clang::CXXMethodDecl* trace_wrappers_; - RecordInfo* info_; - RecordCache* cache_; -}; - -#endif // TOOLS_BLINK_GC_PLUGIN_CHECK_TRACE_WRAPPERS_VISITOR_H_ diff --git a/chromium/tools/clang/blink_gc_plugin/CollectVisitor.cpp b/chromium/tools/clang/blink_gc_plugin/CollectVisitor.cpp index 0f999a4ad1d..89af6fc2107 100644 --- a/chromium/tools/clang/blink_gc_plugin/CollectVisitor.cpp +++ b/chromium/tools/clang/blink_gc_plugin/CollectVisitor.cpp @@ -19,10 +19,6 @@ CollectVisitor::MethodVector& CollectVisitor::trace_decls() { return trace_decls_; } -CollectVisitor::MethodVector& CollectVisitor::trace_wrapper_decls() { - return trace_wrapper_decls_; -} - bool CollectVisitor::VisitCXXRecordDecl(CXXRecordDecl* record) { if (record->hasDefinition() && record->isCompleteDefinition()) record_decls_.push_back(record); @@ -34,8 +30,6 @@ bool CollectVisitor::VisitCXXMethodDecl(CXXMethodDecl* method) { if (Config::IsTraceMethod(method)) { trace_decls_.push_back(method); } - if (Config::IsTraceWrappersMethod(method)) - trace_wrapper_decls_.push_back(method); } return true; } diff --git a/chromium/tools/clang/blink_gc_plugin/CollectVisitor.h b/chromium/tools/clang/blink_gc_plugin/CollectVisitor.h index 4adf81904b4..a3ccdd8a331 100644 --- a/chromium/tools/clang/blink_gc_plugin/CollectVisitor.h +++ b/chromium/tools/clang/blink_gc_plugin/CollectVisitor.h @@ -20,7 +20,6 @@ class CollectVisitor : public clang::RecursiveASTVisitor { RecordVector& record_decls(); MethodVector& trace_decls(); - MethodVector& trace_wrapper_decls(); // Collect record declarations, including nested declarations. 
bool VisitCXXRecordDecl(clang::CXXRecordDecl* record); @@ -31,7 +30,6 @@ class CollectVisitor : public clang::RecursiveASTVisitor { private: RecordVector record_decls_; MethodVector trace_decls_; - MethodVector trace_wrapper_decls_; }; #endif // TOOLS_BLINK_GC_PLUGIN_COLLECT_VISITOR_H_ diff --git a/chromium/tools/clang/blink_gc_plugin/Config.cpp b/chromium/tools/clang/blink_gc_plugin/Config.cpp index 4b3a1a3a9b8..df8867a9b29 100644 --- a/chromium/tools/clang/blink_gc_plugin/Config.cpp +++ b/chromium/tools/clang/blink_gc_plugin/Config.cpp @@ -13,7 +13,6 @@ using namespace clang; const char kNewOperatorName[] = "operator new"; const char kCreateName[] = "Create"; const char kTraceName[] = "Trace"; -const char kTraceWrappersName[] = "TraceWrappers"; const char kFinalizeName[] = "FinalizeGarbageCollectedObject"; const char kTraceAfterDispatchName[] = "TraceAfterDispatch"; const char kRegisterWeakMembersName[] = "RegisterWeakMembers"; @@ -29,10 +28,6 @@ const char kIteratorName[] = "iterator"; const char kConstReverseIteratorName[] = "const_reverse_iterator"; const char kReverseIteratorName[] = "reverse_iterator"; -const char* kIgnoredTraceWrapperNames[] = { - "blink::ScriptWrappableVisitor::TraceWrappers", - "blink::WrapperMarkingData::TraceWrappers"}; - bool Config::IsTemplateInstantiation(CXXRecordDecl* record) { ClassTemplateSpecializationDecl* spec = dyn_cast(record); @@ -52,29 +47,3 @@ bool Config::IsTemplateInstantiation(CXXRecordDecl* record) { assert(false && "Unknown template specialization kind"); return false; } - -// static -Config::TraceWrappersMethodType Config::GetTraceWrappersMethodType( - const clang::FunctionDecl* method) { - if (method->getNumParams() != 1) - return NOT_TRACE_WRAPPERS_METHOD; - - const std::string& name = method->getNameAsString(); - const std::string& full_name = method->getQualifiedNameAsString(); - for (size_t i = 0; i < (sizeof(kIgnoredTraceWrapperNames) / - sizeof(kIgnoredTraceWrapperNames[0])); - i++) { - if (full_name == 
kIgnoredTraceWrapperNames[i]) - return NOT_TRACE_WRAPPERS_METHOD; - } - - if (name == kTraceWrappersName) - return TRACE_WRAPPERS_METHOD; - - return NOT_TRACE_WRAPPERS_METHOD; -} - -// static -bool Config::IsTraceWrappersMethod(const clang::FunctionDecl* method) { - return GetTraceWrappersMethodType(method) != NOT_TRACE_WRAPPERS_METHOD; -} diff --git a/chromium/tools/clang/blink_gc_plugin/Config.h b/chromium/tools/clang/blink_gc_plugin/Config.h index 527fe1c3bf3..bd347b89243 100644 --- a/chromium/tools/clang/blink_gc_plugin/Config.h +++ b/chromium/tools/clang/blink_gc_plugin/Config.h @@ -20,7 +20,6 @@ extern const char kNewOperatorName[]; extern const char kCreateName[]; extern const char kTraceName[]; -extern const char kTraceWrappersName[]; extern const char kFinalizeName[]; extern const char kTraceAfterDispatchName[]; extern const char kRegisterWeakMembersName[]; @@ -36,8 +35,6 @@ extern const char kIteratorName[]; extern const char kConstReverseIteratorName[]; extern const char kReverseIteratorName[]; -extern const char* kIgnoredTraceWrapperNames[]; - class Config { public: static bool IsMember(const std::string& name) { @@ -142,10 +139,6 @@ class Config { IsGCMixinBase(name); } - static bool IsTraceWrapperBase(const std::string& name) { - return name == "TraceWrapperBase"; - } - static bool IsIterator(const std::string& name) { return name == kIteratorName || name == kConstIteratorName || name == kReverseIteratorName || name == kConstReverseIteratorName; @@ -246,15 +239,6 @@ class Config { return GetTraceMethodType(method) != NOT_TRACE_METHOD; } - enum TraceWrappersMethodType { - NOT_TRACE_WRAPPERS_METHOD, - TRACE_WRAPPERS_METHOD, - // TODO(mlippautz): TRACE_WRAPPERS_AFTER_DISPATCH_METHOD - }; - - static TraceWrappersMethodType GetTraceWrappersMethodType( - const clang::FunctionDecl* method); - static bool IsTraceWrappersMethod(const clang::FunctionDecl* method); static bool StartsWith(const std::string& str, const std::string& prefix) { diff --git 
a/chromium/tools/clang/blink_gc_plugin/DiagnosticsReporter.cpp b/chromium/tools/clang/blink_gc_plugin/DiagnosticsReporter.cpp index 6e704f2221d..82175817e6e 100644 --- a/chromium/tools/clang/blink_gc_plugin/DiagnosticsReporter.cpp +++ b/chromium/tools/clang/blink_gc_plugin/DiagnosticsReporter.cpp @@ -184,8 +184,6 @@ DiagnosticsReporter::DiagnosticsReporter( diagnostic_.getCustomDiagID(getErrorLevel(), kClassRequiresTraceMethod); diag_base_requires_tracing_ = diagnostic_.getCustomDiagID(getErrorLevel(), kBaseRequiresTracing); - diag_base_requires_wrapper_tracing_ = - diagnostic_.getCustomDiagID(getErrorLevel(), kBaseRequiresWrapperTracing); diag_fields_require_tracing_ = diagnostic_.getCustomDiagID(getErrorLevel(), kFieldsRequireTracing); diag_fields_improperly_traced_ = @@ -318,13 +316,6 @@ void DiagnosticsReporter::BaseRequiresTracing( << base << derived->record(); } -void DiagnosticsReporter::BaseRequiresWrapperTracing(RecordInfo* derived, - CXXMethodDecl* trace, - CXXRecordDecl* base) { - ReportDiagnostic(trace->getLocStart(), diag_base_requires_wrapper_tracing_) - << base << derived->record(); -} - void DiagnosticsReporter::FieldsImproperlyTraced( RecordInfo* info, CXXMethodDecl* trace) { diff --git a/chromium/tools/clang/blink_gc_plugin/DiagnosticsReporter.h b/chromium/tools/clang/blink_gc_plugin/DiagnosticsReporter.h index 784e4d8cc6b..512874d646e 100644 --- a/chromium/tools/clang/blink_gc_plugin/DiagnosticsReporter.h +++ b/chromium/tools/clang/blink_gc_plugin/DiagnosticsReporter.h @@ -30,9 +30,6 @@ class DiagnosticsReporter { void BaseRequiresTracing(RecordInfo* derived, clang::CXXMethodDecl* trace, clang::CXXRecordDecl* base); - void BaseRequiresWrapperTracing(RecordInfo* derived, - clang::CXXMethodDecl* trace, - clang::CXXRecordDecl* base); void FieldsImproperlyTraced(RecordInfo* info, clang::CXXMethodDecl* trace); void ClassContainsInvalidFields( @@ -106,7 +103,6 @@ class DiagnosticsReporter { unsigned diag_class_must_left_mostly_derive_gc_; unsigned 
diag_class_requires_trace_method_; unsigned diag_base_requires_tracing_; - unsigned diag_base_requires_wrapper_tracing_; unsigned diag_fields_require_tracing_; unsigned diag_fields_improperly_traced_; unsigned diag_class_contains_invalid_fields_; diff --git a/chromium/tools/clang/blink_gc_plugin/RecordInfo.cpp b/chromium/tools/clang/blink_gc_plugin/RecordInfo.cpp index 5f4f7ca4808..30a5c745d29 100644 --- a/chromium/tools/clang/blink_gc_plugin/RecordInfo.cpp +++ b/chromium/tools/clang/blink_gc_plugin/RecordInfo.cpp @@ -24,9 +24,7 @@ RecordInfo::RecordInfo(CXXRecordDecl* record, RecordCache* cache) is_declaring_local_trace_(kNotComputed), is_eagerly_finalized_(kNotComputed), determined_trace_methods_(false), - determined_wrapper_trace_methods_(false), trace_method_(0), - trace_wrappers_method_(0), trace_dispatch_method_(0), finalize_dispatch_method_(0), is_gc_derived_(false) {} @@ -78,6 +76,34 @@ bool RecordInfo::IsHeapAllocatedCollection() { return Config::IsGCCollection(name_); } +bool RecordInfo::HasOptionalFinalizer() { + if (!IsHeapAllocatedCollection()) + return false; + // Heap collections may have a finalizer but it is optional (i.e. may be + // delayed until FinalizeGarbageCollectedObject() gets called), unless there + // is an inline buffer. Vector, Deque, and ListHashSet can have an inline + // buffer. + if (name_ != "Vector" && name_ != "Deque" && name_ != "HeapVector" && + name_ != "HeapDeque") + return true; + ClassTemplateSpecializationDecl* tmpl = + dyn_cast(record_); + // These collections require template specialization so tmpl should always be + // non-null for valid code. + if (!tmpl) + return false; + const TemplateArgumentList& args = tmpl->getTemplateArgs(); + if (args.size() < 2) + return true; + TemplateArgument arg = args[1]; + // The second template argument must be void or 0 so there is no inline + // buffer. 
+ return (arg.getKind() == TemplateArgument::Type && + arg.getAsType()->isVoidType()) || + (arg.getKind() == TemplateArgument::Integral && + arg.getAsIntegral().getExtValue() == 0); +} + // Test if a record is derived from a garbage collected base. bool RecordInfo::IsGCDerived() { // If already computed, return the known result. @@ -172,14 +198,6 @@ bool RecordInfo::IsGCAllocated() { return IsGCDerived() || IsHeapAllocatedCollection(); } -bool RecordInfo::IsTraceWrapperBase() { - for (const auto& gc_base : gc_base_names_) { - if (Config::IsTraceWrapperBase(gc_base)) - return true; - } - return false; -} - bool RecordInfo::IsEagerlyFinalized() { if (is_eagerly_finalized_ != kNotComputed) return is_eagerly_finalized_; @@ -302,23 +320,6 @@ bool RecordInfo::RequiresTraceMethod() { return fields_need_tracing_.IsNeeded(); } -bool RecordInfo::RequiresTraceWrappersMethod() { - if (IsStackAllocated()) - return false; - - if (IsTraceWrapperBase()) - return true; - - unsigned bases_with_trace_wrappers = 0; - for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) { - if (it->second.NeedsWrapperTracing().IsNeeded()) - ++bases_with_trace_wrappers; - } - if (bases_with_trace_wrappers > 1) - return true; - return false; -} - // Get the actual tracing method (ie, can be traceAfterDispatch if there is a // dispatch method). CXXMethodDecl* RecordInfo::GetTraceMethod() { @@ -326,11 +327,6 @@ CXXMethodDecl* RecordInfo::GetTraceMethod() { return trace_method_; } -CXXMethodDecl* RecordInfo::GetTraceWrappersMethod() { - DetermineWrapperTracingMethods(); - return trace_wrappers_method_; -} - // Get the static trace dispatch method. 
CXXMethodDecl* RecordInfo::GetTraceDispatchMethod() { DetermineTracingMethods(); @@ -358,16 +354,6 @@ bool RecordInfo::InheritsTrace() { return false; } -bool RecordInfo::InheritsTraceWrappers() { - if (GetTraceWrappersMethod()) - return true; - for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) { - if (it->second.info()->InheritsTraceWrappers()) - return true; - } - return false; -} - CXXMethodDecl* RecordInfo::InheritsNonVirtualTrace() { if (CXXMethodDecl* trace = GetTraceMethod()) return trace->isVirtual() ? 0 : trace; @@ -434,11 +420,7 @@ RecordInfo::Bases* RecordInfo::CollectBases() { TracingStatus status = info->InheritsTrace() ? TracingStatus::Needed() : TracingStatus::Unneeded(); - TracingStatus wrapper_status = info->InheritsTraceWrappers() - ? TracingStatus::Needed() - : TracingStatus::Unneeded(); - bases->push_back( - std::make_pair(base, BasePoint(spec, info, status, wrapper_status))); + bases->push_back(std::make_pair(base, BasePoint(spec, info, status))); } return bases; } @@ -477,36 +459,6 @@ RecordInfo::Fields* RecordInfo::CollectFields() { return fields; } -void RecordInfo::DetermineWrapperTracingMethods() { - if (determined_wrapper_trace_methods_) - return; - determined_wrapper_trace_methods_ = true; - - if (Config::IsTraceWrapperBase(name_)) - return; - - CXXMethodDecl* trace_wrappers = nullptr; - for (Decl* decl : record_->decls()) { - CXXMethodDecl* method = dyn_cast(decl); - if (!method) { - if (FunctionTemplateDecl* func_template = - dyn_cast(decl)) - method = dyn_cast(func_template->getTemplatedDecl()); - } - if (!method) - continue; - - switch (Config::GetTraceWrappersMethodType(method)) { - case Config::TRACE_METHOD: - trace_wrappers = method; - break; - case Config::NOT_TRACE_METHOD: - break; - } - } - trace_wrappers_method_ = trace_wrappers; -} - void RecordInfo::DetermineTracingMethods() { if (determined_trace_methods_) return; @@ -579,6 +531,11 @@ void RecordInfo::DetermineTracingMethods() { // TODO: Add 
classes with a finalize() method that specialize FinalizerTrait. bool RecordInfo::NeedsFinalization() { if (does_need_finalization_ == kNotComputed) { + if (HasOptionalFinalizer()) { + does_need_finalization_ = kFalse; + return does_need_finalization_; + } + // Rely on hasNonTrivialDestructor(), but if the only // identifiable reason for it being true is the presence // of a safely ignorable class as a direct base, @@ -635,18 +592,6 @@ TracingStatus RecordInfo::NeedsTracing(Edge::NeedsTracingOption option) { return fields_need_tracing_; } -TracingStatus RecordInfo::NeedsWrapperTracing() { - if (IsStackAllocated()) - return TracingStatus::Unneeded(); - - for (Bases::iterator it = GetBases().begin(); it != GetBases().end(); ++it) { - if (it->second.info()->NeedsWrapperTracing().IsNeeded()) - return TracingStatus::Needed(); - } - - return TracingStatus::Unneeded(); -} - static bool isInStdNamespace(clang::Sema& sema, NamespaceDecl* ns) { while (ns) { diff --git a/chromium/tools/clang/blink_gc_plugin/RecordInfo.h b/chromium/tools/clang/blink_gc_plugin/RecordInfo.h index 8ec92ec6fc2..3ac082fb5c0 100644 --- a/chromium/tools/clang/blink_gc_plugin/RecordInfo.h +++ b/chromium/tools/clang/blink_gc_plugin/RecordInfo.h @@ -23,35 +23,24 @@ class RecordCache; // A potentially tracable and/or lifetime affecting point in the object graph. 
class GraphPoint { public: - GraphPoint() : traced_(false), wrapper_traced_(false) {} + GraphPoint() : traced_(false) {} virtual ~GraphPoint() {} void MarkTraced() { traced_ = true; } - void MarkWrapperTraced() { wrapper_traced_ = true; } bool IsProperlyTraced() { return traced_ || !NeedsTracing().IsNeeded(); } bool IsInproperlyTraced() { return traced_ && NeedsTracing().IsIllegal(); } - bool IsProperlyWrapperTraced() { - return wrapper_traced_ || !NeedsWrapperTracing().IsNeeded(); - } virtual const TracingStatus NeedsTracing() = 0; - virtual const TracingStatus NeedsWrapperTracing() = 0; private: bool traced_; - bool wrapper_traced_; }; class BasePoint : public GraphPoint { public: BasePoint(const clang::CXXBaseSpecifier& spec, RecordInfo* info, - const TracingStatus& status, - const TracingStatus& wrapper_status) - : spec_(spec), - info_(info), - status_(status), - wrapper_status_(wrapper_status) {} + const TracingStatus& status) + : spec_(spec), info_(info), status_(status) {} const TracingStatus NeedsTracing() { return status_; } - const TracingStatus NeedsWrapperTracing() { return wrapper_status_; } const clang::CXXBaseSpecifier& spec() { return spec_; } RecordInfo* info() { return info_; } @@ -59,7 +48,6 @@ class BasePoint : public GraphPoint { const clang::CXXBaseSpecifier& spec_; RecordInfo* info_; TracingStatus status_; - TracingStatus wrapper_status_; }; class FieldPoint : public GraphPoint { @@ -69,7 +57,6 @@ class FieldPoint : public GraphPoint { const TracingStatus NeedsTracing() { return edge_->NeedsTracing(Edge::kRecursive); } - const TracingStatus NeedsWrapperTracing() { return TracingStatus::Illegal(); } clang::FieldDecl* field() { return field_; } Edge* edge() { return edge_; } @@ -113,7 +100,6 @@ class RecordInfo { bool IsGCAllocated(); bool IsGCFinalized(); bool IsGCMixin(); - bool IsTraceWrapperBase(); bool IsStackAllocated(); bool IsNonNewable(); bool IsOnlyPlacementNewable(); @@ -125,11 +111,9 @@ class RecordInfo { bool RequiresTraceMethod(); 
bool NeedsFinalization(); - bool RequiresTraceWrappersMethod(); bool DeclaresGCMixinMethods(); bool DeclaresLocalTraceMethod(); TracingStatus NeedsTracing(Edge::NeedsTracingOption); - TracingStatus NeedsWrapperTracing(); clang::CXXMethodDecl* InheritsNonVirtualTrace(); bool IsConsideredAbstract(); @@ -143,13 +127,13 @@ class RecordInfo { Fields* CollectFields(); Bases* CollectBases(); void DetermineTracingMethods(); - void DetermineWrapperTracingMethods(); bool InheritsTrace(); - bool InheritsTraceWrappers(); Edge* CreateEdge(const clang::Type* type); Edge* CreateEdgeFromOriginalType(const clang::Type* type); + bool HasOptionalFinalizer(); + RecordCache* cache_; clang::CXXRecordDecl* record_; const std::string name_; @@ -167,9 +151,7 @@ class RecordInfo { CachedBool is_eagerly_finalized_; bool determined_trace_methods_; - bool determined_wrapper_trace_methods_; clang::CXXMethodDecl* trace_method_; - clang::CXXMethodDecl* trace_wrappers_method_; clang::CXXMethodDecl* trace_dispatch_method_; clang::CXXMethodDecl* finalize_dispatch_method_; diff --git a/chromium/tools/clang/plugins/OWNERS b/chromium/tools/clang/plugins/OWNERS index 4733a4f06bf..859c2c608c3 100644 --- a/chromium/tools/clang/plugins/OWNERS +++ b/chromium/tools/clang/plugins/OWNERS @@ -1 +1 @@ -erg@chromium.org +dcheng@chromium.org diff --git a/chromium/tools/clang/pylib/clang/compile_db.py b/chromium/tools/clang/pylib/clang/compile_db.py index bbf5e3c30e9..4dbe39cd15d 100755 --- a/chromium/tools/clang/pylib/clang/compile_db.py +++ b/chromium/tools/clang/pylib/clang/compile_db.py @@ -17,7 +17,7 @@ _debugging = False def _ProcessEntry(entry): """Transforms one entry in the compile database to be clang-tool friendly.""" - split_command = shlex.split(entry['command']) + split_command = shlex.split(entry['command'], posix=(sys.platform != 'win32')) # Drop gomacc.exe from the front, if present. 
if split_command[0].endswith('gomacc.exe'): @@ -84,11 +84,12 @@ def GetNinjaPath(): # FIXME: This really should be a build target, rather than generated at runtime. -def GenerateWithNinja(path): +def GenerateWithNinja(path, targets=[]): """Generates a compile database using ninja. Args: path: The build directory to generate a compile database for. + targets: Additional targets to pass to ninja. Returns: List of the contents of the compile database. @@ -96,9 +97,9 @@ def GenerateWithNinja(path): # TODO(dcheng): Ensure that clang is enabled somehow. # First, generate the compile database. - json_compile_db = subprocess.check_output([ - GetNinjaPath(), '-C', path, '-t', 'compdb', 'cc', 'cxx', 'objc', - 'objcxx']) + json_compile_db = subprocess.check_output( + [GetNinjaPath(), '-C', path] + targets + + ['-t', 'compdb', 'cc', 'cxx', 'objc', 'objcxx']) return json.loads(json_compile_db) diff --git a/chromium/tools/clang/scripts/generate_compdb.py b/chromium/tools/clang/scripts/generate_compdb.py index bf56712bd4f..73780936c83 100755 --- a/chromium/tools/clang/scripts/generate_compdb.py +++ b/chromium/tools/clang/scripts/generate_compdb.py @@ -28,9 +28,24 @@ def main(argv): '-p', required=True, help='Path to build directory') + parser.add_argument( + 'targets', + nargs='*', + help='Additional targets to pass to ninja') + parser.add_argument( + '-o', + help='File to write the compilation database to. 
Defaults to stdout') + args = parser.parse_args() - print json.dumps(compile_db.GenerateWithNinja(args.p)) + compdb_text = json.dumps( + compile_db.ProcessCompileDatabaseIfNeeded( + compile_db.GenerateWithNinja(args.p, args.targets))) + if args.o is None: + print(compdb_text) + else: + with open(args.o, 'w') as f: + f.write(compdb_text) if __name__ == '__main__': diff --git a/chromium/tools/clang/scripts/package.py b/chromium/tools/clang/scripts/package.py index bc9f4a0d5ca..5abba310079 100755 --- a/chromium/tools/clang/scripts/package.py +++ b/chromium/tools/clang/scripts/package.py @@ -352,7 +352,8 @@ def main(): objdumpdir = 'llvmobjdump-' + stamp shutil.rmtree(objdumpdir, ignore_errors=True) os.makedirs(os.path.join(objdumpdir, 'bin')) - for filename in ['llvm-cxxfilt', 'llvm-nm', 'llvm-objdump', 'llvm-readobj']: + for filename in ['llvm-bcanalyzer', 'llvm-cxxfilt', 'llvm-nm', 'llvm-objdump', + 'llvm-readobj']: shutil.copy(os.path.join(LLVM_RELEASE_DIR, 'bin', filename + exe_ext), os.path.join(objdumpdir, 'bin')) llvmobjdump_stamp_file_base = 'llvmobjdump_build_revision' diff --git a/chromium/tools/clang/scripts/run_tool.py b/chromium/tools/clang/scripts/run_tool.py index 388f2416fc3..82ae64def61 100755 --- a/chromium/tools/clang/scripts/run_tool.py +++ b/chromium/tools/clang/scripts/run_tool.py @@ -82,6 +82,7 @@ def _PruneGitFiles(git_files, paths): """ if not git_files: return [] + git_files.sort() pruned_list = [] git_index = 0 for path in sorted(paths): @@ -215,6 +216,10 @@ def _ExecuteTool(toolname, tool_args, build_directory, compdb_entry): # passed to the tool twice - once directly and once via # the compile args. if a != compdb_entry.filename + # /showIncludes is used by Ninja to track header file dependencies on + # Windows. We don't need to do this here, and it results in lots of spam + # and a massive log file, so we strip it. 
+ and a != '/showIncludes' ]) # shlex.split escapes double qoutes in non-Posix mode, so we need to strip @@ -294,9 +299,12 @@ class _CompilerDispatcher(object): sys.stderr.write('\n') done_count = self.__success_count + self.__failed_count percentage = (float(done_count) / len(self.__compdb_entries)) * 100 - sys.stderr.write( - 'Processed %d files with %s tool (%d failures) [%.2f%%]\r' % - (done_count, self.__toolname, self.__failed_count, percentage)) + # Only output progress for every 100th entry, to make log files easier to + # inspect. + if done_count % 100 == 0 or done_count == len(self.__compdb_entries): + sys.stderr.write( + 'Processed %d files with %s tool (%d failures) [%.2f%%]\r' % + (done_count, self.__toolname, self.__failed_count, percentage)) def main(): diff --git a/chromium/tools/clang/scripts/update.py b/chromium/tools/clang/scripts/update.py index 1dffa88154b..f69d2eb8915 100755 --- a/chromium/tools/clang/scripts/update.py +++ b/chromium/tools/clang/scripts/update.py @@ -27,7 +27,7 @@ import zipfile # Do NOT CHANGE this if you don't know what you're doing -- see # https://chromium.googlesource.com/chromium/src/+/master/docs/updating_clang.md # Reverting problematic clang rolls is safe, though. -CLANG_REVISION = '332838' +CLANG_REVISION = '337439' use_head_revision = bool(os.environ.get('LLVM_FORCE_HEAD_REVISION', '0') in ('1', 'YES')) @@ -80,12 +80,6 @@ LLVM_REPO_URL='https://llvm.org/svn/llvm-project' if 'LLVM_REPO_URL' in os.environ: LLVM_REPO_URL = os.environ['LLVM_REPO_URL'] -# Bump after VC updates. 
-DIA_DLL = { - '2013': 'msdia120.dll', - '2015': 'msdia140.dll', - '2017': 'msdia140.dll', -} def DownloadUrl(url, output_file): @@ -204,7 +198,8 @@ def RunCommand(command, msvc_arch=None, env=None, fail_hard=True): shell with the msvc tools for that architecture.""" if msvc_arch and sys.platform == 'win32': - command = GetVSVersion().SetupScript(msvc_arch) + ['&&'] + command + command = [os.path.join(GetWinSDKDir(), 'bin', 'SetEnv.cmd'), + "/" + msvc_arch, '&&'] + command # https://docs.python.org/2/library/subprocess.html: # "On Unix with shell=True [...] if args is a sequence, the first item @@ -370,31 +365,45 @@ def AddGnuWinToPath(): os.environ['PATH'] = gnuwin_dir + os.pathsep + os.environ.get('PATH', '') -vs_version = None -def GetVSVersion(): - global vs_version - if vs_version: - return vs_version +win_sdk_dir = None +dia_dll = None +def GetWinSDKDir(): + """Get the location of the current SDK. Sets dia_dll as a side-effect.""" + global win_sdk_dir + global dia_dll + if win_sdk_dir: + return win_sdk_dir + + # Bump after VC updates. + DIA_DLL = { + '2013': 'msdia120.dll', + '2015': 'msdia140.dll', + '2017': 'msdia140.dll', + } + + # Don't let vs_toolchain overwrite our environment. + environ_bak = os.environ - # Try using the toolchain in depot_tools. - # This sets environment variables used by SelectVisualStudioVersion below. 
sys.path.append(os.path.join(CHROMIUM_DIR, 'build')) import vs_toolchain - vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs() + win_sdk_dir = vs_toolchain.SetEnvironmentAndGetSDKDir() + msvs_version = vs_toolchain.GetVisualStudioVersion() + + if bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1'))): + dia_path = os.path.join(win_sdk_dir, '..', 'DIA SDK', 'bin', 'amd64') + else: + vs_path = vs_toolchain.DetectVisualStudioPath() + dia_path = os.path.join(vs_path, 'DIA SDK', 'bin', 'amd64') + + dia_dll = os.path.join(dia_path, DIA_DLL[msvs_version]) - # Use gyp to find the MSVS installation, either in depot_tools as per above, - # or a system-wide installation otherwise. - sys.path.append(os.path.join(CHROMIUM_DIR, 'tools', 'gyp', 'pylib')) - import gyp.MSVSVersion - vs_version = gyp.MSVSVersion.SelectVisualStudioVersion( - vs_toolchain.GetVisualStudioVersion()) - return vs_version + os.environ = environ_bak + return win_sdk_dir def CopyDiaDllTo(target_dir): # This script always wants to use the 64-bit msdia*.dll. 
- dia_path = os.path.join(GetVSVersion().Path(), 'DIA SDK', 'bin', 'amd64') - dia_dll = os.path.join(dia_path, DIA_DLL[GetVSVersion().ShortName()]) + GetWinSDKDir() CopyFile(dia_dll, target_dir) diff --git a/chromium/tools/clang/translation_unit/TranslationUnitGenerator.cpp b/chromium/tools/clang/translation_unit/TranslationUnitGenerator.cpp index be61238bf46..579e64747b5 100644 --- a/chromium/tools/clang/translation_unit/TranslationUnitGenerator.cpp +++ b/chromium/tools/clang/translation_unit/TranslationUnitGenerator.cpp @@ -29,11 +29,15 @@ #include "clang/Tooling/CompilationDatabase.h" #include "clang/Tooling/Refactoring.h" #include "clang/Tooling/Tooling.h" +#include "llvm/ADT/SmallVector.h" #include "llvm/Support/CommandLine.h" +#include "llvm/Support/FileSystem.h" #include "llvm/Support/Path.h" using clang::HeaderSearchOptions; using clang::tooling::CommonOptionsParser; +using llvm::sys::fs::real_path; +using llvm::SmallVector; using std::set; using std::stack; using std::string; @@ -203,7 +207,13 @@ string IncludeFinderPPCallbacks::DoubleSlashSystemHeaders( void IncludeFinderPPCallbacks::EndOfMainFile() { const clang::FileEntry* main_file = source_manager_->getFileEntryForID(source_manager_->getMainFileID()); - assert(*main_source_file_ == main_file->getName()); + + SmallVector main_source_file_real_path; + SmallVector main_file_name_real_path; + assert(!real_path(*main_source_file_, main_source_file_real_path)); + assert(!real_path(main_file->getName(), main_file_name_real_path)); + assert(main_source_file_real_path == main_file_name_real_path); + AddFile(main_file->getName()); } diff --git a/chromium/tools/clang/translation_unit/test_files/compile_commands.json.template b/chromium/tools/clang/translation_unit/test_files/compile_commands.json.template index f7710877fb1..9fef4cf92e6 100644 --- a/chromium/tools/clang/translation_unit/test_files/compile_commands.json.template +++ b/chromium/tools/clang/translation_unit/test_files/compile_commands.json.template 
@@ -8,5 +8,10 @@ "directory": "$test_files_dir", "command": "clang++ -fsyntax-only -std=c++11 --sysroot ./sysroot -c test_relative_sysroot.cc", "file": "test_relative_sysroot.cc" + }, + { + "directory": "$test_files_dir", + "command": "clang++ -I.", + "file": "includes_self.cc" } -] \ No newline at end of file +] diff --git a/chromium/tools/clang/translation_unit/test_files/includes_self.cc b/chromium/tools/clang/translation_unit/test_files/includes_self.cc new file mode 100644 index 00000000000..fbacda4b668 --- /dev/null +++ b/chromium/tools/clang/translation_unit/test_files/includes_self.cc @@ -0,0 +1,6 @@ +#ifndef GUARD +#define GUARD + +#include "includes_self.cc" + +#endif diff --git a/chromium/tools/clang/translation_unit/test_files/includes_self.cc.filepaths.expected b/chromium/tools/clang/translation_unit/test_files/includes_self.cc.filepaths.expected new file mode 100644 index 00000000000..45527860d54 --- /dev/null +++ b/chromium/tools/clang/translation_unit/test_files/includes_self.cc.filepaths.expected @@ -0,0 +1 @@ +includes_self.cc diff --git a/chromium/tools/clang/translation_unit/test_files/test.cc.filepaths.expected b/chromium/tools/clang/translation_unit/test_files/test.cc.filepaths.expected index adb9e6514e9..90693537239 100644 --- a/chromium/tools/clang/translation_unit/test_files/test.cc.filepaths.expected +++ b/chromium/tools/clang/translation_unit/test_files/test.cc.filepaths.expected @@ -1,4 +1,4 @@ -./binomial.h -./test.h //bits/wchar.h +binomial.h test.cc +test.h diff --git a/chromium/tools/clang/translation_unit/test_translation_unit.py b/chromium/tools/clang/translation_unit/test_translation_unit.py index 0482a7873f6..426aa7b65e2 100755 --- a/chromium/tools/clang/translation_unit/test_translation_unit.py +++ b/chromium/tools/clang/translation_unit/test_translation_unit.py @@ -47,7 +47,9 @@ def main(): args = ['python', os.path.join(tools_clang_scripts_directory, 'run_tool.py'), + '--tool', 'translation_unit', + '-p', 
test_directory_for_tool] args.extend(source_files) run_tool = subprocess.Popen(args, stdout=subprocess.PIPE) diff --git a/chromium/tools/code_coverage/test_suite.txt b/chromium/tools/code_coverage/test_suite.txt index 9e62e1738db..abe7e5788d1 100644 --- a/chromium/tools/code_coverage/test_suite.txt +++ b/chromium/tools/code_coverage/test_suite.txt @@ -23,6 +23,9 @@ compositor_unittests content_browsertests content_unittests courgette_unittests +crashpad_tests +cronet_tests +cronet_unittests crypto_unittests dbus_unittests device_unittests @@ -35,7 +38,6 @@ gcm_unit_tests gfx_unittests gin_unittests gl_unittests -gn_unittests google_apis_unittests gpu_unittests headless_browsertests @@ -43,14 +45,17 @@ headless_unittests interactive_ui_tests ipc_tests jingle_unittests -keyboard_unittests +latency_unittests leveldb_service_unittests libjingle_xmpp_unittests media_blink_unittests media_mojo_unittests media_service_unittests media_unittests +message_center_unittests midi_unittests +mojo_unittests +nacl_helper_nonsfi_unittests nacl_loader_unittests native_theme_unittests net_unittests diff --git a/chromium/tools/cygprofile/BUILD.gn b/chromium/tools/cygprofile/BUILD.gn deleted file mode 100644 index 037c31e41b7..00000000000 --- a/chromium/tools/cygprofile/BUILD.gn +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2015 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -import("//build/config/android/config.gni") - -if (target_cpu == "arm") { - static_library("cygprofile") { - sources = [ - "delayed_dumper.cc", - "lightweight_cygprofile.cc", - "lightweight_cygprofile.h", - ] - deps = [ - "//base", - ] - } - - executable("cygprofile_perftests") { - testonly = true - - sources = [ - "lightweight_cygprofile_perftest.cc", - ] - - deps = [ - ":cygprofile", - "//base", - "//testing/gtest", - "//testing/perf", - ] - } -} diff --git a/chromium/tools/cygprofile/delayed_dumper.cc b/chromium/tools/cygprofile/delayed_dumper.cc deleted file mode 100644 index 513b6a53a3f..00000000000 --- a/chromium/tools/cygprofile/delayed_dumper.cc +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright 2017 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#include -#include - -#include - -#include "base/android/library_loader/anchor_functions.h" -#include "base/logging.h" -#include "build/build_config.h" -#include "tools/cygprofile/lightweight_cygprofile.h" - -#if !defined(ARCH_CPU_ARMEL) -#error Only supported on ARM. -#endif // !defined(ARCH_CPU_ARMEL) - -namespace cygprofile { -namespace { - -// Disables the recording of addresses after |kDelayInSeconds| and dumps the -// result to a file. -class DelayedDumper { - public: - DelayedDumper() { - // Not using base::TimeTicks() to not call too many base:: symbol that would - // pollute the reached symbols dumps. 
- struct timespec ts; - if (clock_gettime(CLOCK_MONOTONIC, &ts)) - PLOG(FATAL) << "clock_gettime."; - uint64_t start_ns_since_epoch = - static_cast(ts.tv_sec) * 1000 * 1000 * 1000 + ts.tv_nsec; - int pid = getpid(); - - std::thread([pid, start_ns_since_epoch]() { - sleep(kInitialDelayInSeconds); - while (!SwitchToNextPhaseOrDump(pid, start_ns_since_epoch)) - sleep(kDelayInSeconds); - }) - .detach(); - } - - static constexpr int kDelayInSeconds = 30; - static constexpr int kInitialDelayInSeconds = - kPhases == 1 ? kDelayInSeconds : 5; -}; - -// Static initializer on purpose. Will disable instrumentation after -// |kDelayInSeconds|. -DelayedDumper g_dump_later; - -} // namespace -} // namespace cygprofile diff --git a/chromium/tools/cygprofile/lightweight_cygprofile.cc b/chromium/tools/cygprofile/lightweight_cygprofile.cc deleted file mode 100644 index f872f8661c2..00000000000 --- a/chromium/tools/cygprofile/lightweight_cygprofile.cc +++ /dev/null @@ -1,224 +0,0 @@ -// Copyright (c) 2017 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#include "tools/cygprofile/lightweight_cygprofile.h" - -#include -#include -#include -#include -#include - -#include "base/android/library_loader/anchor_functions.h" -#include "base/files/file.h" -#include "base/format_macros.h" -#include "base/logging.h" -#include "base/macros.h" -#include "base/strings/stringprintf.h" -#include "build/build_config.h" - -#if !defined(ARCH_CPU_ARMEL) -#error Only supported on ARM. -#endif // !defined(ARCH_CPU_ARMEL) - -// Must be applied to all functions within this file. -#define NO_INSTRUMENT_FUNCTION __attribute__((no_instrument_function)) - -namespace cygprofile { -namespace { - -// These are large overestimates, which is not an issue, as the data is -// allocated in .bss, and on linux doesn't take any actual memory when it's not -// touched. 
-constexpr size_t kBitfieldSize = 1 << 22; -constexpr size_t kMaxTextSizeInBytes = kBitfieldSize * (4 * 32); -constexpr size_t kMaxElements = 1 << 20; - -// Data required to log reached offsets. -struct LogData { - std::atomic offsets[kBitfieldSize]; - std::atomic ordered_offsets[kMaxElements]; - std::atomic index; -}; - -LogData g_data[kPhases]; -std::atomic g_data_index; - -// |RecordAddress()| adds an element to a concurrent bitset and to a concurrent -// append-only list of offsets. -// -// Ordering: -// Two consecutive calls to |RecordAddress()| from the same thread will be -// ordered in the same way in the result, as written by -// |StopAndDumpToFile()|. The result will contain exactly one instance of each -// unique offset relative to |kStartOfText| passed to |RecordAddress()|. -// -// Implementation: -// The "set" part is implemented with a bitfield, |g_offset|. The insertion -// order is recorded in |g_ordered_offsets|. -// This is not a class to make sure there isn't a static constructor, as it -// would cause issue with an instrumented static constructor calling this code. -// -// Limitations: -// - Only records offsets to addresses between |kStartOfText| and |kEndOfText|. -// - Capacity of the set is limited by |kMaxElements|. -// - Some insertions at the end of collection may be lost. - -// Records that |address| has been reached, if recording is enabled. -// To avoid infinite recursion, this *must* *never* call any instrumented -// function, unless |Disable()| is called first. -template -__attribute__((always_inline, no_instrument_function)) void RecordAddress( - size_t address) { - int index = g_data_index.load(std::memory_order_relaxed); - if (index >= kPhases) - return; - - const size_t start = - for_testing ? kStartOfTextForTesting : base::android::kStartOfText; - const size_t end = - for_testing ? 
kEndOfTextForTesting : base::android::kEndOfText; - if (UNLIKELY(address < start || address > end)) { - Disable(); - // If the start and end addresses are set incorrectly, this code path is - // likely happening during a static initializer. Logging at this time is - // prone to deadlock. By crashing immediately we at least have a chance to - // get a stack trace from the system to give some clue about the nature of - // the problem. - IMMEDIATE_CRASH(); - } - - size_t offset = address - start; - static_assert(sizeof(int) == 4, - "Collection and processing code assumes that sizeof(int) == 4"); - size_t offset_index = offset / 4; - - auto* offsets = g_data[index].offsets; - // Atomically set the corresponding bit in the array. - std::atomic* element = offsets + (offset_index / 32); - // First, a racy check. This saves a CAS if the bit is already set, and - // allows the cache line to remain shared acoss CPUs in this case. - uint32_t value = element->load(std::memory_order_relaxed); - uint32_t mask = 1 << (offset_index % 32); - if (value & mask) - return; - - auto before = element->fetch_or(mask, std::memory_order_relaxed); - if (before & mask) - return; - - // We were the first one to set the element, record it in the ordered - // elements list. - // Use relaxed ordering, as the value is not published, or used for - // synchronization. 
- auto* ordered_offsets = g_data[index].ordered_offsets; - auto& ordered_offsets_index = g_data[index].index; - size_t insertion_index = - ordered_offsets_index.fetch_add(1, std::memory_order_relaxed); - if (UNLIKELY(insertion_index >= kMaxElements)) { - Disable(); - LOG(FATAL) << "Too many reached offsets"; - } - ordered_offsets[insertion_index].store(offset, std::memory_order_relaxed); -} - -NO_INSTRUMENT_FUNCTION void DumpToFile(const base::FilePath& path, - const LogData& data) { - auto file = - base::File(path, base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE); - if (!file.IsValid()) { - PLOG(ERROR) << "Could not open " << path; - return; - } - - size_t count = data.index - 1; - for (size_t i = 0; i < count; i++) { - // |g_ordered_offsets| is initialized to 0, so a 0 in the middle of it - // indicates a case where the index was incremented, but the write is not - // visible in this thread yet. Safe to skip, also because the function at - // the start of text is never called. - auto offset = data.ordered_offsets[i].load(std::memory_order_relaxed); - if (!offset) - continue; - auto offset_str = base::StringPrintf("%" PRIuS "\n", offset); - file.WriteAtCurrentPos(offset_str.c_str(), - static_cast(offset_str.size())); - } -} - -// Stops recording, and outputs the data to |path|. 
-NO_INSTRUMENT_FUNCTION void StopAndDumpToFile(int pid, - uint64_t start_ns_since_epoch) { - Disable(); - - for (int phase = 0; phase < kPhases; phase++) { - auto path = base::StringPrintf( - "/data/local/tmp/chrome/cyglog/" - "cygprofile-instrumented-code-hitmap-%d-%" PRIu64 ".txt_%d", - pid, start_ns_since_epoch, phase); - DumpToFile(base::FilePath(path), g_data[phase]); - } -} - -} // namespace - -NO_INSTRUMENT_FUNCTION void Disable() { - g_data_index.store(kPhases, std::memory_order_relaxed); - std::atomic_thread_fence(std::memory_order_seq_cst); -} - -NO_INSTRUMENT_FUNCTION void SanityChecks() { - CHECK_LT(base::android::kEndOfText - base::android::kStartOfText, - kMaxTextSizeInBytes); - CHECK(base::android::IsOrderingSane()); -} - -NO_INSTRUMENT_FUNCTION bool SwitchToNextPhaseOrDump( - int pid, - uint64_t start_ns_since_epoch) { - int before = g_data_index.fetch_add(1, std::memory_order_relaxed); - if (before + 1 == kPhases) { - StopAndDumpToFile(pid, start_ns_since_epoch); - return true; - } - return false; -} - -NO_INSTRUMENT_FUNCTION void ResetForTesting() { - Disable(); - g_data_index = 0; - for (int i = 0; i < kPhases; i++) { - memset(reinterpret_cast(g_data[i].offsets), 0, - sizeof(uint32_t) * kBitfieldSize); - memset(reinterpret_cast(g_data[i].ordered_offsets), 0, - sizeof(uint32_t) * kMaxElements); - g_data[i].index.store(0); - } -} - -NO_INSTRUMENT_FUNCTION void RecordAddressForTesting(size_t address) { - return RecordAddress(address); -} - -NO_INSTRUMENT_FUNCTION std::vector GetOrderedOffsetsForTesting() { - std::vector result; - size_t max_index = g_data[0].index.load(std::memory_order_relaxed); - for (size_t i = 0; i < max_index; ++i) { - auto value = g_data[0].ordered_offsets[i].load(std::memory_order_relaxed); - if (value) - result.push_back(value); - } - return result; -} - -} // namespace cygprofile - -extern "C" { - -NO_INSTRUMENT_FUNCTION void __cyg_profile_func_enter_bare() { - cygprofile::RecordAddress( - 
reinterpret_cast(__builtin_return_address(0))); -} - -} // extern "C" diff --git a/chromium/tools/cygprofile/lightweight_cygprofile.h b/chromium/tools/cygprofile/lightweight_cygprofile.h deleted file mode 100644 index 2cabd2af685..00000000000 --- a/chromium/tools/cygprofile/lightweight_cygprofile.h +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2017 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#ifndef TOOLS_CYGPROFILE_LIGHTWEIGHT_CYGPROFILE_H_ -#define TOOLS_CYGPROFILE_LIGHTWEIGHT_CYGPROFILE_H_ - -#include -#include - -namespace cygprofile { -constexpr int kPhases = 1; -constexpr size_t kStartOfTextForTesting = 1000; -constexpr size_t kEndOfTextForTesting = kStartOfTextForTesting + 1000 * 1000; - -// Stop recording. -void Disable(); - -// CHECK()s that the offsets are correctly set up. -void SanityChecks(); - -// Switches to the next recording phase. If called from the last phase, dumps -// the data to disk, and returns |true|. |pid| is the current process pid, and -// |start_ns_since_epoch| the process start timestamp. -bool SwitchToNextPhaseOrDump(int pid, uint64_t start_ns_since_epoch); - -// Record an |address|, if recording is enabled. Only for testing. -void RecordAddressForTesting(size_t address); - -// Resets the state. Only for testing. -void ResetForTesting(); - -// Returns an ordered list of reached offsets. Only for testing. -std::vector GetOrderedOffsetsForTesting(); -} // namespace cygprofile - -#endif // TOOLS_CYGPROFILE_LIGHTWEIGHT_CYGPROFILE_H_ diff --git a/chromium/tools/cygprofile/lightweight_cygprofile_perftest.cc b/chromium/tools/cygprofile/lightweight_cygprofile_perftest.cc deleted file mode 100644 index 689dd609c93..00000000000 --- a/chromium/tools/cygprofile/lightweight_cygprofile_perftest.cc +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright 2017 The Chromium Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#include "tools/cygprofile/lightweight_cygprofile.h" - -#include - -#include "base/android/library_loader/anchor_functions.h" -#include "base/strings/stringprintf.h" -#include "base/time/time.h" -#include "testing/gtest/include/gtest/gtest.h" -#include "testing/perf/perf_test.h" - -namespace cygprofile { - -namespace { - -// Records |addresses_count| distinct addresses |iterations| times, in -// |threads|. -void RunBenchmark(int iterations, int addresses_count, int threads) { - auto iterate = [iterations, addresses_count]() { - for (int i = 0; i < iterations; i++) { - for (size_t addr = kStartOfTextForTesting; - addr < static_cast(addresses_count); addr += sizeof(int)) { - RecordAddressForTesting(addr); - } - } - }; - if (threads != 1) { - for (int i = 0; i < threads - 1; ++i) - std::thread(iterate).detach(); - } - auto tick = base::TimeTicks::Now(); - iterate(); - auto tock = base::TimeTicks::Now(); - double nanos = static_cast((tock - tick).InNanoseconds()); - auto ns_per_call = - nanos / (iterations * static_cast(addresses_count)); - auto modifier = - base::StringPrintf("_%d_%d_%d", iterations, addresses_count, threads); - perf_test::PrintResult("RecordAddressCostPerCall", modifier, "", ns_per_call, - "ns", true); -} - -} // namespace - -class LightweightCygprofileTest : public ::testing::Test { - protected: - void SetUp() override { ResetForTesting(); } -}; - -TEST_F(LightweightCygprofileTest, RecordOffset) { - size_t first = 1234, second = 1456; - RecordAddressForTesting(first); - RecordAddressForTesting(second); - RecordAddressForTesting(first); // No duplicates. - RecordAddressForTesting(first + 1); // 4 bytes granularity. 
- Disable(); - - auto reached = GetOrderedOffsetsForTesting(); - EXPECT_EQ(2UL, reached.size()); - EXPECT_EQ(first - kStartOfTextForTesting, reached[0]); - EXPECT_EQ(second - kStartOfTextForTesting, reached[1]); -} - -TEST_F(LightweightCygprofileTest, RecordingStops) { - size_t first = 1234, second = 1456, third = 1789; - RecordAddressForTesting(first); - RecordAddressForTesting(second); - Disable(); - RecordAddressForTesting(third); - - auto reached = GetOrderedOffsetsForTesting(); - ASSERT_EQ(2UL, reached.size()); - ASSERT_EQ(first - kStartOfTextForTesting, reached[0]); - ASSERT_EQ(second - kStartOfTextForTesting, reached[1]); -} - -TEST_F(LightweightCygprofileTest, OutOfBounds) { - EXPECT_DEATH(RecordAddressForTesting(kEndOfTextForTesting + 100), ""); - EXPECT_DEATH(RecordAddressForTesting(kStartOfTextForTesting - 100), ""); -} - -TEST(LightweightCygprofilePerfTest, RecordAddress_10_10000) { - RunBenchmark(10, 10000, 1); -} - -TEST(LightweightCygprofilePerfTest, RecordAddress_100_10000) { - RunBenchmark(100, 10000, 1); -} - -TEST(LightweightCygprofilePerfTest, RecordAddress_10_100000) { - RunBenchmark(10, 100000, 1); -} - -TEST(LightweightCygprofilePerfTest, RecordAddress_100_100000) { - RunBenchmark(100, 100000, 1); -} - -TEST(LightweightCygprofilePerfTest, RecordAddress_1000_100000_2) { - RunBenchmark(1000, 100000, 2); -} - -TEST(LightweightCygprofilePerfTest, RecordAddress_1000_100000_3) { - RunBenchmark(1000, 100000, 3); -} - -TEST(LightweightCygprofilePerfTest, RecordAddress_1000_100000_4) { - RunBenchmark(1000, 100000, 4); -} - -TEST(LightweightCygprofilePerfTest, RecordAddress_1000_100000_6) { - RunBenchmark(1000, 100000, 6); -} - -} // namespace cygprofile - -// Custom runner implementation since base's one requires JNI on Android. 
-int main(int argc, char** argv) { - testing::InitGoogleTest(&argc, argv); - return RUN_ALL_TESTS(); -} diff --git a/chromium/tools/cygprofile/orderfile_generator_backend.py b/chromium/tools/cygprofile/orderfile_generator_backend.py index 73232066887..c2a2bfd856e 100755 --- a/chromium/tools/cygprofile/orderfile_generator_backend.py +++ b/chromium/tools/cygprofile/orderfile_generator_backend.py @@ -26,6 +26,7 @@ import sys import tempfile import time +import cyglog_to_orderfile import cygprofile_utils import patch_orderfile import process_profiles @@ -429,7 +430,7 @@ class OrderfileGenerator(object): if options.profile: output_directory = os.path.join(self._instrumented_out_dir, 'Release') - host_cyglog_dir = os.path.join(output_directory, 'cyglog_data') + host_profile_dir = os.path.join(output_directory, 'profile_data') urls = [profile_android_startup.AndroidProfileTool.TEST_URL] use_wpr = True simulate_user = False @@ -437,7 +438,8 @@ class OrderfileGenerator(object): use_wpr = not options.no_wpr simulate_user = options.simulate_user self._profiler = profile_android_startup.AndroidProfileTool( - output_directory, host_cyglog_dir, use_wpr, urls, simulate_user) + output_directory, host_profile_dir, use_wpr, urls, simulate_user, + device=options.device) self._output_data = {} self._step_recorder = StepRecorder(options.buildbot) @@ -479,7 +481,7 @@ class OrderfileGenerator(object): files = self._profiler.CollectProfile( self._compiler.chrome_apk, constants.PACKAGE_INFO['chrome']) - self._step_recorder.BeginStep('Process cyglog') + self._step_recorder.BeginStep('Process profile') assert os.path.exists(self._compiler.lib_chrome_so) offsets = process_profiles.GetReachedOffsetsFromDumpFiles( files, self._compiler.lib_chrome_so) @@ -615,6 +617,9 @@ class OrderfileGenerator(object): profile_uploaded = False orderfile_uploaded = False + assert (bool(self._options.profile) ^ + bool(self._options.manual_symbol_offsets)) + if self._options.profile: try: 
_UnstashOutputDirectory(self._instrumented_out_dir) @@ -630,6 +635,22 @@ class OrderfileGenerator(object): finally: self._DeleteTempFiles() _StashOutputDirectory(self._instrumented_out_dir) + elif self._options.manual_symbol_offsets: + assert self._options.manual_libname + assert self._options.manual_objdir + with file(self._options.manual_symbol_offsets) as f: + symbol_offsets = [int(x) for x in f.xreadlines()] + processor = process_profiles.SymbolOffsetProcessor( + self._options.manual_libname) + generator = cyglog_to_orderfile.OffsetOrderfileGenerator( + processor, cyglog_to_orderfile.ObjectFileProcessor( + self._options.manual_objdir)) + ordered_sections = generator.GetOrderedSections(symbol_offsets) + if not ordered_sections: # Either None or empty is a problem. + raise Exception('Failed to get ordered sections') + with open(self._GetUnpatchedOrderfileFilename(), 'w') as orderfile: + orderfile.write('\n'.join(ordered_sections)) + if self._options.patch: if self._options.profile: self._RemoveBlanks(self._GetUnpatchedOrderfileFilename(), @@ -678,6 +699,9 @@ def CreateArgumentParser(): parser.add_argument( '--buildbot', action='store_true', help='If true, the script expects to be run on a buildbot') + parser.add_argument( + '--device', default=None, type=str, + help='Device serial number on which to run profiling.') parser.add_argument( '--verify', action='store_true', help='If true, the script only verifies the current orderfile') @@ -711,6 +735,18 @@ def CreateArgumentParser(): parser.add_argument( '--use-goma', action='store_true', help='Enable GOMA.', default=False) parser.add_argument('--adb-path', help='Path to the adb binary.') + + parser.add_argument('--manual-symbol-offsets', default=None, type=str, + help=('File of list of ordered symbol offsets generated ' + 'by manual profiling. 
Must set other --manual* ' + 'flags if this is used, and must --skip-profile.')) + parser.add_argument('--manual-libname', default=None, type=str, + help=('Library filename corresponding to ' + '--manual-symbol-offsets.')) + parser.add_argument('--manual-objdir', default=None, type=str, + help=('Root of object file directory corresponding to ' + '--manual-symbol-offsets.')) + profile_android_startup.AddProfileCollectionArguments(parser) return parser diff --git a/chromium/tools/cygprofile/profile_android_startup.py b/chromium/tools/cygprofile/profile_android_startup.py index 47ea35fda7b..57499a41dc8 100755 --- a/chromium/tools/cygprofile/profile_android_startup.py +++ b/chromium/tools/cygprofile/profile_android_startup.py @@ -38,11 +38,11 @@ from telemetry.internal.util import webpagereplay_go_server from telemetry.internal.util import binary_manager -class NoCyglogDataError(Exception): - """An error used to indicate that no cyglog data was collected.""" +class NoProfileDataError(Exception): + """An error used to indicate that no profile data was collected.""" def __init__(self, value): - super(NoCyglogDataError, self).__init__() + super(NoProfileDataError, self).__init__() self.value = value def __str__(self): @@ -174,35 +174,43 @@ class WprManager(object): class AndroidProfileTool(object): - """A utility for generating cygprofile data for chrome on andorid. + """A utility for generating orderfile profile data for chrome on android. Runs cygprofile_unittest found in output_directory, does profiling runs, - and pulls the data to the local machine in output_directory/cyglog_data. + and pulls the data to the local machine in output_directory/profile_data. """ - _DEVICE_CYGLOG_DIR = '/data/local/tmp/chrome/cyglog' + _DEVICE_PROFILE_DIR = '/data/local/tmp/chrome/orderfile' + + # Old profile data directories that used to be used. These are cleaned up in + # order to keep devices tidy. 
+ _LEGACY_PROFILE_DIRS = ['/data/local/tmp/chrome/cyglog'] TEST_URL = 'https://www.google.com/#hl=en&q=science' _WPR_ARCHIVE = os.path.join( os.path.dirname(__file__), 'memory_top_10_mobile_000.wprgo') - def __init__(self, output_directory, host_cyglog_dir, use_wpr, urls, - simulate_user): + def __init__(self, output_directory, host_profile_dir, use_wpr, urls, + simulate_user, device=None): """Constructor. Args: output_directory: (str) Chrome build directory. - host_cyglog_dir: (str) Where to store the profiles. + host_profile_dir: (str) Where to store the profiles on the host. use_wpr: (bool) Whether to use Web Page Replay. urls: (str) URLs to load. Have to be contained in the WPR archive if use_wpr is True. simulate_user: (bool) Whether to simulate a user. """ - devices = device_utils.DeviceUtils.HealthyDevices() - self._device = devices[0] + if device is None: + devices = device_utils.DeviceUtils.HealthyDevices() + assert len(devices) == 1, 'Expected exactly one connected device' + self._device = devices[0] + else: + self._device = device_utils.DeviceUtils(device) self._cygprofile_tests = os.path.join( output_directory, 'cygprofile_unittests') - self._host_cyglog_dir = host_cyglog_dir + self._host_profile_dir = host_profile_dir self._use_wpr = use_wpr self._urls = urls self._simulate_user = simulate_user @@ -238,7 +246,7 @@ class AndroidProfileTool(object): A list of cygprofile data files. Raises: - NoCyglogDataError: No data was found on the device. + NoProfileDataError: No data was found on the device. """ self._Install(apk) try: @@ -253,7 +261,7 @@ class AndroidProfileTool(object): finally: self._RestoreChromeFlags(changer) - data = self._PullCyglogData() + data = self._PullProfileData() self._DeleteDeviceData() return data @@ -308,7 +316,7 @@ class AndroidProfileTool(object): """When profiling, files are output to the disk by every process. This means running without sandboxing enabled. 
""" - # We need to have adb root in order to pull cyglog data + # We need to have adb root in order to pull profile data try: print 'Enabling root...' self._device.EnableRoot() @@ -334,18 +342,18 @@ class AndroidProfileTool(object): changer.Restore() def _SetUpDeviceFolders(self): - """Creates folders on the device to store cyglog data.""" + """Creates folders on the device to store profile data.""" print 'Setting up device folders...' self._DeleteDeviceData() - self._device.RunShellCommand( - ['mkdir', '-p', str(self._DEVICE_CYGLOG_DIR)], - check_return=True) + self._device.RunShellCommand(['mkdir', '-p', self._DEVICE_PROFILE_DIR], + check_return=True) def _DeleteDeviceData(self): - """Clears out cyglog storage locations on the device. """ - self._device.RunShellCommand( - ['rm', '-rf', str(self._DEVICE_CYGLOG_DIR)], - check_return=True) + """Clears out profile storage locations on the device. """ + for profile_dir in [self._DEVICE_PROFILE_DIR] + self._LEGACY_PROFILE_DIRS: + self._device.RunShellCommand( + ['rm', '-rf', str(profile_dir)], + check_return=True) def _StartChrome(self, package_info, url): print 'Launching chrome...' @@ -360,43 +368,44 @@ class AndroidProfileTool(object): self._device.KillAll(package_info.package) def _DeleteHostData(self): - """Clears out cyglog storage locations on the host.""" - shutil.rmtree(self._host_cyglog_dir, ignore_errors=True) + """Clears out profile storage locations on the host.""" + shutil.rmtree(self._host_profile_dir, ignore_errors=True) def _SetUpHostFolders(self): self._DeleteHostData() - os.mkdir(self._host_cyglog_dir) + os.mkdir(self._host_profile_dir) - def _PullCyglogData(self): - """Pulls the cyglog data off of the device. + def _PullProfileData(self): + """Pulls the profile data off of the device. Returns: - A list of cyglog data files which were pulled. + A list of profile data files which were pulled. Raises: - NoCyglogDataError: No data was found on the device. 
+ NoProfileDataError: No data was found on the device. """ - print 'Pulling cyglog data...' + print 'Pulling profile data...' self._SetUpHostFolders() - self._device.PullFile(self._DEVICE_CYGLOG_DIR, self._host_cyglog_dir) - files = os.listdir(self._host_cyglog_dir) - - if len(files) == 0: - raise NoCyglogDataError('No cyglog data was collected') + self._device.PullFile(self._DEVICE_PROFILE_DIR, self._host_profile_dir) # Temporary workaround/investigation: if (for unknown reason) 'adb pull' of - # the directory 'cyglog' into '.../Release/cyglog_data' produces - # '...cyglog_data/cyglog/files' instead of the usual '...cyglog_data/files', - # list the files deeper in the tree. - cyglog_dir = self._host_cyglog_dir - if (len(files) == 1) and (files[0] == 'cyglog'): - cyglog_dir = os.path.join(self._host_cyglog_dir, 'cyglog') - files = os.listdir(cyglog_dir) + # the directory 'orderfile' '.../Release/profile_data' produces + # '...profile_data/orderfile/files' instead of the usual + # '...profile_data/files', list the files deeper in the tree. + files = [] + redundant_dir_root = os.path.basename(self._DEVICE_PROFILE_DIR) + for root_file in os.listdir(self._host_profile_dir): + if root_file == redundant_dir_root: + profile_dir = os.path.join(self._host_profile_dir, root_file) + files.extend(os.path.join(profile_dir, f) + for f in os.listdir(profile_dir)) + else: + files.append(root_file) if len(files) == 0: - raise NoCyglogDataError('No cyglog data was collected') + raise NoProfileDataError('No profile data was collected') - return [os.path.join(cyglog_dir, x) for x in files] + return [os.path.join(profile_dir, x) for x in files] def AddProfileCollectionArguments(parser): @@ -424,8 +433,8 @@ def CreateArgumentParser(): help='Chromium output directory (e.g. out/Release)') parser.add_argument( '--trace-directory', type=os.path.realpath, - help='Directory in which cyglog traces will be stored. 
' - 'Defaults to /cyglog_data') + help='Directory in which profile traces will be stored. ' + 'Defaults to /profile_data') AddProfileCollectionArguments(parser) return parser @@ -446,8 +455,11 @@ def main(): else: raise Exception('Unable to determine package info for %s' % args.apk_path) + trace_directory = args.trace_directory + if not trace_directory: + trace_directory = os.path.join(args.output_directory, 'profile_data') profiler = AndroidProfileTool( - args.output_directory, host_cyglog_dir=args.trace_directory, + args.output_directory, host_profile_dir=trace_directory, use_wpr=not args.no_wpr, urls=args.urls, simulate_user=args.simulate_user) profiler.CollectProfile(args.apk_path, package_info) return 0 diff --git a/chromium/tools/determinism/compare_build_artifacts.py b/chromium/tools/determinism/compare_build_artifacts.py index 62dae004beb..6a8282593a2 100755 --- a/chromium/tools/determinism/compare_build_artifacts.py +++ b/chromium/tools/determinism/compare_build_artifacts.py @@ -164,7 +164,7 @@ def compare_files(first_filepath, second_filepath): return diff_binary(first_filepath, second_filepath, file_len) -def get_deps(build_dir, target): +def get_deps(ninja_path, build_dir, target): """Returns list of object files needed to build target.""" NODE_PATTERN = re.compile(r'label="([a-zA-Z0-9_\\/.-]+)"') CHECK_EXTS = ('.o', '.obj') @@ -181,7 +181,7 @@ def get_deps(build_dir, target): shutil.move(build_dir, fixed_build_dir) try: - out = subprocess.check_output(['ninja', '-C', fixed_build_dir, + out = subprocess.check_output([ninja_path, '-C', fixed_build_dir, '-t', 'graph', target]) except subprocess.CalledProcessError as e: print >> sys.stderr, 'error to get graph for %s: %s' % (target, e) @@ -207,12 +207,12 @@ def get_deps(build_dir, target): return files -def compare_deps(first_dir, second_dir, targets): +def compare_deps(first_dir, second_dir, ninja_path, targets): """Print difference of dependent files.""" diffs = set() for target in targets: - first_deps 
= get_deps(first_dir, target) - second_deps = get_deps(second_dir, target) + first_deps = get_deps(ninja_path, first_dir, target) + second_deps = get_deps(ninja_path, second_dir, target) print 'Checking %s difference: (%s deps)' % (target, len(first_deps)) if set(first_deps) != set(second_deps): # Since we do not thiks this case occur, we do not do anything special @@ -231,7 +231,7 @@ def compare_deps(first_dir, second_dir, targets): return list(diffs) -def compare_build_artifacts(first_dir, second_dir, target_platform, +def compare_build_artifacts(first_dir, second_dir, ninja_path, target_platform, json_output, recursive=False): """Compares the artifacts from two distinct builds.""" if not os.path.isdir(first_dir): @@ -300,7 +300,8 @@ def compare_build_artifacts(first_dir, second_dir, target_platform, all_diffs = expected_diffs + unexpected_diffs diffs_to_investigate = sorted(set(all_diffs).difference(missing_files)) - deps_diff = compare_deps(first_dir, second_dir, diffs_to_investigate) + deps_diff = compare_deps(first_dir, second_dir, + ninja_path, diffs_to_investigate) if json_output: try: @@ -326,6 +327,8 @@ def main(): parser.add_option('-r', '--recursive', action='store_true', default=False, help='Indicates if the comparison should be recursive.') parser.add_option('--json-output', help='JSON file to output differences') + parser.add_option('--ninja-path', help='path to ninja command.', + default='ninja') target = { 'darwin': 'mac', 'linux2': 'linux', 'win32': 'win' }.get(sys.platform, sys.platform) @@ -342,6 +345,7 @@ def main(): return compare_build_artifacts(os.path.abspath(options.first_build_dir), os.path.abspath(options.second_build_dir), + options.ninja_path, options.target_platform, options.json_output, options.recursive) diff --git a/chromium/tools/determinism/deterministic_build_whitelist.pyl b/chromium/tools/determinism/deterministic_build_whitelist.pyl index 3be370171ec..ce6a518b69e 100644 --- 
a/chromium/tools/determinism/deterministic_build_whitelist.pyl +++ b/chromium/tools/determinism/deterministic_build_whitelist.pyl @@ -164,7 +164,6 @@ 'angle_unittests.exe', 'app_driver_library.dll', 'app_list_demo.exe', - 'app_list_presenter_unittests.exe', 'app_list_unittests.exe', 'app_shell.exe', 'app_shell_unittests.exe', diff --git a/chromium/tools/dump_process_memory/BUILD.gn b/chromium/tools/dump_process_memory/BUILD.gn new file mode 100644 index 00000000000..4c1a2419391 --- /dev/null +++ b/chromium/tools/dump_process_memory/BUILD.gn @@ -0,0 +1,18 @@ +# Copyright 2018 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +executable("dump_process") { + sources = [ + "dump_process.cc", + ] + deps = [ + "//base", + ] +} + +group("all") { + deps = [ + ":dump_process", + ] +} diff --git a/chromium/tools/dump_process_memory/OWNERS b/chromium/tools/dump_process_memory/OWNERS new file mode 100644 index 00000000000..ef8590f575f --- /dev/null +++ b/chromium/tools/dump_process_memory/OWNERS @@ -0,0 +1,3 @@ +lizeb@chromium.org +pasko@chromium.org + diff --git a/chromium/tools/dump_process_memory/analyze_dumps.py b/chromium/tools/dump_process_memory/analyze_dumps.py new file mode 100755 index 00000000000..666e9da413f --- /dev/null +++ b/chromium/tools/dump_process_memory/analyze_dumps.py @@ -0,0 +1,253 @@ +#!/usr/bin/python +# +# Copyright 2018 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""From a dump generated by dump_process.cc dump, prints statistics about +its content. +""" + +import array +import collections +import hashlib +import logging +import os +import struct +import sys +import zlib + + +PAGE_SIZE = 1 << 12 + + +def _ReadPage(f): + """Reads a page of data from a file. + + Args: + f: (file) An opened file to read from. 
+ + Returns: + An array.array() of unsigned int with the page content. + """ + result = array.array('I') + result.fromfile(f, PAGE_SIZE / result.itemsize) + return result + + +def _PrettyPrintSize(x): + """Pretty print sizes in bytes, e.g. 123456 -> 123.45kB. + + Args: + x: (int) size + + Returns: + (str) Pretty printed version, 2 decimal places. + """ + if x < 1e3: + return str(x) + elif 1e3 <= x < 1e6: + return '%.2fkB' % (x / 1e3) + elif 1e6 <= x < 1e9: + return '%.2fMB' % (x / 1e6) + else: + return '%.2fGB' % (x / 1e9) + + +class MappingStats(object): + """Statistics about a mapping, from a dump. + + Slots: + filename: (str) Dump filename. + start: (int) Start address of the mapping. + end: (int) End address of the mapping. + pages: (int) Sizs of the mapping in pages. + is_zero: ([bool]) For each page, whether it's a zero page. + is_present: ([bool]) For each page, whether it's present. + is_swapped: ([bool]) For each page, whether it has been swapped out. + compressed_size: ([int]) If a page is not zero, its compressed size. + hashes: ([str]) If a page is not zero, its SHA1 hash. + """ + __slots__ = ('filename', 'start', 'end', 'pages', 'is_zero', 'is_present', + 'is_swapped', 'compressed_size', 'hashes', 'freed') + def __init__(self, filename, start, end): + """Init. + + Args: + filename: (str) Dump filename. + start: (int) Start address. + end: (int) End address + """ + self.filename = filename + self.start = start + self.end = end + self.pages = (end - start) / PAGE_SIZE + self.is_zero = [False for i in range(self.pages)] + self.is_present = [False for i in range(self.pages)] + self.is_swapped = [False for i in range(self.pages)] + self.compressed_size = [0 for i in range(self.pages)] + self.hashes = [None for i in range(self.pages)] + self.freed = 0 + + +def _GetStatsFromFileDump(filename): + """Computes per-dump statistics. + + Args: + filename: (str) Path to the dump. + + Returns: + MappingStats for the mapping. 
+ """ + # These are typically only populated with DCHECK() on. + FREED_PATTERNS = (0xcccccccc, # V8 + 0xcdcdcdcd, # PartitionAlloc "zapped" + 0xabababab, # PartitionAlloc "uninitialized" + 0xdeadbeef, # V8 "zapped" + 0x0baddeaf, # V8 zapped handles + 0x0baffedf, # V8 zapped global handles + 0x0beefdaf, # V8 zapped from space + 0xbeefdeef, # V8 zapped slots + 0xbadbaddb, # V8 debug zapped + 0xfeed1eaf) # V8 zapped freelist + # Dump integrity checks. + metadata_filename = filename + '.metadata' + pid_start_end = os.path.basename(filename)[:-len('.dump')] + (_, start, end) = [int(x, 10) for x in pid_start_end.split('-')] + file_stat = os.stat(filename) + assert start % PAGE_SIZE == 0 + assert end % PAGE_SIZE == 0 + assert file_stat.st_size == (end - start) + metadata_file_stat = os.stat(metadata_filename) + result = MappingStats(filename, start, end) + # each line is [01]{2}\n, eg '10\n', 1 line per page. + assert metadata_file_stat.st_size == 3 * result.pages + + with open(filename, 'r') as f, open(metadata_filename, 'r') as metadata_f: + for i in range(result.pages): + page = _ReadPage(f) + assert len(page) == 1024 + result.freed += 4 * sum(x in FREED_PATTERNS for x in page) + is_zero = max(page) == 0 + present, swapped = (bool(int(x)) for x in metadata_f.readline().strip()) + # Not present, not swapped private anonymous == lazily initialized zero + # page. + if not present and not swapped: + assert is_zero + result.is_zero[i] = is_zero + result.is_present[i] = present + result.is_swapped[i] = swapped + if not is_zero: + sha1 = hashlib.sha1() + sha1.update(page) + page_hash = sha1.digest() + result.hashes[i] = page_hash + compressed = zlib.compress(page, 1) + result.compressed_size[i] = len(compressed) + return result + + +def _FindPageFromHash(mappings, page_hash): + """Returns a page with a given hash from a list of mappings. + + Args: + mappings: ([MappingStats]) List of mappings. 
+ page_hash: (str) Page hash to look for, + + Returns: + array.array(uint32_t) with the page content + """ + for mapping in mappings: + for i in range(mapping.pages): + if mapping.hashes[i] == page_hash: + with open(mapping.filename, 'r') as f: + f.seek(i * PAGE_SIZE) + page = _ReadPage(f) + sha1 = hashlib.sha1() + sha1.update(page) + assert page_hash == sha1.digest() + return page + + +def _PrintPage(page): + """Prints the content of a page.""" + for i, x in enumerate(page): + print '{:08x}'.format(x), + if i % 16 == 15: + print + + +def PrintStats(dumps): + """Logs statistics about a process mappings dump. + + Args: + dumps: ([str]) List of dumps. + """ + dump_stats = [_GetStatsFromFileDump(filename) for filename in dumps] + content_to_count = collections.defaultdict(int) + total_pages = sum(stats.pages for stats in dump_stats) + total_zero_pages = sum(sum(stats.is_zero) for stats in dump_stats) + total_compressed_size = sum(sum(stats.compressed_size) + for stats in dump_stats) + total_swapped_pages = sum(sum(stats.is_swapped) for stats in dump_stats) + total_not_present_pages = sum(stats.pages - sum(stats.is_present) + for stats in dump_stats) + total_present_zero_pages = sum( + sum(x == (True, True) for x in zip(stats.is_zero, stats.is_present)) + for stats in dump_stats) + total_freed_space = sum(stats.freed for stats in dump_stats) + + content_to_count = collections.defaultdict(int) + for stats in dump_stats: + for page_hash in stats.hashes: + if page_hash: + content_to_count[page_hash] += 1 + + print 'Total pages = %d (%s)' % (total_pages, + _PrettyPrintSize(total_pages * PAGE_SIZE)) + print 'Total zero pages = %d (%.02f%%)' % ( + total_zero_pages, (100. 
* total_zero_pages) / total_pages) + print 'Total present zero pages = %d (%s)' % ( + total_present_zero_pages, + _PrettyPrintSize(total_present_zero_pages * PAGE_SIZE)) + total_size_non_zero_pages = (total_pages - total_zero_pages) * PAGE_SIZE + print 'Total size of non-zero pages = %d (%s)' % ( + total_size_non_zero_pages, _PrettyPrintSize(total_size_non_zero_pages)) + print 'Total compressed size = %d (%.02f%%)' % ( + total_compressed_size, + (100. * total_compressed_size) / total_size_non_zero_pages) + duplicated_pages = sum(x - 1 for x in content_to_count.values()) + print 'Duplicated non-zero pages = %d' % duplicated_pages + count_and_hashes = sorted(((v, k) for k, v in content_to_count.items()), + reverse=True) + max_common_pages = count_and_hashes[0][0] - 1 + print 'Max non-zero pages with the same content = %d' % max_common_pages + print 'Swapped pages = %d (%s)' % ( + total_swapped_pages, _PrettyPrintSize(total_swapped_pages * PAGE_SIZE)) + print 'Non-present pages = %d (%s)' % ( + total_not_present_pages, + _PrettyPrintSize(total_not_present_pages * PAGE_SIZE)) + print 'Freed = %d (%s)' % ( + total_freed_space, _PrettyPrintSize(total_freed_space)) + print 'Top Duplicated Pages:' + for i in range(10): + count, page_hash = count_and_hashes[i] + print '%d common pages' % count + page = _FindPageFromHash(dump_stats, page_hash) + _PrintPage(page) + print + + +def main(): + logging.basicConfig(level=logging.INFO) + if len(sys.argv) != 2: + logging.error('Usage: %s ', sys.argv[0]) + sys.exit(1) + directory = sys.argv[1] + dumps = [os.path.join(directory, f) for f in os.listdir(directory) + if f.endswith('.dump')] + PrintStats(dumps) + + +if __name__ == '__main__': + main() diff --git a/chromium/tools/dump_process_memory/dump_process.cc b/chromium/tools/dump_process_memory/dump_process.cc new file mode 100644 index 00000000000..de6384be76f --- /dev/null +++ b/chromium/tools/dump_process_memory/dump_process.cc @@ -0,0 +1,259 @@ +// Copyright 2018 The Chromium 
Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include +#include +#include +#include + +#include +#include +#include + +#include "base/debug/proc_maps_linux.h" +#include "base/files/file.h" +#include "base/files/file_path.h" +#include "base/files/file_util.h" +#include "base/format_macros.h" +#include "base/logging.h" +#include "base/posix/eintr_wrapper.h" +#include "base/strings/string_number_conversions.h" +#include "base/strings/string_util.h" +#include "base/strings/stringprintf.h" + +namespace { + +using base::debug::MappedMemoryRegion; +constexpr size_t kPageSize = 1 << 12; + +// See https://www.kernel.org/doc/Documentation/vm/pagemap.txt. +struct PageMapEntry { + uint64_t pfn_or_swap : 55; + uint64_t soft_dirty : 1; + uint64_t exclusively_mapped : 1; + uint64_t unused : 4; + uint64_t file_mapped_or_shared_anon : 1; + uint64_t swapped : 1; + uint64_t present : 1; +}; +static_assert(sizeof(PageMapEntry) == sizeof(uint64_t), "Wrong bitfield size"); + +// Calls ptrace() on a process, and detaches in the destructor. +class ScopedPtracer { + public: + ScopedPtracer(pid_t pid) : pid_(pid), is_attached_(false) { + // ptrace() delivers a SIGSTOP signal to one thread in the target process, + // unless it is already stopped. Since we want to stop the whole process, + // send a signal to every thread in the process group. + pid_t process_group_id = getpgid(pid); + if (killpg(process_group_id, SIGSTOP)) { + PLOG(ERROR) << "Cannot stop the process group of " << pid; + return; + } + + if (ptrace(PTRACE_ATTACH, pid, nullptr, nullptr)) { + PLOG(ERROR) << "Unable to attach to " << pid; + return; + } + // ptrace(PTRACE_ATTACH) sends a SISTOP signal to the process, need to wait + // for it. 
+ int status; + pid_t ret = HANDLE_EINTR(waitpid(pid, &status, 0)); + if (ret != pid) { + PLOG(ERROR) << "Waiting for the process failed"; + return; + } + if (!WIFSTOPPED(status)) { + LOG(ERROR) << "The process is not stopped"; + ptrace(PTRACE_DETACH, pid, 0, 0); + return; + } + is_attached_ = true; + } + + ~ScopedPtracer() { + if (!is_attached_) + return; + if (ptrace(PTRACE_DETACH, pid_, 0, 0)) { + PLOG(ERROR) << "Cannot detach from " << pid_; + } + pid_t process_group_id = getpgid(pid_); + if (killpg(process_group_id, SIGCONT)) { + PLOG(ERROR) << "Cannot resume the process " << pid_; + return; + } + } + + bool IsAttached() const { return is_attached_; } + + private: + pid_t pid_; + bool is_attached_; +}; + +bool ParseProcMaps(pid_t pid, std::vector* regions) { + std::string path = base::StringPrintf("/proc/%d/maps", pid); + std::string proc_maps; + bool ok = base::ReadFileToString(base::FilePath(path), &proc_maps); + if (!ok) { + LOG(ERROR) << "Cannot read " << path; + return false; + } + ok = base::debug::ParseProcMaps(proc_maps, regions); + if (!ok) { + LOG(ERROR) << "Cannot parse " << path; + return false; + } + return true; +} + +// Keep anonynmous rw-p regions. 
+bool ShouldDump(const MappedMemoryRegion& region) { + const auto rw_p = MappedMemoryRegion::READ | MappedMemoryRegion::WRITE | + MappedMemoryRegion::PRIVATE; + if (region.permissions != rw_p) + return false; + if (base::StartsWith(region.path, "/", base::CompareCase::SENSITIVE) || + base::StartsWith(region.path, "[stack]", base::CompareCase::SENSITIVE)) { + return false; + } + return true; +} + +base::File OpenProcPidFile(const char* filename, pid_t pid) { + std::string path = base::StringPrintf("/proc/%d/%s", pid, filename); + auto file = base::File(base::FilePath(path), + base::File::FLAG_OPEN | base::File::FLAG_READ); + if (!file.IsValid()) { + PLOG(ERROR) << "Cannot open " << path; + } + return file; +} + +bool DumpRegion(const MappedMemoryRegion& region, + pid_t pid, + base::File* proc_mem, + base::File* proc_pagemap) { + size_t size_in_pages = (region.end - region.start) / kPageSize; + std::string output_path = base::StringPrintf("%d-%" PRIuS "-%" PRIuS ".dump", + pid, region.start, region.end); + base::File output_file(base::FilePath(output_path), + base::File::FLAG_WRITE | base::File::FLAG_CREATE); + if (!output_file.IsValid()) { + PLOG(ERROR) << "Cannot open " << output_path; + return false; + } + std::string metadata_path = output_path + std::string(".metadata"); + base::File metadata_file(base::FilePath(metadata_path), + base::File::FLAG_WRITE | base::File::FLAG_CREATE); + if (!metadata_file.IsValid()) { + PLOG(ERROR) << "Cannot open " << metadata_path; + return false; + } + + // Dump metadata. + // Important: Metadata must be dumped before the data, as reading from + // /proc/pid/mem will move the data back from swap, so dumping metadata + // later would not show anything in swap. + // This also means that dumping the same process twice will result in + // inaccurate metadata. + for (size_t i = 0; i < size_in_pages; ++i) { + // See https://www.kernel.org/doc/Documentation/vm/pagemap.txt + // 64 bits per page. 
+ int64_t pagemap_offset = + ((region.start / kPageSize) + i) * sizeof(PageMapEntry); + PageMapEntry entry; + proc_pagemap->Seek(base::File::FROM_BEGIN, pagemap_offset); + int size_read = proc_pagemap->ReadAtCurrentPos( + reinterpret_cast(&entry), sizeof(PageMapEntry)); + if (size_read != sizeof(PageMapEntry)) { + PLOG(ERROR) << "Cannot read from /proc/pid/pagemap at offset " + << pagemap_offset; + return false; + } + std::string metadata = base::StringPrintf( + "%c%c\n", entry.present ? '1' : '0', entry.swapped ? '1' : '0'); + metadata_file.WriteAtCurrentPos(metadata.c_str(), metadata.size()); + } + + // Writing data page by page to avoid allocating too much memory. + std::vector buffer(kPageSize); + for (size_t i = 0; i < size_in_pages; ++i) { + uint64_t address = region.start + i * kPageSize; + // Works because the upper half of the address space is reserved for the + // kernel on at least ARM64 and x86_64 bit architectures. + CHECK(address <= std::numeric_limits::max()); + proc_mem->Seek(base::File::FROM_BEGIN, static_cast(address)); + int size_read = proc_mem->ReadAtCurrentPos(&buffer[0], kPageSize); + if (size_read != kPageSize) { + PLOG(ERROR) << "Cannot read from /proc/pid/mem at offset " << address; + return false; + } + + int64_t output_offset = i * kPageSize; + int size_written = output_file.Write(output_offset, &buffer[0], kPageSize); + if (size_written != kPageSize) { + PLOG(ERROR) << "Cannot write to output file"; + return false; + } + } + + return true; +} + +// Dumps the content of all the anonymous rw-p mappings in a given process to +// disk. 
+bool DumpMappings(pid_t pid) { + LOG(INFO) << "Attaching to " << pid; + ScopedPtracer tracer(pid); + if (!tracer.IsAttached()) + return false; + + LOG(INFO) << "Reading /proc/pid/maps"; + std::vector regions; + bool ok = ParseProcMaps(pid, ®ions); + if (!ok) + return false; + + base::File proc_mem = OpenProcPidFile("mem", pid); + if (!proc_mem.IsValid()) + return false; + base::File proc_pagemap = OpenProcPidFile("pagemap", pid); + if (!proc_pagemap.IsValid()) + return false; + + for (const auto& region : regions) { + if (!ShouldDump(region)) + continue; + std::string message = + base::StringPrintf("%" PRIuS "-%" PRIuS " (size %" PRIuS ")", + region.start, region.end, region.end - region.start); + LOG(INFO) << "Dumping " << message; + ok = DumpRegion(region, pid, &proc_mem, &proc_pagemap); + if (!ok) { + LOG(WARNING) << "Failed to dump region"; + } + } + return true; +} + +} // namespace + +int main(int argc, char** argv) { + CHECK(sysconf(_SC_PAGESIZE) == kPageSize); + + if (argc != 2) { + LOG(ERROR) << "Usage: " << argv[0] << " "; + return 1; + } + pid_t pid; + bool ok = base::StringToInt(argv[1], &pid); + if (!ok) { + LOG(ERROR) << "Cannot parse PID"; + return 1; + } + + ok = DumpMappings(pid); + return ok ? 
0 : 1; +} diff --git a/chromium/tools/emacs/trybot-mac.txt b/chromium/tools/emacs/trybot-mac.txt index d12efd5f1ca..7ce02be6617 100644 --- a/chromium/tools/emacs/trybot-mac.txt +++ b/chromium/tools/emacs/trybot-mac.txt @@ -1591,7 +1591,7 @@ Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-norm setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket setenv LANG en_US.US-ASCII setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin" - /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/tab_contents/render_view_host_manager.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/render_view_host_manager.o + /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof 
-DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 
-I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/tab_contents/render_view_host_manager.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/render_view_host_manager.o Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/blob_dispatcher_host.o browser/renderer_host/blob_dispatcher_host.cc normal i386 c++ com.apple.compilers.gcc.4_2 cd /b/build/slave/mac/build/src/chrome @@ -1613,7 +1613,7 @@ Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-norm setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO setenv INCLUDE_SERVER_PID 983 setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket - setenv LANG en_US.US-ASCII + setenv LANG en_US.US-ASCII setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin" /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x objective-c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL 
-D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/tab_contents/tab_contents_view_mac.mm -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/tab_contents_view_mac.o @@ -1885,7 +1885,7 @@ Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-nor setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket setenv LANG en_US.US-ASCII setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin" - /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/device_orientation_dispatcher.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/device_orientation_dispatcher.o + /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter 
-Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. -I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/device_orientation_dispatcher.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/device_orientation_dispatcher.o Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/audio_message_filter.o renderer/audio_message_filter.cc normal i386 c++ com.apple.compilers.gcc.4_2 cd /b/build/slave/mac/build/src/chrome diff --git a/chromium/tools/fuchsia/comparative_tester/comparative_tester.py b/chromium/tools/fuchsia/comparative_tester/comparative_tester.py index d42f3391fc4..09bd203c308 100755 
--- a/chromium/tools/fuchsia/comparative_tester/comparative_tester.py +++ b/chromium/tools/fuchsia/comparative_tester/comparative_tester.py @@ -7,18 +7,24 @@ # Fuchsia devices and then compares their output to each other, extracting the # relevant performance data from the output of gtest. +import argparse +import logging import os import re import subprocess import sys -from typing import * +import time + +from collections import defaultdict +from typing import Tuple, Dict, List import target_spec +import test_results -def RunCommand(command: List[str], msg: str, - ignore_errors: bool = False) -> str: +def RunCommand(command: List[str], msg: str) -> str: "One-shot start and complete command with useful default kwargs" + command = [piece for piece in command if piece != ""] proc = subprocess.run( command, stdout=subprocess.PIPE, @@ -29,70 +35,76 @@ def RunCommand(command: List[str], msg: str, if proc.returncode != 0: sys.stderr.write("{}\nreturn code: {}\nstdout: {}\nstderr: {}".format( msg, proc.returncode, out, err)) - if not ignore_errors: - raise subprocess.SubprocessError( - "Command failed to complete successfully. {}".format(command)) + raise subprocess.SubprocessError( + "Command failed to complete successfully. 
{}".format(command)) return out +# TODO(crbug.com/848465): replace with --test-launcher-filter-file directly +def ParseFilterFile(filepath: str) -> str: + positive_filters = [] + negative_filters = [] + with open(filepath, "r") as file: + for line in file: + # Only take the part of a line before a # sign + line = line.split("#", 1)[0].strip() + if line == "": + continue + elif line.startswith("-"): + negative_filters.append(line[1:]) + else: + positive_filters.append(line) + + return "--gtest_filter={}-{}".format(":".join(positive_filters), + ":".join(negative_filters)) + + class TestTarget(object): """TestTarget encapsulates a single BUILD.gn target, extracts a name from the target string, and manages the building and running of the target for both Linux and Fuchsia. """ - def __init__(self, target: str, filters: str = "") -> None: - self.Target = target - self.Name = target.split(":")[-1] - if filters != "": - self.FilterFlag = "--gtest_filter='" + filters + "'" + def __init__(self, target: str) -> None: + self._target = target + self._name = target.split(":")[-1] + self._filter_file = "testing/buildbot/filters/fuchsia.{}.filter".format( + self._name) + if not os.path.isfile(self._filter_file): + self._filter_flag = "" + self._filter_file = "" else: - self.FilterFlag = "" + self._filter_flag = ParseFilterFile(self._filter_file) def ExecFuchsia(self, out_dir: str, run_locally: bool) -> str: - runner_name = "{}/bin/run_{}".format(out_dir, self.Name) - command = [runner_name, self.FilterFlag] + runner_name = "{}/bin/run_{}".format(out_dir, self._name) + command = [runner_name, self._filter_flag, "--exclude-system-logs"] if not run_locally: command.append("-d") - - # TODO(stephanstross): Remove this when fuchsia logging fix lands - command.extend([ - "--test-launcher-summary-output", "/tmp/fuchsia.json", "--", - "--gtest_output=json:/data/test_summary.json" - ]) - return RunCommand( - command, - "Test {} failed on fuchsia!".format(self.Target), - ignore_errors=True) 
+ return RunCommand(command, + "Test {} failed on fuchsia!".format(self._target)) def ExecLinux(self, out_dir: str, run_locally: bool) -> str: - local_path = "{}/{}".format(out_dir, self.Name) - command = [] + command = [] # type: List[str] + user = target_spec.linux_device_user + ip = target_spec.linux_device_ip + host_machine = "{0}@{1}".format(user, ip) if not run_locally: - user = target_spec.linux_device_user - ip = target_spec.linux_device_hostname - host_machine = "{0}@{1}".format(user, ip) # Next is the transfer of all the directories to the destination device. self.TransferDependencies(out_dir, host_machine) command = [ - "ssh", "{}@{}".format(user, ip), "xvfb-run -a {1}/{0}/{1} {2}".format( - out_dir, self.Name, self.FilterFlag) + "ssh", "{}@{}".format(user, ip), "{1}/{0}/{1} -- {2}".format( + out_dir, self._name, self._filter_flag) ] else: - command = [local_path, self.FilterFlag] - result = RunCommand( - command, - "Test {} failed on linux!".format(self.Target), - ignore_errors=True) - # Clean up the copy of the test files on the host after execution - RunCommand(["rm", "-rf", self.Name], - "Failed to remove host directory for {}".format(self.Target)) - return result + local_path = "{}/{}".format(out_dir, self._name) + command = [local_path, "--", self._filter_flag] + return RunCommand(command, "Test {} failed on linux!".format(self._target)) def TransferDependencies(self, out_dir: str, host: str): - gn_desc = ["gn", "desc", out_dir, self.Target, "runtime_deps"] + gn_desc = ["gn", "desc", out_dir, self._target, "runtime_deps"] out = RunCommand( - gn_desc, "Failed to get dependencies of target {}".format(self.Target)) + gn_desc, "Failed to get dependencies of target {}".format(self._target)) paths = [] for line in out.split("\n"): @@ -104,237 +116,65 @@ class TestTarget(object): common = os.path.commonpath(paths) paths = [os.path.relpath(path, common) for path in paths] - archive_name = self.Name + ".tar.gz" + archive_name = self._name + ".tar.gz" # 
Compress the dependencies of the test. + command = ["tar", "-czf", archive_name] + paths + if self._filter_file != "": + command.append(self._filter_file) RunCommand( - ["tar", "-czf", archive_name] + paths, - "{} dependency compression failed".format(self.Target), + command, + "{} dependency compression failed".format(self._target), ) # Make sure the containing directory exists on the host, for easy cleanup. - RunCommand(["ssh", host, "mkdir -p {}".format(self.Name)], - "Failed to create directory on host for {}".format(self.Target)) + RunCommand(["ssh", host, "mkdir -p {}".format(self._name)], + "Failed to create directory on host for {}".format(self._target)) # Transfer the test deps to the host. RunCommand( - ["scp", archive_name, "{}:{}/{}".format(host, self.Name, archive_name)], - "{} dependency transfer failed".format(self.Target), + [ + "scp", archive_name, "{}:{}/{}".format(host, self._name, + archive_name) + ], + "{} dependency transfer failed".format(self._target), ) # Decompress the dependencies once they're on the host. RunCommand( [ "ssh", host, "tar -xzf {0}/{1} -C {0}".format( - self.Name, archive_name) + self._name, archive_name) ], - "{} dependency decompression failed".format(self.Target), + "{} dependency decompression failed".format(self._target), ) # Clean up the local copy of the archive that is no longer needed. RunCommand( ["rm", archive_name], - "{} dependency archive cleanup failed".format(self.Target), + "{} dependency archive cleanup failed".format(self._target), ) -def ExtractParts(string: str) -> (str, float, str): - """This function accepts lines like the one that follow this sentence, and - attempts to extract all of the relevant pieces of information from it. - - task: 1_threads_scheduling_to_io_pump= .47606626678091973 us/task - - The above line would be split into chunks as follows: - - info=data units - - info and units can be any string, and data must be a valid float. 
data and - units must be separated by a space, and info and data must be separated by - at least an '=' - """ - pieces = string.split("=") - info = pieces[0].strip() - measure = pieces[1].strip().split(" ") - data = float(measure[0]) - units = measure[1].strip() - return info, data, units - - -class ResultLine(object): - """This class is a single line of the comparison between linux and fuchsia. - GTests output several lines of important info per test, which are collected, - and then the pertinent pieces of information are extracted and stored in a - ResultLine for each line, containing a shared description and unit, as well as - linux and fuchsia performance scores. - """ +def RunTest(target: TestTarget, run_locally: bool = False) -> None: + linux_out = target.ExecLinux(target_spec.linux_out_dir, run_locally) + linux_result = test_results.TargetResultFromStdout(linux_out.splitlines(), + target._name) + print("Ran Linux") + fuchsia_out = target.ExecFuchsia(target_spec.fuchsia_out_dir, run_locally) + fuchsia_result = test_results.TargetResultFromStdout(fuchsia_out.splitlines(), + target._name) + print("Ran Fuchsia") + outname = "{}.{}.json".format(target._name, time.time()) + linux_result.WriteToJson("{}/{}".format(target_spec.raw_linux_dir, outname)) + fuchsia_result.WriteToJson("{}/{}".format(target_spec.raw_fuchsia_dir, + outname)) + print("Wrote result files") - def __init__(self, linux_line: str, fuchsia_line: str) -> None: - linux_info, linux_val, linux_unit = ExtractParts(linux_line) - fuchsia_info, fuchsia_val, fuchsia_unit = ExtractParts(fuchsia_line) - - if linux_info != fuchsia_info: - print("Info mismatch! fuchsia was: {}".format(fuchsia_info)) - if linux_unit != fuchsia_unit: - print("Unit mismatch! 
fuchsia was: {}".format(fuchsia_unit)) - - self.desc = linux_info - self.linux = linux_val - self.fuchsia = fuchsia_val - self.unit = fuchsia_unit - - def comparison(self) -> float: - return (self.fuchsia / self.linux) * 100.0 - - def ToCsvFormat(self) -> str: - return ",".join([ - self.desc.replace(",", ";"), - str(self.linux), - str(self.fuchsia), - str(self.comparison()), - self.unit, - ]) - - def __format__(self, format_spec: str) -> str: - return "{} in {}: linux:{}, fuchsia:{}, ratio:{}".format( - self.desc, self.unit, self.linux, self.fuchsia, self.comparison()) - - -class TestComparison(object): - """This class represents a single test target, and all of its informative - lines of output for each test case, extracted into statistical comparisons of - this run on linux v fuchsia. - """ - - def __init__(self, name: str, tests: Dict[str, List[ResultLine]]) -> None: - self.suite_name = name - self.tests = tests - - def MakeCsvFormat(self) -> str: - lines = [] - for test_name, lines in self.tests.items(): - for line in lines: - lines.append("{},{},{}".format(self.suite_name, test_name, - line.MakeCsvFormat())) - return "\n".join(lines) - - def __format__(self, format_spec: str) -> str: - lines = [self.suite_name] - for test_case, lines in self.tests.items(): - lines.append(" {}".format(test_case)) - for line in lines: - lines.append(" {}".format(line)) - return "\n".join(lines) - - -def ExtractCases(out_lines: List[str]) -> Dict[str, List[str]]: - """ExtractCases attempts to associate GTest names to the lines of output that - they produce. Given a list of input like the following: - - [==========] Running 24 tests from 10 test cases. - [----------] Global test environment set-up. 
- [----------] 9 tests from ScheduleWorkTest - [ RUN ] ScheduleWorkTest.ThreadTimeToIOFromOneThread - *RESULT task: 1_threads_scheduling_to_io_pump= .47606626678091973 us/task - RESULT task_min_batch_time: 1_threads_scheduling_to_io_pump= .335 us/task - RESULT task_max_batch_time: 1_threads_scheduling_to_io_pump= 5.071 us/task - *RESULT task_thread_time: 1_threads_scheduling_to_io_pump= .3908787013 us/task - [ OK ] ScheduleWorkTest.ThreadTimeToIOFromOneThread (5352 ms) - [ RUN ] ScheduleWorkTest.ThreadTimeToIOFromTwoThreads - *RESULT task: 2_threads_scheduling_to_io_pump= 6.216794903666874 us/task - RESULT task_min_batch_time: 2_threads_scheduling_to_io_pump= 2.523 us/task - RESULT task_max_batch_time: 2_threads_scheduling_to_io_pump= 142.989 us/task - *RESULT task_thread_time: 2_threads_scheduling_to_io_pump= 2.02621823 us/task - [ OK ] ScheduleWorkTest.ThreadTimeToIOFromTwoThreads (5022 ms) - [ RUN ] ScheduleWorkTest.ThreadTimeToIOFromFourThreads - - It will first skip all lines which do not contain either RUN or RESULT. - Then, each 'RUN' line is stripped of the bracketed portion, down to just the - name of the test, and then placed into a dictionary that maps it to all the - lines beneath it, up to the next RUN line. 
The RESULT lines all have their - RESULT portions chopped out as well, and only the piece following RESULT is - kept - - {'ScheduleWorkTest.ThreadTimeToIOFromOneThread':[ - 'task: 1_threads_scheduling_to_io_pump= .47606626678091973 us/task', - 'task_min_batch_time: 1_threads_scheduling_to_io_pump= .335 us/task', - 'task_max_batch_time: 1_threads_scheduling_to_io_pump= 5.071 us/task', - 'task_thread_time: 1_threads_scheduling_to_io_pump= .390834314 us/task'], - 'ScheduleWorkTest.ThreadTimeToIOFromTwoThreads':[ - 'task: 2_threads_scheduling_to_io_pump= 6.216794903666874 us/task', - 'task_min_batch_time: 2_threads_scheduling_to_io_pump= 2.523 us/task', - 'task_max_batch_time: 2_threads_scheduling_to_io_pump= 142.989 us/task', - 'task_thread_time: 2_threads_scheduling_to_io_pump= 2.02620013 us/task'], - 'ScheduleWorkTest.ThreadTimeToIOFromFourThreads':[]} - """ - lines = [] - for line in out_lines: - if "RUN" in line or "RESULT" in line: - lines.append(line) - cases = {} - name = "" - case_lines = [] - for line in lines: - # We've hit a new test suite, write the old accumulators and start new - # ones. The name variable is checked to make sure this isn't the first one - # in the list of lines - if "RUN" in line: - if name != "": - cases[name] = case_lines - case_lines = [] - name = line.split("]")[-1] # Get the actual name of the test case. - else: - if "RESULT" not in line: - print("{} did not get filtered!".format(line)) - line_trimmed = line.split("RESULT")[-1].strip() - case_lines.append(line_trimmed) - return cases - - -def CollateTests(linux_lines: List[str], fuchsia_lines: List[str], - test_target: str) -> TestComparison: - """This function takes the GTest output of a single test target, and matches - the informational sections of the outputs together, before collapsing them - down into ResultLines attached to TestComparisons. 
- """ - - linux_cases = ExtractCases(linux_lines) - fuchsia_cases = ExtractCases(fuchsia_lines) - - comparisons = {} - for case_name, linux_case_lines in linux_cases.items(): - # If fuchsia didn't contain that test case, skip it, but alert the user. - if not case_name in fuchsia_cases: - print("Fuchsia is missing test case {}".format(case_name)) - continue - - fuchsia_case_lines = fuchsia_cases[case_name] - - # Each test case should output its informational lines in the same order, so - # if tests only produce partial output, any tailing info should be dropped, - # and only data that was produced by both tests will be compared. - paired_case_lines = zip(linux_case_lines, fuchsia_case_lines) - if len(linux_case_lines) != len(fuchsia_case_lines): - print("Linux and Fuchsia have produced different output lengths for the " - "test {}!".format(case_name)) - desc_lines = [ResultLine(*pair) for pair in paired_case_lines] - comparisons[case_name] = desc_lines - - for case_name in fuchsia_cases.keys(): - if case_name not in comparisons.keys(): - print("Linux is missing test case {}".format(case_name)) - - return TestComparison(test_target, comparisons) - - -def RunTest(target: TestTarget, run_locally: bool = False) -> TestComparison: - - linux_output = target.ExecLinux(target_spec.linux_out_dir, run_locally) - fuchsia_output = target.ExecFuchsia(target_spec.fuchsia_out_dir, run_locally) - return CollateTests( - linux_output.split("\n"), fuchsia_output.split("\n"), target.Name) - - -def RunGnForDirectory(dir_name: str, target_os: str) -> None: +def RunGnForDirectory(dir_name: str, target_os: str, is_debug: bool) -> None: if not os.path.exists(dir_name): os.makedirs(dir_name) + + debug_str = str(is_debug).lower() + with open("{}/{}".format(dir_name, "args.gn"), "w") as args_file: - args_file.write("is_debug = false\n") + args_file.write("is_debug = {}\n".format(debug_str)) args_file.write("dcheck_always_on = false\n") args_file.write("is_component_build = false\n") 
args_file.write("use_goma = true\n") @@ -343,48 +183,76 @@ def RunGnForDirectory(dir_name: str, target_os: str) -> None: subprocess.run(["gn", "gen", dir_name]).check_returncode() -def GenerateTestData() -> List[List[TestComparison]]: +def GenerateTestData(do_config: bool, do_build: bool, num_reps: int, + is_debug: bool): DIR_SOURCE_ROOT = os.path.abspath( os.path.join(os.path.dirname(__file__), *([os.pardir] * 3))) os.chdir(DIR_SOURCE_ROOT) - + os.makedirs(target_spec.results_dir, exist_ok=True) + os.makedirs(target_spec.raw_linux_dir, exist_ok=True) + os.makedirs(target_spec.raw_fuchsia_dir, exist_ok=True) # Grab parameters from config file. linux_dir = target_spec.linux_out_dir fuchsia_dir = target_spec.fuchsia_out_dir - test_input = [] - for (test, filters) in target_spec.test_targets.items(): - test_input.append(TestTarget(test, filters)) - print("Test targets collected:\n{}".format("\n".join( - [test.Target for test in test_input]))) - - RunGnForDirectory(linux_dir, "linux") - RunGnForDirectory(fuchsia_dir, "fuchsia") - - # Build test targets in both output directories. - for directory in [linux_dir, fuchsia_dir]: - build_command = ["autoninja", "-C", directory] \ - + [test.Target for test in test_input] - RunCommand( - build_command, - "Unable to build targets in directory {}".format(directory), - ) - print("Builds completed.") + test_input = [] # type: List[TestTarget] + for target in target_spec.test_targets: + test_input.append(TestTarget(target)) + print("Test targets collected:\n{}".format(",".join( + [test._target for test in test_input]))) + if do_config: + RunGnForDirectory(linux_dir, "linux", is_debug) + RunGnForDirectory(fuchsia_dir, "fuchsia", is_debug) + print("Ran GN") + elif is_debug: + logging.warning("The --is_debug flag is ignored unless --do_config is also \ + specified") + + if do_build: + # Build test targets in both output directories. 
+ for directory in [linux_dir, fuchsia_dir]: + build_command = ["autoninja", "-C", directory] \ + + [test._target for test in test_input] + RunCommand(build_command, + "autoninja failed in directory {}".format(directory)) + print("Builds completed.") # Execute the tests, one at a time, per system, and collect their results. - results = [] - print("Running Tests") - for test in test_input: - results.append(RunTest(test)) + for i in range(0, num_reps): + print("Running Test set {}".format(i)) + for test_target in test_input: + print("Running Target {}".format(test_target._name)) + RunTest(test_target) + print("Finished {}".format(test_target._name)) print("Tests Completed") - with open("comparison_results.csv", "w") as out_file: - out_file.write( - "target,test,description,linux,fuchsia,fuchsia/linux,units\n") - for result in results: - out_file.write(result.MakeCsvFormat()) - out_file.write("\n") - return results + + +def main() -> int: + cmd_flags = argparse.ArgumentParser( + description="Execute tests repeatedly and collect performance data.") + cmd_flags.add_argument( + "--do-config", + action="store_true", + help="WARNING: This flag over-writes args.gn in the directories " + "configured. 
GN is executed before running the tests.") + cmd_flags.add_argument( + "--do-build", + action="store_true", + help="Build the tests before running them.") + cmd_flags.add_argument( + "--is-debug", + action="store_true", + help="This config-and-build cycle is a debug build") + cmd_flags.add_argument( + "--num-repetitions", + type=int, + default=1, + help="The number of times to execute each test target.") + cmd_flags.parse_args() + GenerateTestData(cmd_flags.do_config, cmd_flags.do_build, + cmd_flags.num_repetitions, cmd_flags.is_debug) + return 0 if __name__ == "__main__": - sys.exit(GenerateTestData()) + sys.exit(main()) diff --git a/chromium/tools/fuchsia/comparative_tester/generate_perf_report.py b/chromium/tools/fuchsia/comparative_tester/generate_perf_report.py new file mode 100755 index 00000000000..3a461d03ea6 --- /dev/null +++ b/chromium/tools/fuchsia/comparative_tester/generate_perf_report.py @@ -0,0 +1,289 @@ +#!/usr/bin/env python3 +# Copyright 2018 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""generate_perf_report.py is to be used after comparative_tester.py has been +executed and written some test data into the location specified by +target_spec.py. It writes to results_dir and reads all present test info from +raw_data_dir. 
Using this script should just be a matter of invoking it from +chromium/src while raw test data exists in raw_data_dir.""" + +import json +import logging +import math +import os +import sys +from typing import List, Dict, Set, Tuple, Optional, Any, TypeVar, Callable + +import target_spec +from test_results import (TargetResult, ReadTargetFromJson, TestResult, + ResultLine) + + +class LineStats(object): + + def __init__(self, desc: str, unit: str, time_avg: float, time_dev: float, + cv: float, samples: int) -> None: + self.desc = desc + self.time_avg = time_avg + self.time_dev = time_dev + self.cv = cv + self.unit = unit + self.sample_num = samples + + def ToString(self) -> str: + if self.sample_num > 1: + return "{}: {:.5f} σ={:.5f} {} with n={} cv={}".format( + self.desc, self.time_avg, self.time_dev, self.unit, self.sample_num, + self.cv) + else: + return "{}: {:.5f} with only one sample".format(self.desc, self.time_avg) + + +def LineFromList(lines: List[ResultLine]) -> Optional[LineStats]: + desc = lines[0].desc + unit = lines[0].unit + times = [line.meas for line in lines] + avg, dev, cv = GenStats(times) + return LineStats(desc, unit, avg, dev, cv, len(lines)) + + +class TestStats(object): + + def __init__(self, name: str, time_avg: float, time_dev: float, cv: float, + samples: int, lines: List[LineStats]) -> None: + self.name = name + self.time_avg = time_avg + self.time_dev = time_dev + self.cv = cv + self.sample_num = samples + self.lines = lines + + def ToLines(self) -> List[str]: + lines = [] + if self.sample_num > 1: + lines.append("{}: {:.5f} σ={:.5f}ms with n={} cv={}".format( + self.name, self.time_avg, self.time_dev, self.sample_num, self.cv)) + else: + lines.append("{}: {:.5f} with only one sample".format( + self.name, self.time_avg)) + for line in self.lines: + lines.append(" {}".format(line.ToString())) + return lines + + +def TestFromList(tests: List[TestResult]) -> TestStats: + name = tests[0].name + avg, dev, cv = GenStats([test.time for test 
in tests]) + lines = {} # type: Dict[str, List[ResultLine]] + for test in tests: + assert test.name == name + for line in test.lines: + if not line.desc in lines: + lines[line.desc] = [line] + else: + lines[line.desc].append(line) + test_lines = [] + for _, line_list in lines.items(): + stat_line = LineFromList(line_list) + if stat_line: + test_lines.append(stat_line) + return TestStats(name, avg, dev, cv, len(tests), test_lines) + + +class TargetStats(object): + + def __init__(self, name: str, samples: int, tests: List[TestStats]) -> None: + self.name = name + self.sample_num = samples + self.tests = tests + + def ToLines(self) -> List[str]: + lines = [] + if self.sample_num > 1: + lines.append("{}: ".format(self.name)) + else: + lines.append("{}: with only one sample".format(self.name)) + for test in self.tests: + for line in test.ToLines(): + lines.append(" {}".format(line)) + return lines + + def __format__(self, format_spec): + return "\n".join(self.ToLines()) + + +def TargetFromList(results: List[TargetResult]) -> TargetStats: + name = results[0].name + sample_num = len(results) + tests = {} # type: Dict[str, List[TestResult]] + for result in results: + assert result.name == name + # This groups tests by name so that they can be considered independently, + # so that in the event tests flake out, their average times can + # still be accurately calculated + for test in result.tests: + if not test.name in tests.keys(): + tests[test.name] = [test] + tests[test.name].append(test) + test_stats = [TestFromList(test_list) for _, test_list in tests.items()] + return TargetStats(name, sample_num, test_stats) + + +def GenStats(corpus: List[float]) -> Tuple[float, float, float]: + avg = sum(corpus) / len(corpus) + adjusted_sum = 0.0 + for item in corpus: + adjusted = item - avg + adjusted_sum += adjusted * adjusted + + dev = math.sqrt(adjusted_sum / len(corpus)) + cv = dev / avg + return avg, dev, cv + + +def DirectoryStats(directory: str) -> List[TargetStats]: + 
resultMap = {} # type: Dict[str, List[TargetResult]] + for file in os.listdir(directory): + results = ReadTargetFromJson("{}/{}".format(directory, file)) + if not results.name in resultMap.keys(): + resultMap[results.name] = [results] + else: + resultMap[results.name].append(results) + + targets = [] + for _, resultList in resultMap.items(): + targets.append(TargetFromList(resultList)) + return targets + + +def CompareTargets(linux: TargetStats, fuchsia: TargetStats) -> Dict[str, Any]: + """Compare takes a corpus of statistics from both Fuchsia and Linux, and then + lines up the values, compares them to each other, and writes them into a + dictionary that can be JSONified. + """ + assert linux.name == fuchsia.name + paired_tests = ZipListsByPredicate(linux.tests, fuchsia.tests, + lambda test: test.name) + + paired_tests = MapDictValues(paired_tests, CompareTests) + return {"name": linux.name, "tests": paired_tests} + + +def CompareTests(linux: TestStats, fuchsia: TestStats) -> Dict[str, Any]: + assert linux != None or fuchsia != None + if linux != None and fuchsia != None: + assert linux.name == fuchsia.name + paired_lines = ZipListsByPredicate(linux.lines, fuchsia.lines, + lambda line: line.desc) + paired_lines = MapDictValues(paired_lines, CompareLines) + result = {"lines": paired_lines, "unit": "ms"} # type: Dict[str, Any] + + if linux: + result["name"] = linux.name + result["linux_avg"] = linux.time_avg + result["linux_dev"] = linux.time_dev + result["linux_cv"] = linux.cv + + if fuchsia == None: + logging.warning("Fuchsia is missing test case {}".format(linux.name)) + else: + result["name"] = fuchsia.name + result["fuchsia_avg"] = fuchsia.time_avg + result["fuchsia_dev"] = fuchsia.time_dev + result["fuchsia_cv"] = fuchsia.cv + + +def CompareLines(linux: LineStats, fuchsia: LineStats) -> Dict[str, Any]: + """CompareLines wraps two LineStats objects up as a JSON-dumpable dict, with + missing values written as -1 (which specifically doesn't make sense for time + 
elapsed measurements). It also logs a warning every time a line is given which + can't be matched up. If both lines passed are None, or their units or + descriptions are not the same(which should never happen) this function fails. + """ + assert linux != None or fuchsia != None + result = {} # type: Dict[str, Any] + if linux != None and fuchsia != None: + assert linux.desc == fuchsia.desc + assert linux.unit == fuchsia.unit + + if linux: + result["desc"] = linux.desc + result["unit"] = linux.unit + result["linux_avg"] = linux.time_avg + result["linux_dev"] = linux.time_dev + result["linux_cv"] = linux.cv + + if fuchsia == None: + logging.warning("Fuchsia is missing test line {}".format(linux.desc)) + else: + result["desc"] = fuchsia.desc + result["unit"] = fuchsia.unit + result["fuchsia_avg"] = fuchsia.time_avg + result["fuchsia_dev"] = fuchsia.time_dev + result["fuchsia_cv"] = fuchsia.cv + + return result + + +T = TypeVar("T") +R = TypeVar("R") + + +def ZipListsByPredicate(left: List[T], right: List[T], + pred: Callable[[T], R]) -> Dict[R, Tuple[T, T]]: + """This function takes two lists, and a predicate. The predicate is applied to + the values in both lists to obtain a keying value from them. Each item is then + inserted into the returned dictionary using the obtained key. Finally, after + all items have been added to the dict, any items that do not have a pair are + discarded after warning the user, and the new dictionary is returned. The + predicate should not map multiple values from one list to the same key. 
+ """ + paired_items = {} # type: Dict [R, Tuple[T, T]] + for item in left: + key = pred(item) + # the first list shouldn't cause any key collisions + assert key not in paired_items.keys() + paired_items[key] = item, None + + for item in right: + key = pred(item) + if key in paired_items.keys(): + # elem 1 of the tuple is always None if the key exists in the map + prev, _ = paired_items[key] + paired_items[key] = prev, item + else: + paired_items[key] = None, item + + return paired_items + + +U = TypeVar("U") +V = TypeVar("V") + + +def MapDictValues(dct: Dict[T, Tuple[R, U]], + predicate: Callable[[R, U], V]) -> Dict[T, V]: + """This function applies the predicate to all the values in the dictionary, + returning a new dictionary with the new values. + """ + out_dict = {} + for key, val in dct.items(): + out_dict[key] = predicate(*val) + return out_dict + + +def main(): + linux_avgs = DirectoryStats(target_spec.raw_linux_dir) + fuchsia_avgs = DirectoryStats(target_spec.raw_fuchsia_dir) + paired_targets = ZipListsByPredicate(linux_avgs, fuchsia_avgs, + lambda target: target.name) + for name, targets in paired_targets.items(): + comparison_dict = CompareTargets(*targets) + with open("{}/{}.json".format(target_spec.results_dir, name), + "w") as outfile: + json.dump(comparison_dict, outfile, indent=2) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/chromium/tools/fuchsia/comparative_tester/target_spec.py b/chromium/tools/fuchsia/comparative_tester/target_spec.py index 0aac16ba1ec..d0122cd4cc7 100644 --- a/chromium/tools/fuchsia/comparative_tester/target_spec.py +++ b/chromium/tools/fuchsia/comparative_tester/target_spec.py @@ -3,17 +3,24 @@ # found in the LICENSE file. # Fields for use when working with a physical linux device connected locally -linux_device_hostname = "192.168.42.32" +linux_device_ip = "192.168.42.32" linux_device_user = "potat" +fuchsia_device_ip = "192.168.42.64" + +# The linux build directory. 
linux_out_dir = "out/default" +# The fuchsia build directory. fuchsia_out_dir = "out/fuchsia" +# The location in src that will store final statistical data on perftest results +results_dir = "results" +# The location in src that stores the information from each comparative +# invocation of a perftest +raw_linux_dir = results_dir + "/linux_raw" +raw_fuchsia_dir = results_dir + "/fuchsia_raw" -# A map of test target names to a list of test filters to be passed to -# --gtest_filter. Stick to *_perftests. Also, whoo implicit string -# joining! -test_targets = { - "base:base_perftests": "-WaitableEventPerfTest.Throughput" - ":MessageLoopPerfTest.PostTaskRate/1_Posting_Thread", - "net:net_perftests": "", -} +# A list of test targets to deploy to both devices. Stick to *_perftests. +test_targets = [ + "base:base_perftests", + "net:net_perftests", +] diff --git a/chromium/tools/fuchsia/comparative_tester/test_results.py b/chromium/tools/fuchsia/comparative_tester/test_results.py new file mode 100644 index 00000000000..6bd5dc9336b --- /dev/null +++ b/chromium/tools/fuchsia/comparative_tester/test_results.py @@ -0,0 +1,186 @@ +# Copyright 2018 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 

import json
import logging
import os

from typing import Any, Dict, List, Tuple, Optional


def UnitStringIsValid(unit: str) -> bool:
  """Returns True if |unit| is one of the measurement units this module knows
  how to parse out of perftest stdout."""
  accepted_units = [
      "us/hop", "us/task", "ns/sample", "ms", "s", "count", "KB", "MB/s", "us"
  ]
  return unit in accepted_units


class ResultLine(object):
  """ResultLine objects are each an individual line of output, complete with a
  unit, measurement, and descriptive component
  """

  def __init__(self, desc: str, measure: float, unit: str) -> None:
    self.desc = desc
    self.meas = measure
    self.unit = unit

  def ToJsonDict(self) -> Dict[str, Any]:
    """Serializes this line to a JSON-compatible dict."""
    return {
        "description": self.desc,
        "measurement": self.meas,
        "unit": self.unit,
    }


def ReadResultLineFromJson(dct: Dict[str, Any]) -> ResultLine:
  """Inverse of ResultLine.ToJsonDict. The measurement is coerced back to
  float in case it was serialized as a string or an int."""
  return ResultLine(dct["description"], float(dct["measurement"]), dct["unit"])


def ResultLineFromStdout(line: str) -> Optional[ResultLine]:
  """Parses a single line of raw perftest stdout into a ResultLine.

  Returns None (after logging a warning) when the line is not a measurement,
  e.g. package-server noise or lines with too few fields.
  """
  # Lines mentioning pkgsvr are package-server noise, not measurements.
  if "pkgsvr" in line:
    return None
  chunks = line.split()
  # There should be 1 chunk for the measure, 1 for the unit, and at least one
  # for the line description, so at least 3 total
  if len(chunks) < 3:
    logging.warning("The line {} contains too few space-separated pieces to be "
                    "parsed as a ResultLine".format(line))
    return None
  unit = chunks[-1]
  if not UnitStringIsValid(unit):
    logging.warning("The unit string parsed from {} was {}, which was not "
                    "expected".format(line, unit))
    return None
  try:
    measure = float(chunks[-2])
    desc = " ".join(chunks[:-2])
    return ResultLine(desc, measure, unit)
  except ValueError as e:
    logging.warning("The chunk {} could not be parsed as a valid measurement "
                    "because of {}".format(chunks[-2], str(e)))
    return None


class TestResult(object):
  """TestResult objects comprise the smallest unit of a GTest target, and
  contain the name of the individual test run, and the time that the test took
  to run."""

  def __init__(self, name: str, time: float, lines: List[ResultLine]) -> None:
    self.name = name
    self.time = time
    self.lines = lines

  def ToJsonDict(self) -> Dict[str, Any]:
    """Serializes this test result, including all of its lines, to a
    JSON-compatible dict."""
    return {
        "name": self.name,
        "time_in_ms": self.time,
        "lines": [line.ToJsonDict() for line in self.lines]
    }


def ReadTestFromJson(obj_dict: Dict[str, Any]) -> TestResult:
  """Inverse of TestResult.ToJsonDict."""
  name = obj_dict["name"]
  time = obj_dict["time_in_ms"]
  lines = [ReadResultLineFromJson(line) for line in obj_dict["lines"]]
  return TestResult(name, time, lines)


def ExtractTestInfo(line: str) -> Tuple[str, float]:
  """Extracts the test case name and its runtime measure from a GTest
  "[  OK  ]" line, which looks like:

    [  OK  ] Suite.Case (123 ms)

  Raises an Exception when the line cannot be parsed.
  """
  # Trim off everything through the "]" of the [  OK  ] prefix. Note that
  # str.lstrip("[  OK  ]") would be wrong here: lstrip treats its argument as
  # a *character set*, so it would also strip leading 'O'/'K' characters off
  # the test name itself (e.g. "KeyedTest" -> "eyedTest").
  bracket_index = line.find("]")
  if bracket_index != -1:
    trimmed = line[bracket_index + 1:].strip()
  else:
    trimmed = line.strip()
  try:
    test_name, rest = trimmed.split("(")  # Isolate the measurement
  except Exception as e:
    err_text = "Could not extract the case name from {} because of error {}"\
        .format(trimmed, str(e))
    raise Exception(err_text)
  try:
    measure, _ = rest.split(")", 1)[0].split()
  except Exception as e:
    # Build the message by adjacent-literal concatenation; a backslash
    # continuation *inside* the literal would embed the indentation spaces in
    # the message text.
    err_text = ("Could not extract measure and units from {} "
                "because of error {}").format(rest, str(e))
    raise Exception(err_text)
  return test_name.strip(), float(measure)


def TaggedTestFromLines(lines: List[str]) -> TestResult:
  """Builds a TestResult from the stdout lines of one test case. The last
  line must be the "[  OK  ]" line; all earlier lines are parsed as
  ResultLines where possible."""
  test_name, time = ExtractTestInfo(lines[-1])
  res_lines = []
  for line in lines[:-1]:
    res_line = ResultLineFromStdout(line)
    if res_line:
      res_lines.append(res_line)
    else:
      logging.warning("Couldn't parse line {} into a ResultLine".format(line))
  return TestResult(test_name, time, res_lines)

+ """ + + def __init__(self, name: str, tests: List[TestResult]) -> None: + self.name = name + self.tests = tests + + def ToJsonDict(self) -> Dict[str, Any]: + return { + "name": self.name, + "tests": [test.ToJsonDict() for test in self.tests] + } + + def WriteToJson(self, path: str) -> None: + with open(path, "w") as outfile: + json.dump(self.ToJsonDict(), outfile, indent=2) + + +def ReadTargetFromJson(path: str): + with open(path, "r") as json_file: + dct = json.load(json_file) + return TargetResult( + dct["name"], [ReadTestFromJson(test_dct) for test_dct in dct["tests"]]) + + +def TargetResultFromStdout(lines: List[str], name: str) -> TargetResult: + """TargetResultFromStdout attempts to associate GTest names to the lines of + output that they produce. Example input looks something like the following: + + [ RUN ] TestNameFoo + INFO measurement units + ... + [ OK ] TestNameFoo (measurement units) + ... + + Unfortunately, Because the results of output from perftest targets is not + necessarily consistent between test targets, this makes a best-effort to parse + as much information from them as possible. + """ + test_line_lists = [] # type: List[List[str]] + test_line_accum = [] # type: List[str] + read_lines = False + for line in lines: + # We're starting a test suite + if line.startswith("[ RUN ]"): + read_lines = True + # We have a prior suite that needs to be added + if len(test_line_accum) > 0: + test_line_lists.append(test_line_accum) + test_line_accum = [] + elif read_lines: + # We don't actually care about the data in the RUN line, just its + # presence. 
the OK line contains the same info, as well as the total test + # run time + test_line_accum.append(line) + if line.startswith("[ OK ]"): + read_lines = False + + test_cases = [ + TaggedTestFromLines(test_lines) for test_lines in test_line_lists + ] + return TargetResult(name, test_cases) diff --git a/chromium/tools/fuchsia/local-sdk.py b/chromium/tools/fuchsia/local-sdk.py index 9d9cd6d59d6..6751ba047c2 100755 --- a/chromium/tools/fuchsia/local-sdk.py +++ b/chromium/tools/fuchsia/local-sdk.py @@ -56,7 +56,7 @@ def main(args): tempdir = tempfile.mkdtemp() sdk_tar = os.path.join(tempdir, 'fuchsia-sdk.tgz') - Run('go', 'run', 'scripts/makesdk.go', '-output', sdk_tar, '.') + Run('go', 'run', 'scripts/sdk/foundation/makesdk.go', '-output', sdk_tar, '.') # Nuke the SDK from DEPS, put our just-built one there, and set a fake .hash # file. This means that on next gclient runhooks, we'll restore to the diff --git a/chromium/tools/gdb/gdbinit b/chromium/tools/gdb/gdbinit index 44b3195c3c7..039ced9f32c 100644 --- a/chromium/tools/gdb/gdbinit +++ b/chromium/tools/gdb/gdbinit @@ -1,5 +1,5 @@ -# This is gdbinit for source level debugging with -fdebug-prefix-map compile -# option. +# This is gdbinit for source level debugging with -fdebug-compilation-dir +# compile option. python diff --git a/chromium/tools/gn/BUILD.gn b/chromium/tools/gn/BUILD.gn deleted file mode 100644 index 1e171fa941c..00000000000 --- a/chromium/tools/gn/BUILD.gn +++ /dev/null @@ -1,374 +0,0 @@ -# Copyright (c) 2013 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
- -import("//build/config/jumbo.gni") -import("//testing/test.gni") -import("//testing/libfuzzer/fuzzer_test.gni") - -jumbo_static_library("gn_lib") { - configs += [ "//build/config:precompiled_headers" ] - - sources = [ - "action_target_generator.cc", - "action_target_generator.h", - "action_values.cc", - "action_values.h", - "analyzer.cc", - "analyzer.h", - "args.cc", - "args.h", - "binary_target_generator.cc", - "binary_target_generator.h", - "build_settings.cc", - "build_settings.h", - "builder.cc", - "builder.h", - "builder_record.cc", - "builder_record.h", - "bundle_data.cc", - "bundle_data.h", - "bundle_data_target_generator.cc", - "bundle_data_target_generator.h", - "bundle_file_rule.cc", - "bundle_file_rule.h", - "c_include_iterator.cc", - "c_include_iterator.h", - "command_analyze.cc", - "command_args.cc", - "command_check.cc", - "command_clean.cc", - "command_desc.cc", - "command_format.cc", - "command_format.h", - "command_gen.cc", - "command_help.cc", - "command_ls.cc", - "command_path.cc", - "command_refs.cc", - "commands.cc", - "commands.h", - "config.cc", - "config.h", - "config_values.cc", - "config_values.h", - "config_values_extractors.cc", - "config_values_extractors.h", - "config_values_generator.cc", - "config_values_generator.h", - "copy_target_generator.cc", - "copy_target_generator.h", - "create_bundle_target_generator.cc", - "create_bundle_target_generator.h", - "deps_iterator.cc", - "deps_iterator.h", - "desc_builder.cc", - "desc_builder.h", - "eclipse_writer.cc", - "eclipse_writer.h", - "err.cc", - "err.h", - "escape.cc", - "escape.h", - "exec_process.cc", - "exec_process.h", - "filesystem_utils.cc", - "filesystem_utils.h", - "function_exec_script.cc", - "function_foreach.cc", - "function_forward_variables_from.cc", - "function_get_label_info.cc", - "function_get_path_info.cc", - "function_get_target_outputs.cc", - "function_process_file_template.cc", - "function_read_file.cc", - "function_rebase_path.cc", - 
"function_set_default_toolchain.cc", - "function_set_defaults.cc", - "function_template.cc", - "function_toolchain.cc", - "function_write_file.cc", - "functions.cc", - "functions.h", - "functions_target.cc", - "group_target_generator.cc", - "group_target_generator.h", - "header_checker.cc", - "header_checker.h", - "import_manager.cc", - "import_manager.h", - "inherited_libraries.cc", - "inherited_libraries.h", - "input_conversion.cc", - "input_conversion.h", - "input_file.cc", - "input_file.h", - "input_file_manager.cc", - "input_file_manager.h", - "item.cc", - "item.h", - "json_project_writer.cc", - "json_project_writer.h", - "label.cc", - "label.h", - "label_pattern.cc", - "label_pattern.h", - "label_ptr.h", - "lib_file.cc", - "lib_file.h", - "loader.cc", - "loader.h", - "location.cc", - "location.h", - "ninja_action_target_writer.cc", - "ninja_action_target_writer.h", - "ninja_binary_target_writer.cc", - "ninja_binary_target_writer.h", - "ninja_build_writer.cc", - "ninja_build_writer.h", - "ninja_bundle_data_target_writer.cc", - "ninja_bundle_data_target_writer.h", - "ninja_copy_target_writer.cc", - "ninja_copy_target_writer.h", - "ninja_create_bundle_target_writer.cc", - "ninja_create_bundle_target_writer.h", - "ninja_group_target_writer.cc", - "ninja_group_target_writer.h", - "ninja_target_writer.cc", - "ninja_target_writer.h", - "ninja_toolchain_writer.cc", - "ninja_toolchain_writer.h", - "ninja_utils.cc", - "ninja_utils.h", - "ninja_writer.cc", - "ninja_writer.h", - "operators.cc", - "operators.h", - "output_file.cc", - "output_file.h", - "parse_node_value_adapter.cc", - "parse_node_value_adapter.h", - "parse_tree.cc", - "parse_tree.h", - "parser.cc", - "parser.h", - "path_output.cc", - "path_output.h", - "pattern.cc", - "pattern.h", - "pool.cc", - "pool.h", - "qt_creator_writer.cc", - "qt_creator_writer.h", - "runtime_deps.cc", - "runtime_deps.h", - "scheduler.cc", - "scheduler.h", - "scope.cc", - "scope.h", - "scope_per_file_provider.cc", - 
"scope_per_file_provider.h", - "settings.cc", - "settings.h", - "setup.cc", - "setup.h", - "source_dir.cc", - "source_dir.h", - "source_file.cc", - "source_file.h", - "source_file_type.cc", - "source_file_type.h", - "standard_out.cc", - "standard_out.h", - "string_utils.cc", - "string_utils.h", - "substitution_list.cc", - "substitution_list.h", - "substitution_pattern.cc", - "substitution_pattern.h", - "substitution_type.cc", - "substitution_type.h", - "substitution_writer.cc", - "substitution_writer.h", - "switches.cc", - "switches.h", - "target.cc", - "target.h", - "target_generator.cc", - "target_generator.h", - "template.cc", - "template.h", - "token.cc", - "token.h", - "tokenizer.cc", - "tokenizer.h", - "tool.cc", - "tool.h", - "toolchain.cc", - "toolchain.h", - "trace.cc", - "trace.h", - "unique_vector.h", - "value.cc", - "value.h", - "value_extractors.cc", - "value_extractors.h", - "variables.cc", - "variables.h", - "visibility.cc", - "visibility.h", - "visual_studio_utils.cc", - "visual_studio_utils.h", - "visual_studio_writer.cc", - "visual_studio_writer.h", - "xcode_object.cc", - "xcode_object.h", - "xcode_writer.cc", - "xcode_writer.h", - "xml_element_writer.cc", - "xml_element_writer.h", - ] - - deps = [ - "//base", - "//base/third_party/dynamic_annotations", - ] -} - -action("last_commit_position") { - script = "last_commit_position.py" - - # This dependency forces a re-run when the code is synced. - inputs = [ - "//build/util/LASTCHANGE", - ] - - outfile = "$target_gen_dir/last_commit_position.h" - outputs = [ - outfile, - ] - - args = [ - rebase_path("//", root_build_dir), - rebase_path(outfile, root_build_dir), - "TOOLS_GN_LAST_COMMIT_POSITION_H_", - ] -} - -# Note for Windows debugging: GN is super-multithreaded and uses a lot of STL. -# Iterator debugging on Windows does locking for every access, which ends up -# slowing down debug runtime from 0:36 to 9:40. 
If you want to run debug builds -# of GN over the large Chrome build, you will want to set the arg: -# enable_iterator_debugging = false -executable("gn") { - defines = [ "GN_BUILD" ] - sources = [ - "gn_main.cc", - ] - - deps = [ - ":gn_lib", - ":last_commit_position", - "//base", - "//build/config:exe_and_shlib_deps", - "//build/win:default_exe_manifest", - ] -} - -test("gn_unittests") { - deps = [ - ":gn_unittests_sources", - ] - - data = [ - "format_test_data/", - ] -} - -jumbo_source_set("gn_unittests_sources") { - testonly = true - - sources = [ - "action_target_generator_unittest.cc", - "analyzer_unittest.cc", - "args_unittest.cc", - "builder_unittest.cc", - "c_include_iterator_unittest.cc", - "command_format_unittest.cc", - "config_unittest.cc", - "config_values_extractors_unittest.cc", - "escape_unittest.cc", - "exec_process_unittest.cc", - "filesystem_utils_unittest.cc", - "function_foreach_unittest.cc", - "function_forward_variables_from_unittest.cc", - "function_get_label_info_unittest.cc", - "function_get_path_info_unittest.cc", - "function_get_target_outputs_unittest.cc", - "function_process_file_template_unittest.cc", - "function_rebase_path_unittest.cc", - "function_template_unittest.cc", - "function_toolchain_unittest.cc", - "function_write_file_unittest.cc", - "functions_target_unittest.cc", - "functions_unittest.cc", - "header_checker_unittest.cc", - "inherited_libraries_unittest.cc", - "input_conversion_unittest.cc", - "label_pattern_unittest.cc", - "label_unittest.cc", - "loader_unittest.cc", - "ninja_action_target_writer_unittest.cc", - "ninja_binary_target_writer_unittest.cc", - "ninja_build_writer_unittest.cc", - "ninja_bundle_data_target_writer_unittest.cc", - "ninja_copy_target_writer_unittest.cc", - "ninja_create_bundle_target_writer_unittest.cc", - "ninja_group_target_writer_unittest.cc", - "ninja_target_writer_unittest.cc", - "ninja_toolchain_writer_unittest.cc", - "operators_unittest.cc", - "parse_tree_unittest.cc", - 
"parser_unittest.cc", - "path_output_unittest.cc", - "pattern_unittest.cc", - "runtime_deps_unittest.cc", - "scope_per_file_provider_unittest.cc", - "scope_unittest.cc", - "source_dir_unittest.cc", - "source_file_unittest.cc", - "string_utils_unittest.cc", - "substitution_pattern_unittest.cc", - "substitution_writer_unittest.cc", - "target_unittest.cc", - "template_unittest.cc", - "test_with_scheduler.cc", - "test_with_scheduler.h", - "test_with_scope.cc", - "test_with_scope.h", - "tokenizer_unittest.cc", - "unique_vector_unittest.cc", - "value_unittest.cc", - "visibility_unittest.cc", - "visual_studio_utils_unittest.cc", - "visual_studio_writer_unittest.cc", - "xcode_object_unittest.cc", - "xml_element_writer_unittest.cc", - ] - - public_deps = [ - ":gn_lib", - "//base/test:run_all_unittests", - "//base/test:test_support", - "//testing/gtest", - ] -} - -fuzzer_test("gn_parser_fuzzer") { - sources = [ - "parser_fuzzer.cc", - ] - deps = [ - ":gn_lib", - ] -} diff --git a/chromium/tools/gn/DEPS b/chromium/tools/gn/DEPS deleted file mode 100644 index 0de07bbaf08..00000000000 --- a/chromium/tools/gn/DEPS +++ /dev/null @@ -1,3 +0,0 @@ -include_rules = [ - "+third_party/re2", -] diff --git a/chromium/tools/gn/OWNERS b/chromium/tools/gn/OWNERS deleted file mode 100644 index 82224cd8517..00000000000 --- a/chromium/tools/gn/OWNERS +++ /dev/null @@ -1,3 +0,0 @@ -brettw@chromium.org -dpranke@chromium.org -sdefresne@chromium.org diff --git a/chromium/tools/gn/README.md b/chromium/tools/gn/README.md index fd0d9c56d6d..b9da9f3d3d0 100644 --- a/chromium/tools/gn/README.md +++ b/chromium/tools/gn/README.md @@ -1,18 +1 @@ -# What is GN? - -GN is a meta-build system that generates [Ninja](https://ninja-build.org) -build files so that you can build Chromium with Ninja. - -## I want more info on GN! 
- -Read these links: - - * [Quick start](docs/quick_start.md) - * [FAQ](docs/faq.md) - * [Language and operation details](docs/language.md) - * [Reference](docs/reference.md): The built-in `gn help` documentation. - * [Style guide](docs/style_guide.md) - * [Cross compiling and toolchains](docs/cross_compiles.md) - * [Hacking on GN itself](docs/hacking.md) - * [Standaline GN projects](docs/standalone.md) - * [Pushing new binaries](docs/update_binaries.md) +gn now lives at https://gn.googlesource.com/ diff --git a/chromium/tools/gn/action_target_generator.cc b/chromium/tools/gn/action_target_generator.cc deleted file mode 100644 index ca3986eb496..00000000000 --- a/chromium/tools/gn/action_target_generator.cc +++ /dev/null @@ -1,221 +0,0 @@ -// Copyright (c) 2013 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#include "tools/gn/action_target_generator.h" - -#include "base/stl_util.h" -#include "tools/gn/build_settings.h" -#include "tools/gn/err.h" -#include "tools/gn/filesystem_utils.h" -#include "tools/gn/functions.h" -#include "tools/gn/parse_tree.h" -#include "tools/gn/scope.h" -#include "tools/gn/value.h" -#include "tools/gn/value_extractors.h" -#include "tools/gn/variables.h" - -ActionTargetGenerator::ActionTargetGenerator( - Target* target, - Scope* scope, - const FunctionCallNode* function_call, - Target::OutputType type, - Err* err) - : TargetGenerator(target, scope, function_call, err), - output_type_(type) { -} - -ActionTargetGenerator::~ActionTargetGenerator() = default; - -void ActionTargetGenerator::DoRun() { - target_->set_output_type(output_type_); - - if (!FillSources()) - return; - if (output_type_ == Target::ACTION_FOREACH && target_->sources().empty()) { - // Foreach rules must always have some sources to have an effect. 
- *err_ = Err(function_call_, "action_foreach target has no sources.", - "If you don't specify any sources, there is nothing to run your\n" - "script over."); - return; - } - - if (!FillInputs()) - return; - - if (!FillScript()) - return; - - if (!FillScriptArgs()) - return; - - if (!FillResponseFileContents()) - return; - - if (!FillOutputs(output_type_ == Target::ACTION_FOREACH)) - return; - - if (!FillDepfile()) - return; - - if (!FillPool()) - return; - - if (!FillCheckIncludes()) - return; - - if (!CheckOutputs()) - return; - - // Action outputs don't depend on the current toolchain so we can skip adding - // that dependency. - - // response_file_contents and {{response_file_name}} in the args must go - // together. - const auto& required_args_substitutions = - target_->action_values().args().required_types(); - bool has_rsp_file_name = base::ContainsValue(required_args_substitutions, - SUBSTITUTION_RSP_FILE_NAME); - if (target_->action_values().uses_rsp_file() && !has_rsp_file_name) { - *err_ = Err(function_call_, "Missing {{response_file_name}} in args.", - "This target defines response_file_contents but doesn't use\n" - "{{response_file_name}} in the args, which means the response file\n" - "will be unused."); - return; - } - if (!target_->action_values().uses_rsp_file() && has_rsp_file_name) { - *err_ = Err(function_call_, "Missing response_file_contents definition.", - "This target uses {{response_file_name}} in the args, but does not\n" - "define response_file_contents which means the response file\n" - "will be empty."); - return; - } -} - -bool ActionTargetGenerator::FillScript() { - // If this gets called, the target type requires a script, so error out - // if it doesn't have one. 
- const Value* value = scope_->GetValue(variables::kScript, true); - if (!value) { - *err_ = Err(function_call_, "This target type requires a \"script\"."); - return false; - } - if (!value->VerifyTypeIs(Value::STRING, err_)) - return false; - - SourceFile script_file = - scope_->GetSourceDir().ResolveRelativeFile( - *value, err_, - scope_->settings()->build_settings()->root_path_utf8()); - if (err_->has_error()) - return false; - target_->action_values().set_script(script_file); - return true; -} - -bool ActionTargetGenerator::FillScriptArgs() { - const Value* value = scope_->GetValue(variables::kArgs, true); - if (!value) - return true; // Nothing to do. - - if (!target_->action_values().args().Parse(*value, err_)) - return false; - if (!EnsureValidSubstitutions( - target_->action_values().args().required_types(), - &IsValidScriptArgsSubstitution, - value->origin(), err_)) - return false; - - return true; -} - -bool ActionTargetGenerator::FillResponseFileContents() { - const Value* value = scope_->GetValue(variables::kResponseFileContents, true); - if (!value) - return true; // Nothing to do. 
- - if (!target_->action_values().rsp_file_contents().Parse(*value, err_)) - return false; - if (!EnsureValidSubstitutions( - target_->action_values().rsp_file_contents().required_types(), - &IsValidSourceSubstitution, value->origin(), err_)) - return false; - - return true; -} - -bool ActionTargetGenerator::FillDepfile() { - const Value* value = scope_->GetValue(variables::kDepfile, true); - if (!value) - return true; - - SubstitutionPattern depfile; - if (!depfile.Parse(*value, err_)) - return false; - if (!EnsureSubstitutionIsInOutputDir(depfile, *value)) - return false; - - target_->action_values().set_depfile(depfile); - return true; -} - -bool ActionTargetGenerator::FillPool() { - const Value* value = scope_->GetValue(variables::kPool, true); - if (!value) - return true; - - Label label = Label::Resolve(scope_->GetSourceDir(), - ToolchainLabelForScope(scope_), *value, err_); - if (err_->has_error()) - return false; - - LabelPtrPair pair(label); - pair.origin = target_->defined_from(); - - target_->action_values().set_pool(std::move(pair)); - return true; -} - -bool ActionTargetGenerator::CheckOutputs() { - const SubstitutionList& outputs = target_->action_values().outputs(); - if (outputs.list().empty()) { - *err_ = Err(function_call_, "Action has no outputs.", - "If you have no outputs, the build system can not tell when your\n" - "script needs to be run."); - return false; - } - - if (output_type_ == Target::ACTION) { - if (!outputs.required_types().empty()) { - *err_ = Err(function_call_, "Action has patterns in the output.", - "An action target should have the outputs completely specified. If\n" - "you want to provide a mapping from source to output, use an\n" - "\"action_foreach\" target."); - return false; - } - } else if (output_type_ == Target::ACTION_FOREACH) { - // A foreach target should always have a pattern in the outputs. 
- if (outputs.required_types().empty()) { - *err_ = Err(function_call_, - "action_foreach should have a pattern in the output.", - "An action_foreach target should have a source expansion pattern in\n" - "it to map source file to unique output file name. Otherwise, the\n" - "build system can't determine when your script needs to be run."); - return false; - } - } - return true; -} - -bool ActionTargetGenerator::FillInputs() { - const Value* value = scope_->GetValue(variables::kInputs, true); - if (!value) - return true; - - Target::FileList dest_inputs; - if (!ExtractListOfRelativeFiles(scope_->settings()->build_settings(), *value, - scope_->GetSourceDir(), &dest_inputs, err_)) - return false; - target_->config_values().inputs().swap(dest_inputs); - return true; -} diff --git a/chromium/tools/gn/action_target_generator.h b/chromium/tools/gn/action_target_generator.h deleted file mode 100644 index 0ea3cbbab87..00000000000 --- a/chromium/tools/gn/action_target_generator.h +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) 2013 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#ifndef TOOLS_GN_ACTION_TARGET_GENERATOR_H_ -#define TOOLS_GN_ACTION_TARGET_GENERATOR_H_ - -#include "base/macros.h" -#include "tools/gn/target.h" -#include "tools/gn/target_generator.h" - -// Populates a Target with the values from an action[_foreach] rule. -class ActionTargetGenerator : public TargetGenerator { - public: - ActionTargetGenerator(Target* target, - Scope* scope, - const FunctionCallNode* function_call, - Target::OutputType type, - Err* err); - ~ActionTargetGenerator() override; - - protected: - void DoRun() override; - - private: - bool FillScript(); - bool FillScriptArgs(); - bool FillResponseFileContents(); - bool FillDepfile(); - bool FillPool(); - bool FillInputs(); - - // Checks for errors in the outputs variable. 
- bool CheckOutputs(); - - Target::OutputType output_type_; - - DISALLOW_COPY_AND_ASSIGN(ActionTargetGenerator); -}; - -#endif // TOOLS_GN_ACTION_TARGET_GENERATOR_H_ diff --git a/chromium/tools/gn/action_target_generator_unittest.cc b/chromium/tools/gn/action_target_generator_unittest.cc deleted file mode 100644 index 254bc9827ad..00000000000 --- a/chromium/tools/gn/action_target_generator_unittest.cc +++ /dev/null @@ -1,122 +0,0 @@ -// Copyright 2014 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#include "testing/gtest/include/gtest/gtest.h" -#include "tools/gn/scheduler.h" -#include "tools/gn/test_with_scheduler.h" -#include "tools/gn/test_with_scope.h" - -using ActionTargetGenerator = TestWithScheduler; - -// Tests that actions can't have output substitutions. -TEST_F(ActionTargetGenerator, ActionOutputSubstitutions) { - TestWithScope setup; - Scope::ItemVector items_; - setup.scope()->set_item_collector(&items_); - - // First test one with no substitutions, this should be valid. - TestParseInput input_good( - R"(action("foo") { - script = "//foo.py" - sources = [ "//bar.txt" ] - outputs = [ "//out/Debug/one.txt" ] - })"); - ASSERT_FALSE(input_good.has_error()); - - // This should run fine. - Err err; - input_good.parsed()->Execute(setup.scope(), &err); - ASSERT_FALSE(err.has_error()) << err.message(); - - // Same thing with a pattern in the output should fail. - TestParseInput input_bad( - R"(action("foo") { - script = "//foo.py" - sources = [ "//bar.txt" ] - outputs = [ "//out/Debug/{{source_name_part}}.txt" ] - })"); - ASSERT_FALSE(input_bad.has_error()); - - // This should run fine. - input_bad.parsed()->Execute(setup.scope(), &err); - ASSERT_TRUE(err.has_error()); -} - -// Tests that arg and response file substitutions are validated for -// action_foreach targets. 
-TEST_F(ActionTargetGenerator, ActionForeachSubstitutions) { - TestWithScope setup; - Scope::ItemVector items_; - setup.scope()->set_item_collector(&items_); - - // Args listing a response file but missing a response file definition should - // fail. - TestParseInput input_missing_resp_file( - R"(action_foreach("foo") { - script = "//foo.py" - sources = [ "//bar.txt" ] - outputs = [ "//out/Debug/{{source_name_part}}" ] - args = [ "{{response_file_name}}" ] - })"); - ASSERT_FALSE(input_missing_resp_file.has_error()); - Err err; - input_missing_resp_file.parsed()->Execute(setup.scope(), &err); - ASSERT_TRUE(err.has_error()); - - // Adding a response file definition should pass. - err = Err(); - TestParseInput input_resp_file( - R"(action_foreach("foo") { - script = "//foo.py" - sources = [ "//bar.txt" ] - outputs = [ "//out/Debug/{{source_name_part}}" ] - args = [ "{{response_file_name}}" ] - response_file_contents = [ "{{source_name_part}}" ] - })"); - ASSERT_FALSE(input_resp_file.has_error()); - input_resp_file.parsed()->Execute(setup.scope(), &err); - ASSERT_FALSE(err.has_error()) << err.message(); - - // Defining a response file but not referencing it should fail. - err = Err(); - TestParseInput input_missing_rsp_args( - R"(action_foreach("foo") { - script = "//foo.py" - sources = [ "//bar.txt" ] - outputs = [ "//out/Debug/{{source_name_part}}" ] - args = [ "{{source_name_part}}" ] - response_file_contents = [ "{{source_name_part}}" ] - })"); - ASSERT_FALSE(input_missing_rsp_args.has_error()); - input_missing_rsp_args.parsed()->Execute(setup.scope(), &err); - ASSERT_TRUE(err.has_error()) << err.message(); - - // Bad substitutions in args. 
- err = Err(); - TestParseInput input_bad_args( - R"(action_foreach("foo") { - script = "//foo.py" - sources = [ "//bar.txt" ] - outputs = [ "//out/Debug/{{source_name_part}}" ] - args = [ "{{response_file_name}} {{ldflags}}" ] - response_file_contents = [ "{{source_name_part}}" ] - })"); - ASSERT_FALSE(input_bad_args.has_error()); - input_bad_args.parsed()->Execute(setup.scope(), &err); - ASSERT_TRUE(err.has_error()) << err.message(); - - // Bad substitutions in response file contents. - err = Err(); - TestParseInput input_bad_rsp( - R"(action_foreach("foo") { - script = "//foo.py" - sources = [ "//bar.txt" ] - outputs = [ "//out/Debug/{{source_name_part}}" ] - args = [ "{{response_file_name}}" ] - response_file_contents = [ "{{source_name_part}} {{ldflags}}" ] - })"); - ASSERT_FALSE(input_bad_rsp.has_error()); - input_bad_rsp.parsed()->Execute(setup.scope(), &err); - ASSERT_TRUE(err.has_error()) << err.message(); -} diff --git a/chromium/tools/gn/action_values.cc b/chromium/tools/gn/action_values.cc deleted file mode 100644 index 3290da4396a..00000000000 --- a/chromium/tools/gn/action_values.cc +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) 2013 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#include "tools/gn/action_values.h" - -#include "tools/gn/settings.h" -#include "tools/gn/substitution_writer.h" -#include "tools/gn/target.h" - -ActionValues::ActionValues() = default; - -ActionValues::~ActionValues() = default; - -void ActionValues::GetOutputsAsSourceFiles( - const Target* target, - std::vector* result) const { - if (target->output_type() == Target::BUNDLE_DATA) { - // The bundle_data target has no output, the real output will be generated - // by the create_bundle target. - } else if (target->output_type() == Target::COPY_FILES || - target->output_type() == Target::ACTION_FOREACH) { - // Copy and foreach applies the outputs to the sources. 
- SubstitutionWriter::ApplyListToSources( - target, target->settings(), outputs_, target->sources(), result); - } else { - // Actions (and anything else that happens to specify an output) just use - // the output list with no substitution. - SubstitutionWriter::GetListAsSourceFiles(outputs_, result); - } -} diff --git a/chromium/tools/gn/action_values.h b/chromium/tools/gn/action_values.h deleted file mode 100644 index 806a39f8c64..00000000000 --- a/chromium/tools/gn/action_values.h +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright (c) 2013 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#ifndef TOOLS_GN_ACTION_VALUES_H_ -#define TOOLS_GN_ACTION_VALUES_H_ - -#include -#include - -#include "base/macros.h" -#include "tools/gn/label_ptr.h" -#include "tools/gn/source_file.h" -#include "tools/gn/substitution_list.h" - -class Pool; -class Target; - -// Holds the values (outputs, args, script name, etc.) for either an action or -// an action_foreach target. -class ActionValues { - public: - ActionValues(); - ~ActionValues(); - - // Filename of the script to execute. - const SourceFile& script() const { return script_; } - void set_script(const SourceFile& s) { script_ = s; } - - // Arguments to the script. - SubstitutionList& args() { return args_; } - const SubstitutionList& args() const { return args_; } - - // Files created by the script. These are strings rather than SourceFiles - // since they will often contain {{source expansions}}. - SubstitutionList& outputs() { return outputs_; } - const SubstitutionList& outputs() const { return outputs_; } - - // Expands the outputs() above to the final SourceFile list. - void GetOutputsAsSourceFiles(const Target* target, - std::vector* result) const; - - // Depfile generated by the script. 
- const SubstitutionPattern& depfile() const { return depfile_; } - bool has_depfile() const { return !depfile_.ranges().empty(); } - void set_depfile(const SubstitutionPattern& depfile) { depfile_ = depfile; } - - // Response file contents. Empty means no response file. - SubstitutionList& rsp_file_contents() { return rsp_file_contents_; } - const SubstitutionList& rsp_file_contents() const { - return rsp_file_contents_; - } - bool uses_rsp_file() const { return !rsp_file_contents_.list().empty(); } - - // Pool option - const LabelPtrPair& pool() const { return pool_; } - void set_pool(LabelPtrPair pool) { pool_ = std::move(pool); } - - private: - SourceFile script_; - SubstitutionList args_; - SubstitutionList outputs_; - SubstitutionPattern depfile_; - SubstitutionList rsp_file_contents_; - LabelPtrPair pool_; - - DISALLOW_COPY_AND_ASSIGN(ActionValues); -}; - -#endif // TOOLS_GN_ACTION_VALUES_H_ diff --git a/chromium/tools/gn/analyzer.cc b/chromium/tools/gn/analyzer.cc deleted file mode 100644 index c5158c5b6fa..00000000000 --- a/chromium/tools/gn/analyzer.cc +++ /dev/null @@ -1,486 +0,0 @@ -// Copyright 2016 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#include "tools/gn/analyzer.h" - -#include -#include -#include -#include -#include - -#include "base/json/json_reader.h" -#include "base/json/json_writer.h" -#include "base/strings/string_util.h" -#include "base/values.h" -#include "tools/gn/builder.h" -#include "tools/gn/config.h" -#include "tools/gn/config_values_extractors.h" -#include "tools/gn/deps_iterator.h" -#include "tools/gn/err.h" -#include "tools/gn/filesystem_utils.h" -#include "tools/gn/loader.h" -#include "tools/gn/location.h" -#include "tools/gn/pool.h" -#include "tools/gn/source_file.h" -#include "tools/gn/target.h" - -namespace { - -struct Inputs { - std::vector source_vec; - std::vector
        . (See "os" and "toolchain"). -# - Handle "Arguments:" blocks a bit better (the argument names could be -# distinguished). -# - Convert "|blahblah|" to . -# - Spit out other similar formats like wiki, markdown, whatever. - -import cgi -import subprocess -import sys - - -def GetOutput(*args): - try: - return subprocess.check_output([sys.argv[1]] + list(args)) - except subprocess.CalledProcessError: - return '' - - -def ParseTopLevel(out): - commands = [] - output = [] - for line in out.splitlines(): - if line.startswith(' '): - command, sep, rest = line.partition(':') - command = command.strip() - is_option = command.startswith('-') - output_line = ['
      • '] - if not is_option: - commands.append(command) - output_line.append('') - output_line.append(cgi.escape(command)) - if not is_option: - output_line.append('') - output_line.extend([sep + cgi.escape(rest) + '
      • ']) - output.append(''.join(output_line)) - else: - output.append('

        ' + cgi.escape(line) + '

        ') - return commands, output - - -def ParseCommand(command, out): - first_line = True - got_example = False - output = [] - for line in out.splitlines(): - if first_line: - name, sep, rest = line.partition(':') - name = name.strip() - output.append('

        ' + - cgi.escape(name + sep + rest) + '

        ') - first_line = False - else: - if line.startswith('Example'): - # Special subsection that's pre-formatted. - if got_example: - output.append('') - got_example = True - output.append('

        Example

        ') - output.append('
        ')
        -      elif not line.strip():
        -        output.append('

        ') - elif not line.startswith(' ') and line.endswith(':'): - # Subsection. - output.append('

        ' + cgi.escape(line[:-1]) + '

        ') - else: - output.append(cgi.escape(line)) - if got_example: - output.append('
        ') - return output - - -def main(): - if len(sys.argv) < 2: - print 'usage: help_as_html.py ' - return 1 - header = ''' - - - - - - -

        GN

        -''' - footer = '
        ' - commands, output = ParseTopLevel(GetOutput('help')) - for command in commands: - output += ParseCommand(command, GetOutput('help', command)) - print header + '\n'.join(output) + footer - return 0 - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/chromium/tools/gn/bin/roll_gn.py b/chromium/tools/gn/bin/roll_gn.py deleted file mode 100755 index cf4c878c242..00000000000 --- a/chromium/tools/gn/bin/roll_gn.py +++ /dev/null @@ -1,461 +0,0 @@ -#!/usr/bin/env python -# Copyright 2014 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""An auto-roller for GN binaries into Chromium. - -This script is used to update the GN binaries that a Chromium -checkout uses. In order to update the binaries, one must follow -four steps in order: - -1. Trigger try jobs to build a new GN binary at tip-of-tree and upload - the newly-built binaries into the right Google CloudStorage bucket. -2. Wait for the try jobs to complete. -3. Update the buildtools repo with the .sha1 hashes of the newly built - binaries. -4. Update Chromium's DEPS file to the new version of the buildtools repo. - -The script has four commands that correspond to the four steps above: -'build', 'wait', 'roll_buildtools', and 'roll_deps'. - -The script has a fifth command, 'roll', that runs the four in order. - -If given no arguments, the script will run the 'roll' command. - -It can only be run on linux in a clean Chromium checkout; it should -error out in most cases if something bad happens, but the error checking -isn't yet foolproof. 
- -""" - -from __future__ import print_function - -import argparse -import json -import os -import re -import subprocess -import sys -import tempfile -import time -import urllib2 - - -depot_tools_path = os.path.abspath(os.path.normpath(os.path.join( - os.path.dirname(__file__), '..', '..', '..', 'third_party', 'depot_tools'))) -if not depot_tools_path in sys.path: - sys.path.insert(0, depot_tools_path) - - -CHROMIUM_REPO = 'https://chromium.googlesource.com/chromium/src.git' - -COMMITISH_DIGITS = 10 - -UNKNOWN, PENDING, STARTED, SUCCESS = ( - 'unknown', 'pending', 'started', 'success') - -class BuildResult(object): - def __init__(self): - self.masterName = '-' - self.builderName = '-' - self.buildNumber = '-' - self.state = UNKNOWN - self.sha1 = '-' - self.url = '-' - - -class GNRoller(object): - def __init__(self): - self.chromium_src_dir = None - self.buildtools_dir = None - self.old_gn_commitish = None - self.new_gn_commitish = None - self.old_gn_version = None - self.new_gn_version = None - self.reviewer = 'dpranke@chromium.org' - if os.getenv('USER') == 'dpranke': - self.reviewer = 'brettw@chromium.org' - - def Roll(self): - parser = argparse.ArgumentParser() - parser.usage = __doc__ - parser.add_argument('command', nargs='?', default='roll', - help='build|roll|roll_buildtools|roll_deps|wait' - ' (%(default)s is the default)') - - args = parser.parse_args() - command = args.command - ret = self.SetUp() - if not ret and command in ('roll', 'build'): - ret = self.TriggerBuild() - if not ret and command in ('roll', 'wait'): - ret = self.WaitForBuildToFinish() - if not ret and command in ('roll', 'roll_buildtools'): - ret = self.RollBuildtools() - if not ret and command in ('roll', 'roll_deps'): - ret = self.RollDEPS() - - return ret - - def SetUp(self): - if sys.platform not in ('darwin', 'linux2'): - print('roll_gn is only tested and working on Linux and Mac for now.') - return 1 - - ret, out, _ = self.Call('git config --get remote.origin.url') - origin = 
out.strip() - if ret or origin != CHROMIUM_REPO: - print('Not in a Chromium repo? git config --get remote.origin.url ' - 'returned %d: %s' % (ret, origin)) - return 1 - - ret, _, _ = self.Call('git diff -q') - if ret: - print("Checkout is dirty, exiting") - return 1 - - _, out, _ = self.Call('git rev-parse --show-toplevel', cwd=os.getcwd()) - self.chromium_src_dir = out.strip() - self.buildtools_dir = os.path.join(self.chromium_src_dir, 'buildtools') - - self.new_gn_commitish, self.new_gn_version = self.GetNewVersions() - - _, out, _ = self.Call('gn --version') - self.old_gn_version = out.strip() - - _, out, _ = self.Call('git crrev-parse %s' % self.old_gn_version) - self.old_gn_commitish = out.strip() - return 0 - - def GetNewVersions(self): - _, out, _ = self.Call('git log -1 --grep Cr-Commit-Position') - commit_msg = out.splitlines() - first_line = commit_msg[0] - new_gn_commitish = first_line.split()[1] - - last_line = commit_msg[-1] - new_gn_version = re.sub('.*master@{#(\d+)}', '\\1', last_line) - - return new_gn_commitish, new_gn_version - - def TriggerBuild(self): - ret, _, _ = self.Call('git new-branch build_gn_%s' % self.new_gn_version) - if ret: - print('Failed to create a new branch for build_gn_%s' % - self.new_gn_version) - return 1 - - self.MakeDummyDepsChange() - - ret, out, err = self.Call('git commit -a -m "Build gn at %s"' % - self.new_gn_version) - if ret: - print('git commit failed: %s' % out + err) - return 1 - - print('Uploading CL to build GN at {#%s} - %s' % - (self.new_gn_version, self.new_gn_commitish)) - ret, out, err = self.Call('git cl upload -f') - if ret: - print('git-cl upload failed: %s' % out + err) - return 1 - - print('Starting try jobs') - self.Call('git-cl try -m tryserver.chromium.linux ' - '-b linux_chromium_gn_upload -r %s' % self.new_gn_commitish) - self.Call('git-cl try -m tryserver.chromium.mac ' - '-b mac_chromium_gn_upload -r %s' % self.new_gn_commitish) - self.Call('git-cl try -m tryserver.chromium.win ' - '-b 
win_chromium_gn_upload -r %s' % self.new_gn_commitish) - - return 0 - - def MakeDummyDepsChange(self): - with open('DEPS') as fp: - deps_content = fp.read() - new_deps = deps_content.replace("'buildtools_revision':", - "'buildtools_revision': ") - - with open('DEPS', 'w') as fp: - fp.write(new_deps) - - def WaitForBuildToFinish(self): - ret = self.CheckoutBuildBranch() - if ret: - return ret - - print('Checking build') - results = self.CheckBuild() - while (any(r.state in (PENDING, STARTED) for r in results.values())): - print() - print('Sleeping for 30 seconds') - time.sleep(30) - print('Checking build') - results = self.CheckBuild() - - ret = 0 if all(r.state == SUCCESS for r in results.values()) else 1 - if ret: - print('Build failed.') - else: - print('Builds ready.') - - # Close the build CL and move off of the build branch back to whatever - # we were on before. - self.Call('git-cl set-close') - self.MoveToLastHead() - - return ret - - def CheckoutBuildBranch(self): - ret, out, err = self.Call('git checkout build_gn_%s' % self.new_gn_version) - if ret: - print('Failed to check out build_gn_%s' % self.new_gn_version) - if out: - print(out) - if err: - print(err, file=sys.stderr) - return ret - - def CheckBuild(self): - _, out, _ = self.Call('git-cl try-results') - - builders = { - 'linux_chromium_gn_upload': 'linux64', - 'mac_chromium_gn_upload': 'mac', - 'win_chromium_gn_upload': 'win' - } - - results = {} - for platform in ('linux64', 'mac', 'win'): - results[platform] = BuildResult() - - state = PENDING - for line in out.splitlines(): - fields = line.strip().split() - if fields[0] == 'Started:': - state = STARTED - if fields[0] == 'Successes:': - state = SUCCESS - elif fields[0] == 'Total': - pass - elif fields[0] in builders: - builder = fields[0] - platform = builders[builder] - result = results[platform] - result.masterName = ('tryserver.chromium.%s' % - platform.replace('linux64', 'linux')) - result.builderName = builder - result.url = fields[1] - if 
result.url.startswith('id'): - result.state = PENDING - else: - result.state = state - result.buildNumber = int(result.url[result.url.rfind('/')+1:]) - - for result in results.values(): - if result.state == SUCCESS: - url = 'https://luci-milo.appspot.com/prpc/milo.BuildInfo/Get' - data = json.dumps({"buildbot": { - 'masterName': result.masterName, - 'builderName': result.builderName, - 'buildNumber': result.buildNumber, - }}) - headers = { - 'content-type': 'application/json', - 'accept': 'application/json', - } - - req = urllib2.Request(url, data, headers) - resp = urllib2.urlopen(req) - data = resp.read() - resp.close() - - # The first line of the response is garbage; skip it. - js = json.loads(data.splitlines()[1]) - - sha1_step_name = 'gn sha1' - for step in js['step']['substep']: - if step['step']['name'] == sha1_step_name: - sha1 = step['step']['text'][-1] - - result.sha1 = sha1 - - for platform, r in results.items(): - print(platform) - print(' sha1: %s' % r.sha1) - print(' state: %s' % r.state) - print(' build: %s' % r.buildNumber) - print(' url: %s' % r.url) - print() - - return results - - def RollBuildtools(self): - ret = self.CheckoutBuildBranch() - if ret: - return ret - - results = self.CheckBuild() - if (len(results) < 3 or - not all(r.state == SUCCESS for r in results.values()) or - not all(r.sha1 != '-' for r in results.values())): - print("Roll isn't done or didn't succeed, exiting:") - return 1 - - desc = self.GetBuildtoolsDesc() - - self.Call('git new-branch roll_buildtools_gn_%s' % self.new_gn_version, - cwd=self.buildtools_dir) - - for platform in results: - fname = 'gn.exe.sha1' if platform == 'win' else 'gn.sha1' - path = os.path.join(self.buildtools_dir, platform, fname) - with open(path, 'w') as fp: - fp.write('%s\n' % results[platform].sha1) - - desc_file = tempfile.NamedTemporaryFile(delete=False) - try: - desc_file.write(desc) - desc_file.close() - self.Call('git commit -a -F %s' % desc_file.name, - cwd=self.buildtools_dir) - 
self.Call('git-cl upload -f --send-mail', - cwd=self.buildtools_dir) - finally: - os.remove(desc_file.name) - - ret, out, err = self.Call('git cl land', cwd=self.buildtools_dir) - if ret: - print("buildtools git cl land failed: %d" % ret) - if out: - print(out) - if err: - print(err) - return ret - - # Fetch the revision we just committed so that RollDEPS will find it. - self.Call('git fetch', cwd=self.buildtools_dir) - - # Reset buildtools to the new commit so that we're not still on the - # merged branch. - self.Call('git checkout origin/master', cwd=self.buildtools_dir) - - _, out, _ = self.Call('git rev-parse origin/master', - cwd=self.buildtools_dir) - new_buildtools_commitish = out.strip() - print('Ready to roll buildtools to %s in DEPS' % new_buildtools_commitish) - - return 0 - - def RollDEPS(self): - ret, _, _ = self.Call('git new-branch roll_gn_%s' % self.new_gn_version) - if ret: - print('Failed to create a new branch for roll_gn_%s' % - self.new_gn_version) - return 1 - - _, out, _ = self.Call('git rev-parse origin/master', - cwd=self.buildtools_dir) - new_buildtools_commitish = out.strip() - - new_deps_lines = [] - old_buildtools_commitish = '' - with open(os.path.join(self.chromium_src_dir, 'DEPS')) as fp: - for l in fp.readlines(): - m = re.match(".*'buildtools_revision':.*'(.+)',", l) - if m: - old_buildtools_commitish = m.group(1) - new_deps_lines.append(" 'buildtools_revision': '%s',\n" % - new_buildtools_commitish) - else: - new_deps_lines.append(l) - - if not old_buildtools_commitish: - print('Could not update DEPS properly, exiting') - return 1 - - with open('DEPS', 'w') as fp: - fp.write(''.join(new_deps_lines)) - - desc = self.GetDEPSRollDesc(old_buildtools_commitish, - new_buildtools_commitish) - desc_file = tempfile.NamedTemporaryFile(delete=False) - try: - desc_file.write(desc) - desc_file.close() - self.Call('git commit -a -F %s' % desc_file.name) - self.Call('git-cl upload -f --send-mail --use-commit-queue') - finally: - 
os.remove(desc_file.name) - - # Move off of the roll branch onto whatever we were on before. - # Do not explicitly close the roll CL issue, however; the CQ - # will close it when the roll lands, assuming it does so. - self.MoveToLastHead() - - return 0 - - def MoveToLastHead(self): - # When this is called, there will be a commit + a checkout as - # the two most recent entries in the reflog, assuming nothing as - # modified the repo while this script has been running. - _, out, _ = self.Call('git reflog -2') - m = re.search('moving from ([^\s]+)', out) - last_head = m.group(1) - self.Call('git checkout %s' % last_head) - - def GetBuildtoolsDesc(self): - gn_changes = self.GetGNChanges() - return ( - 'Roll gn %s..%s (r%s:r%s)\n' - '\n' - '%s' - '\n' - 'TBR=%s\n' % ( - self.old_gn_commitish[:COMMITISH_DIGITS], - self.new_gn_commitish[:COMMITISH_DIGITS], - self.old_gn_version, - self.new_gn_version, - gn_changes, - self.reviewer, - )) - - def GetDEPSRollDesc(self, old_buildtools_commitish, new_buildtools_commitish): - gn_changes = self.GetGNChanges() - - return ( - 'Roll buildtools %s..%s\n' - '\n' - ' In order to roll GN %s..%s (r%s:r%s) and pick up\n' - ' the following changes:\n' - '\n' - '%s' - '\n' - 'TBR=%s\n' % ( - old_buildtools_commitish[:COMMITISH_DIGITS], - new_buildtools_commitish[:COMMITISH_DIGITS], - self.old_gn_commitish[:COMMITISH_DIGITS], - self.new_gn_commitish[:COMMITISH_DIGITS], - self.old_gn_version, - self.new_gn_version, - gn_changes, - self.reviewer, - )) - - def GetGNChanges(self): - _, out, _ = self.Call( - "git log --pretty=' %h %s' " + - "%s..%s tools/gn" % (self.old_gn_commitish, self.new_gn_commitish)) - return out - - def Call(self, cmd, cwd=None): - proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True, - cwd=(cwd or self.chromium_src_dir)) - out, err = proc.communicate() - return proc.returncode, out or '', err or '' - - -if __name__ == '__main__': - roller = GNRoller() - sys.exit(roller.Roll()) diff --git 
a/chromium/tools/gn/binary_target_generator.cc b/chromium/tools/gn/binary_target_generator.cc deleted file mode 100644 index 4ff9366524c..00000000000 --- a/chromium/tools/gn/binary_target_generator.cc +++ /dev/null @@ -1,182 +0,0 @@ -// Copyright (c) 2013 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#include "tools/gn/binary_target_generator.h" - -#include "tools/gn/config_values_generator.h" -#include "tools/gn/deps_iterator.h" -#include "tools/gn/err.h" -#include "tools/gn/filesystem_utils.h" -#include "tools/gn/functions.h" -#include "tools/gn/scope.h" -#include "tools/gn/settings.h" -#include "tools/gn/value_extractors.h" -#include "tools/gn/variables.h" - -BinaryTargetGenerator::BinaryTargetGenerator( - Target* target, - Scope* scope, - const FunctionCallNode* function_call, - Target::OutputType type, - Err* err) - : TargetGenerator(target, scope, function_call, err), - output_type_(type) { -} - -BinaryTargetGenerator::~BinaryTargetGenerator() = default; - -void BinaryTargetGenerator::DoRun() { - target_->set_output_type(output_type_); - - if (!FillOutputName()) - return; - - if (!FillOutputPrefixOverride()) - return; - - if (!FillOutputDir()) - return; - - if (!FillOutputExtension()) - return; - - if (!FillSources()) - return; - - if (!FillPublic()) - return; - - if (!FillFriends()) - return; - - if (!FillCheckIncludes()) - return; - - if (!FillConfigs()) - return; - - if (!FillAllowCircularIncludesFrom()) - return; - - if (!FillCompleteStaticLib()) - return; - - // Config values (compiler flags, etc.) set directly on this target. 
- ConfigValuesGenerator gen(&target_->config_values(), scope_, - scope_->GetSourceDir(), err_); - gen.Run(); - if (err_->has_error()) - return; -} - -bool BinaryTargetGenerator::FillCompleteStaticLib() { - if (target_->output_type() == Target::STATIC_LIBRARY) { - const Value* value = scope_->GetValue(variables::kCompleteStaticLib, true); - if (!value) - return true; - if (!value->VerifyTypeIs(Value::BOOLEAN, err_)) - return false; - target_->set_complete_static_lib(value->boolean_value()); - } - return true; -} - -bool BinaryTargetGenerator::FillFriends() { - const Value* value = scope_->GetValue(variables::kFriend, true); - if (value) { - return ExtractListOfLabelPatterns(*value, scope_->GetSourceDir(), - &target_->friends(), err_); - } - return true; -} - -bool BinaryTargetGenerator::FillOutputName() { - const Value* value = scope_->GetValue(variables::kOutputName, true); - if (!value) - return true; - if (!value->VerifyTypeIs(Value::STRING, err_)) - return false; - target_->set_output_name(value->string_value()); - return true; -} - -bool BinaryTargetGenerator::FillOutputPrefixOverride() { - const Value* value = scope_->GetValue(variables::kOutputPrefixOverride, true); - if (!value) - return true; - if (!value->VerifyTypeIs(Value::BOOLEAN, err_)) - return false; - target_->set_output_prefix_override(value->boolean_value()); - return true; -} - -bool BinaryTargetGenerator::FillOutputDir() { - const Value* value = scope_->GetValue(variables::kOutputDir, true); - if (!value) - return true; - if (!value->VerifyTypeIs(Value::STRING, err_)) - return false; - - if (value->string_value().empty()) - return true; // Treat empty string as the default and do nothing. 
- - const BuildSettings* build_settings = scope_->settings()->build_settings(); - SourceDir dir = scope_->GetSourceDir().ResolveRelativeDir( - *value, err_, build_settings->root_path_utf8()); - if (err_->has_error()) - return false; - - if (!EnsureStringIsInOutputDir(build_settings->build_dir(), - dir.value(), value->origin(), err_)) - return false; - target_->set_output_dir(dir); - return true; -} - -bool BinaryTargetGenerator::FillOutputExtension() { - const Value* value = scope_->GetValue(variables::kOutputExtension, true); - if (!value) - return true; - if (!value->VerifyTypeIs(Value::STRING, err_)) - return false; - target_->set_output_extension(value->string_value()); - return true; -} - -bool BinaryTargetGenerator::FillAllowCircularIncludesFrom() { - const Value* value = scope_->GetValue( - variables::kAllowCircularIncludesFrom, true); - if (!value) - return true; - - UniqueVector